query              stringlengths    9 – 3.4k
document           stringlengths    9 – 87.4k
metadata           dict
negatives          listlengths      4 – 101
negative_scores    listlengths      4 – 101
document_score     stringlengths    3 – 10
document_rank      stringclasses    102 values
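The fields above describe each row of the retrieval dataset: a natural-language query, a positive code document, a metadata dict, a list of negative documents with aligned scores, and the score and rank of the positive document. Below is a minimal sketch of loading and inspecting one such row with the Hugging Face datasets library; the repository path and split name are placeholder assumptions, not the actual dataset location.

    # Sketch only: "your-org/code-search-triplets" is a hypothetical path.
    from datasets import load_dataset

    ds = load_dataset("your-org/code-search-triplets", split="train")

    row = ds[0]
    print(row["query"])               # natural-language description of the code
    print(row["document"][:200])      # positive code snippet (string)
    print(len(row["negatives"]))      # between 4 and 101 negative snippets
    print(row["negative_scores"][:5]) # scores aligned with the negatives
    print(row["document_score"], row["document_rank"])

The example row reproduced below follows this layout: the query first, then the positive document, the objective metadata, and the serialized list of negatives.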
Create the reference file of a test using the response received. The file will be created in the git references folder provided in the settings file
def create_reference(
    self, response_checker=default_checker.default_journey_checker
):
    # Check that the file doesn't already exist
    filename = self.get_file_name()
    filepath = os.path.join(config["REFERENCE_FILE_PATH"], filename)
    if os.path.isfile(filepath):
        logger.warning(
            "NO REF FILE CREATED - {} is already present".format(filepath)
        )
    else:
        # Concatenate reference file info
        reference_text = OrderedDict()
        reference_text["query"] = self.query.replace(
            config["URL_JORMUN"][7:], "localhost"
        )
        logger.warning("Query: {}".format(self.query))
        reference_text["response"] = response_checker.filter(
            json.loads(self.full_resp)
        )
        reference_text["full_response"] = json.loads(
            self.full_resp.replace(config["URL_JORMUN"][7:], "localhost")
        )
        # Write reference file directly in the references folder
        with open(filepath, "w") as ref:
            ref.write(json.dumps(reference_text, indent=4))
        logger.info("Created reference file : {}".format(filepath))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ref(request):\n r = referencepytest.ref(request)\n this_dir = os.path.abspath(os.path.dirname(__file__))\n r.set_data_location(os.path.join(this_dir, '..', 'reference'))\n return r", "def test_with_new_file(self):\n repository = self.create_repository(tool_name='Test')\n review_request = self.create_review_request(\n repository=repository,\n submitter=self.user,\n publish=True)\n diffset = self.create_diffset(review_request)\n filediff = self.create_filediff(diffset,\n source_revision=PRE_CREATION)\n\n rsp = self.api_get(\n get_original_file_url(review_request, diffset, filediff),\n expected_status=404)\n self.assertEqual(rsp['stat'], 'fail')\n self.assertEqual(rsp['err']['code'], DOES_NOT_EXIST.code)", "def ref_resp2files(output_file, output_json):\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)", "def create_ref_file(self):\n id = self.task_record.create_published_output_name()\n ctx = self.block_store.make_local_output(id)\n self.open_ref_contexts[ctx.get_filename()] = ctx\n return ctx.get_filename()", "def compare_with_ref(\n self, response, response_checker=default_checker.default_journey_checker\n ):\n\n def ref_resp2files(output_file, output_json):\n \"\"\"\n Create a file for the filtered response and for the filtered reference\n \"\"\"\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)\n\n def print_diff(ref_file, resp_file):\n \"\"\"\n Print differences between reference and response in console\n \"\"\"\n # open reference\n with open(ref_file) as reference_text:\n reference = reference_text.readlines()\n # open response\n with open(resp_file) as response_text:\n response = response_text.readlines()\n\n # Print failed test name\n print_color(\"\\n\\n\" + str(file_name) + \" failed :\" + \"\\n\\n\", Colors.PINK)\n\n symbol2color = {\"+\": Colors.GREEN, \"-\": Colors.RED}\n for line in difflib.unified_diff(reference, response):\n print_color(line, symbol2color.get(line[0], Colors.DEFAULT))\n\n # Filtering the answer. 
(We compare to a reference also filtered with the same filter)\n filtered_response = response_checker.filter(response)\n\n # Get the reference\n\n # Create the file name\n filename = self.get_file_name()\n filepath = os.path.join(config[\"REFERENCE_FILE_PATH\"], filename)\n\n assert os.path.isfile(filepath), \"{} is not a file\".format(filepath)\n\n with open(filepath, \"r\") as f:\n raw_reference = f.read()\n\n # Transform the string into a dictionary\n dict_ref = json.loads(raw_reference)\n\n # Get only the full_response part from the ref\n ref_full_response = dict_ref[\"full_response\"]\n\n # Filtering the reference\n filtered_reference = response_checker.filter(ref_full_response)\n\n # Compare response and reference\n try:\n response_checker.compare(filtered_response, filtered_reference)\n except AssertionError as e:\n # print the assertion error message\n logging.error(\"Assertion Error: %s\" % str(e))\n # find name of test\n file_name = filename.split(\"/\")[-1]\n file_name = file_name[:-5]\n\n # create a folder\n dir_path = config[\"RESPONSE_FILE_PATH\"]\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n\n # create path to ref and resp\n full_file_name_ref = dir_path + \"/reference_\" + file_name + \".txt\"\n full_file_name_resp = dir_path + \"/response_\" + file_name + \".txt\"\n\n json_filtered_reference = json.dumps(filtered_reference, indent=4)\n json_filtered_response = json.dumps(filtered_response, indent=4)\n\n # Save resp and ref as txt files in folder named outputs\n ref_resp2files(full_file_name_ref, json_filtered_reference)\n ref_resp2files(full_file_name_resp, json_filtered_response)\n\n # Print difference in console\n print_diff(full_file_name_ref, full_file_name_resp)\n\n raise", "def test_output_source_file(self):\n response = self.client.open(\n '/v1/control/file/{id}'.format(id='id_example'),\n method='GET',\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_create_files(self):\n\n testdir = \"test_output\"\n test_submission = Submission()\n self.addCleanup(os.remove, \"submission.tar.gz\")\n self.addCleanup(shutil.rmtree, testdir)\n\n test_submission.create_files(testdir)\n\n self.doCleanups()", "def testExampleFileGeneration(ref):\n outdir = ref.tmp_dir\n outpath = os.path.join(outdir, 'file_result.html')\n generate_file(outpath)\n ref.assertTextFileCorrect(outpath, 'file_result.html',\n ignore_substrings=['Copyright', 'Version'])", "def test_download_file(token):\n\n # github => repo => release => asset_list => asset => url => download\n\n g_h = github.Github(token, per_page=100)\n repo = g_h.get_repo(TEST_SLUG, lazy=False)\n release = repo.get_release(TEST_TAG)\n asset_list = release.get_assets()\n sha_filename = Template(Arguments.HASH_FILE).safe_substitute({\n 'platform': platform.system().lower()\n })\n\n assets_calculated_sha = 'notasha'\n sha_dict = {}\n\n for check_asset in asset_list:\n # look through list of assets for uploaded file and sha file\n\n if check_asset.name == os.path.basename(TEST_FILENAME):\n\n # the uploaded asset\n request = requests.get(check_asset.browser_download_url)\n open(TEST_DOWNLOAD, 'wb').write(request.content)\n\n # recalc hash of downloaded file\n assets_calculated_sha = Arguments.get_hash(TEST_DOWNLOAD)\n\n elif check_asset.name == sha_filename:\n\n # the sha hash file\n request = requests.get(check_asset.browser_download_url)\n sha_dict = request.json()\n\n assert assets_calculated_sha == sha_dict[os.path.basename(TEST_FILENAME)]", "def 
test_use_generated_reference_tests_directory(\n self,\n tmp_path_factory,\n platform_url,\n setup_student_repos,\n workdir,\n rtd_path,\n ):\n # arrange\n run_generate_rtd(base_url=platform_url, rtd=rtd_path, workdir=workdir)\n clone_dir = workdir / \"clone_dir\"\n clone_dir.mkdir()\n\n # act\n results = repobee_testhelpers.funcs.run_repobee(\n f\"repos clone -a {ASSIGNMENTS_ARG} \"\n f\"--base-url {platform_url} \"\n f\"--junit4-reference-tests-dir {rtd_path} \"\n f\"--junit4-hamcrest-path {HAMCREST_PATH} \"\n f\"--junit4-junit-path {JUNIT_PATH} \",\n plugins=[junit4],\n workdir=clone_dir,\n )\n\n # assert\n iterations = 0\n for repo_name in plug.generate_repo_names(\n repobee_testhelpers.const.STUDENT_TEAMS, ASSIGNMENT_NAMES\n ):\n iterations += 1\n first_result, *rest = results[repo_name]\n assert not rest, \"there should only be one result\"\n assert first_result.name == SECTION\n assert first_result.status != plug.Status.ERROR\n\n assert iterations > 0, \"the assertion loop did not execute\"", "def create_test_file(test_path, robot_test_name, entry_url, full_path):\n new_test_file = test_path + '\\\\' + robot_test_name + '.tstest'\n shutil.copyfile(template_test_file, new_test_file) #note shutil.copyfile() overwrites target file if it exists\n r = requests.get(entry_url)\n # print r.content\n # fill in TestPrototypeParameter interface XML element and replace hard coded Param1 by variable name\n # fill in SingleVariable interface XML element and replace hard coded default_val by default value\n robot_arguments = ''\n replacements = dict()\n if VAR:\n interface_section = ''\n variable_section = ''\n report_expression_section = ''\n\n # by default, no need to rename robot variable in test unless there is space in the name\n variable_renames = dict()\n for variable in retrieve_variables(r.content):\n variable_name = variable[0]\n variable_renames[variable_name] = variable_name\n # print variable_name\n\n # if variable name has single spaces in it, e.g. 'Example Input 1', replace by '_', e.g. 
'Example_Input_1'\n # however if there is also robot variable 'Example_Input_1', then keep appending '_' for the corresponding\n # TestShell test variable until it is unique\n for variable_name, rename in variable_renames.iteritems():\n if ' ' in variable_name:\n # rename = variable_name.replace(' ', '_') #replace space in the name by underscore\n rename = re.sub('[^0-9a-zA-Z_]', '_', variable_name) # replace each unsupported char by underscore\n while rename in variable_renames:\n rename += '_'\n variable_renames[variable_name] = rename\n\n for variable in retrieve_variables(r.content):\n variable_name = variable[0]\n default_value = variable[1]\n replacements[variable_name_in_template] = variable_renames[variable_name]\n replacements[variable_original_name_in_template] = variable_name\n replacements[variable_default_value_in_template] = default_value\n interface_section += fill_template(test_interface_template, replacements)\n variable_section += fill_template(test_variable_template, replacements)\n report_expression_section += fill_template(report_expression_template, replacements)\n robot_arguments += \" --variable \\'\" + variable_name + \"\\':\\'{\" + variable_renames[variable_name] + \"}\\'\"\n\n replacements = {\"test1.robot\": robot_arguments + \" \\'\" + full_path + \"\\'\"} # reset dictionary\n if VAR:\n replacements[test_interface_template_fill_tag] = interface_section\n replacements[test_variable_template_fill_tag] = variable_section\n replacements[report_expression_template_fill_tag] = report_expression_section\n # the following initial values of required variables are hard coded in test template\n replacements['CLOUDSHELL_SERVER_ADDRESS_VALUE'] = cloudshell_server_address\n replacements['CLOUDSHELL_SERVER_PORT_VALUE'] = cloudshell_server_port\n replacements['CLOUDSHELL_USERNAME_VALUE'] = cloudshell_server_username\n replacements['CLOUDSHELL_PASSWORD_VALUE'] = cloudshell_server_password\n replacements['CLOUDSHELL_DOMAIN_VALUE'] = cloudshell_server_domain\n replacements['EXEC_SERVER_ADDRESS_VALUE'] = exec_server_address\n replacements['EXEC_USERNAME_VALUE'] = exec_server_username\n replacements['EXEC_PASSWORD_VALUE'] = exec_server_password\n replacements['BITBUCKET_REPOSITORY_URL'] = bitbucket_repository_url\n replacements['EXEC_SERVER_WORKING_DIR'] = exec_server_working_directory\n replacements['ROBOT_TESTS_DIR'] = robot_tests_directory\n replacements['ARCHIVE_OUTPUT_DIR'] = archive_output_directory\n replacements['LOCAL_WORKING_DIR'] = local_working_directory\n # print replacements\n substitute_string_in_tstest_file(new_test_file, replacements)\n new_test_file_ascii_name = new_test_file.encode('ascii', 'ignore') # otherwise UnicodeDecodeError\n return new_test_file_ascii_name", "def test_generate_diff_download(self, mock_response, mock_request, mock_test_result_file):\n from mod_test.controllers import generate_diff\n\n mock_request.accept_mimetypes.best = 'application/json'\n\n response = generate_diff(1, 1, 1, to_view=0)\n\n self.assertTrue(response, mock_response())", "def test_set_api_url(self):\n UI_path = './resources/'\n test_js_filename = 'test_main.js'\n new_js_filename = UI_path + 'main_blabla.js'\n reference_js = UI_path + 'test_main_reference.js'\n\n os.system('cp {} {}'.format(\n UI_path + test_js_filename,\n new_js_filename))\n\n api_url = 'https://app.etabot.ai:8000/api/'\n set_api_url.set_api_url(\n UI_path, api_url, api_url_var_name='apiUrl')\n\n ute.assertFileEqual(new_js_filename, reference_js, self)\n os.remove(new_js_filename)", "def 
test_create_symlink_file(self):\n pass", "def test_make_file():\n with tempfile.TemporaryDirectory() as STATUS_DIR:\n Status.make_job_file(STATUS_DIR, 'generation', 'test1', TEST_1_ATTRS_1)\n status = Status.retrieve_job_status(STATUS_DIR, 'generation', 'test1')\n msg = 'Failed, status is \"{}\"'.format(status)\n assert status == 'R', msg", "def test_create_content(self):\n url = reverse('content-list')\n with tempfile.NamedTemporaryFile(suffix='.txt') as content_file:\n content_file.write(b\"The contents of the temporary file.\\n\")\n content_file.seek(0)\n data = {\n 'name': 'Content File',\n 'description': 'File 1',\n 'content_file': content_file,\n 'updated_time': date.today(),\n 'creators': [],\n 'coverage': '',\n 'subjects': [],\n 'keywords': [],\n 'workareas': [],\n 'language': '',\n 'cataloger': ''\n }\n response = self.client.post(url, data, format='multipart')\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def file_factory(test_workspace):\n\n return FileCreator(test_workspace)", "def test_download_to_file(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo')\n expected = outdir.join('foo.gbk')\n config = core.Config(molecule='nucleotide', verbose=False)\n\n core.download_to_file('FOO', config, filename=filename)\n\n assert expected.check()", "def file(c, path=local.http_path):\r\n c = conn(c)\r\n print(\"make file repo on {}, path [{}]\".format(c.host, path))\r\n\r\n system.install(c, 'createrepo')\r\n c.run('createrepo {}'.format(path))", "def test_CRUD_ProjectFiles_Successfully(self):\n\n real_file_code = get_content('BasicTemplateAlgorithm.py')\n second_real_file_code = get_content('BasicTemplateForexAlgorithm.py')\n\n fakeFile = {\"name\":\"Hello.py\", \"code\": \"Hello World!\"}\n realFile = {\"name\":\"main.py\", \"code\": real_file_code}\n secondRealFile = {\"name\":\"lol.py\", \"code\": second_real_file_code}\n\n # Create a new project and make sure there are no files\n project = self.api.create_project(\"Test project - \", \"Py\")\n self.assertTrue(project['success'])\n self.assertTrue(project['projects'][0]['projectId'] > 0)\n\n # Add random file\n randomAdd = self.api.add_project_file(project['projects'][0]['projectId'], fakeFile[\"name\"], fakeFile[\"code\"])\n self.assertTrue(randomAdd['success'])\n self.assertTrue(randomAdd['files'][0]['content'] == fakeFile['code'])\n self.assertTrue(randomAdd['files'][0]['name'] == fakeFile['name'])\n\n # Update names of file\n updatedName = self.api.update_project_filename(project['projects'][0]['projectId'], randomAdd['files'][0]['name'], realFile['name'])\n self.assertTrue(updatedName['success'])\n\n # Replace content of file\n updateContents = self.api.update_project_file_content(project['projects'][0]['projectId'], realFile[\"name\"], realFile['code'])\n self.assertTrue(updateContents['success'])\n\n # Read single file\n readFile = self.api.read_project_file(project['projects'][0]['projectId'], realFile['name'])\n self.assertTrue(readFile['success'])\n self.assertTrue(readFile['files'][0]['content'] == realFile['code'])\n self.assertTrue(readFile['files'][0]['name'] == realFile['name'])\n\n # Add a second file\n secondFile = self.api.add_project_file(project['projects'][0]['projectId'], secondRealFile['name'], secondRealFile['code'])\n self.assertTrue(secondFile['success'])\n self.assertTrue(secondFile['files'][0]['content'] == secondRealFile['code'])\n self.assertTrue(secondFile['files'][0]['name'] == secondRealFile['name'])\n\n # Read 
multiple files\n readFiles = self.api.read_project_files(project['projects'][0]['projectId'])\n self.assertTrue(readFiles['success'])\n self.assertTrue(len(readFiles['files']) == 2)\n\n # Delete the second file\n deleteFile = self.api.delete_project_file(project['projects'][0]['projectId'], secondRealFile['name'])\n self.assertTrue(deleteFile['success'])\n\n # Read files\n readFilesAgain = self.api.read_project_files(project['projects'][0]['projectId'])\n self.assertTrue(readFilesAgain['success'])\n self.assertTrue(len(readFilesAgain['files']) == 1)\n self.assertTrue(readFilesAgain['files'][0]['name'] == realFile['name'])\n\n # Delete the project\n deleteProject = self.api.delete_project(project['projects'][0]['projectId'])\n self.assertTrue(deleteProject['success'])", "def writeFile(self,fileLink,fileBuffer,testChars=''):\n # 026 Unit test should test also urllib file like object aside the real file.\n #self.debug.printHeader() # Too many times -- need to move to debuglevel=4\n filePath=fileLink.replace('http://','')\n [fileDir,fileName]=os.path.split(filePath)\n if not os.path.exists(self.pathStorage.workDir()+os.sep+fileDir): os.makedirs(self.pathStorage.workDir()+os.sep+fileDir)\n localFile=file(self.pathStorage.workDir()+os.sep+fileDir+os.sep+fileName,'wb')\n localFile.write(testChars)\n localFile.write(fileBuffer.read())\n localFile.close()", "def commit_test(self, test_case, file_name):\n self.logger.info('found fuzzing target')\n\n case_folder = os.path.join(self.crashes, file_name)\n\n if os.path.exists(case_folder):\n self.logger.error('duplicate case folder')\n sys.exit(1)\n\n os.mkdir(case_folder)\n\n dest = os.path.join(case_folder, 'input')\n with open(dest, 'w+') as f:\n f.write(test_case)", "def test_with_training_file(\n self, mock_get_ai_details, mock_get_ai, mock_get_categories\n ):\n\n mock_get_ai.return_value = self.ai\n mock_get_ai_details.return_value = self.ai_details\n\n mock_get_ai_details.return_value['training_file'] = 'This is my training file'\n\n response = self.client.get(reverse(\n 'studio:edit_bot',\n kwargs={'aiid': self.ai['aiid']}\n ))\n\n self.assertContains(response, 'This is my training file')\n self.assertNotContains(response, 'Simply upload historical conversations '\n 'or conversation samples between your users.')", "def fetch_test_feature_file(context, filename):\n resource_package = \"quantarhei\"\n resource_path = '/'.join(('testing', 'resources', 'behave', filename))\n\n content = pkg_resources.resource_string(resource_package, resource_path)\n\n with open(filename, \"w\") as file:\n file.write(content.decode(\"utf-8\"))\n\n context.output = \"\"", "def test_download(self):\n pass", "def create_test(self, test_case, file_name):\n with open(os.path.join(self.tests, file_name), 'w+') as f:\n f.write(test_case)", "def test_open_write(self, client, remote_temp_dir):\n\n file_path = posixpath.join(remote_temp_dir, \"test2.txt\")\n assert not client.exists(file_path)\n\n with HdfsHook() as hook:\n with hook.open(file_path, \"wb\") as file_:\n file_.write(b\"Test file\\n\")\n\n assert client.exists(file_path)", "def test_save_and_add_another_redirects_to_create(self):\n with open(fixture_file, 'rb') as fp:\n params = {\n \"caption\": \"some file\",\n \"publication\": fp,\n \"_addanother\": \"\"\n }\n response = self.client.post(reverse(\"admin2:files_captionedfile_create\"),\n params)\n self.assertTrue(\n CaptionedFile.objects.filter(caption=\"some file\").exists())\n self.assertRedirects(\n response, reverse(\"admin2:files_captionedfile_create\"))", 
"def test_master(self, tmpgitdir, branch):\n with tmpgitdir.join('file_a.txt').open('w') as handle:\n handle.write('first file')\n\n subprocess.check_call(['git', 'checkout', '-b', branch])\n subprocess.check_call(['git', 'add', '.'])\n subprocess.check_call(['git', 'commit', '-m', 'first'])\n\n assert git_head_ref_name(tmpgitdir) == branch", "def test_upload_file(self):\n\n uploadFile = os.path.join(testdatadir, \"upload.data\")\n r = gracedb.writeFile(eventId, uploadFile)\n self.assertEqual(r.status, 201) # CREATED\n r_content = r.json()\n link = r_content['permalink']\n\n self.assertEqual(\n open(uploadFile, 'r').read(),\n gracedb.get(gracedb.files(eventId).json()['upload.data']).read()\n )\n\n self.assertEqual(\n open(uploadFile, 'r').read(),\n gracedb.get(link).read()\n )\n\n # Re-upload slightly different file.\n uploadFile2 = os.path.join(testdatadir, \"upload2.data\")\n r = gracedb.writeFile(\n eventId,\n filename=\"upload.data\",\n filecontents=open(uploadFile2, 'r'))\n self.assertEqual(r.status, 201) # CREATED\n r_content = r.json()\n link2 = r_content['permalink']\n\n self.assertEqual(\n open(uploadFile2, 'r').read(),\n gracedb.get(gracedb.files(eventId).json()['upload.data']).read()\n )\n\n self.assertEqual(\n open(uploadFile2, 'r').read(),\n gracedb.get(link2).read()\n )\n\n self.assertNotEqual(link, link2)", "def create_robot_tests(fout):\n processed = {} # dict to avoid circular imports\n\n def get_all_keywords(resource):\n \"\"\"\n Helper function to recursively get keywords from resource files\n \"\"\"\n keywords = []\n resource.populate()\n for res in [i for i in resource.imports.data if isinstance(i, robot.parsing.settings.Resource)]:\n keyword_file = os.path.abspath('{}/{}'.format(res.directory, res.name))\n if keyword_file not in processed:\n res_obj = ResourceFile(keyword_file)\n processed[keyword_file] = res_obj\n keywords += get_all_keywords(res_obj)\n for keyword in resource.keywords:\n print(keyword.name)\n keywords.append(tuple((keyword.source, keyword.name, keyword.args.value if keyword.args.value else [])))\n return keywords\n\n fusion_api_resource = ResourceFile(MAIN_RESOURCE_FILE)\n for keyword in get_all_keywords(fusion_api_resource):\n fout.write('Test {}\\n'.format(keyword[1]))\n fout.write(' [Documentation] {}\\n'.format(keyword[0]))\n fout.write(' {} '.format(keyword[1]))\n for arg in keyword[2]:\n if '=' in arg:\n fout.write(' {} '.format(arg.split('=')[-1]))\n else:\n if arg.startswith('&'):\n fout.write(' testk=testv ')\n else:\n fout.write(' test ')\n fout.write('\\n\\n')", "def test_remote_ref(tmp_path, _clean_remote_schemas_store):\n # Create file\n directory = tmp_path / \"base\"\n directory.mkdir()\n schemas_file = directory / \"original.json\"\n remote_schemas_file = directory / \"remote.json\"\n remote_schemas_file.write_text('{\"Table\": {\"key\": \"value\"}}')\n # Set up remote schemas store\n ref.set_context(path=str(schemas_file))\n schemas = {\"RefTable\": {\"$ref\": \"remote.json#/Table\"}}\n model_factory = mock.MagicMock()\n\n define_all.define_all(model_factory=model_factory, schemas=schemas)", "def create_target(cls, relpath, target):\r\n cls.create_file(cls.build_path(relpath), target, mode='a')", "def test_call_write_to_file(self):\r\n app = ReferenceRepSetPicker(params={'Algorithm': 'first',\r\n 'ChoiceF': first_id})\r\n app(self.tmp_seq_filepath,\r\n self.tmp_otu_filepath,\r\n self.ref_seq_filepath,\r\n result_path=self.result_filepath)\r\n with open(self.result_filepath) as f:\r\n actual = 
SequenceCollection.from_fasta_records(parse_fasta(f), DNA)\r\n expected = SequenceCollection.from_fasta_records(\r\n parse_fasta(rep_seqs_reference_result_file_exp.split('\\n')), DNA)\r\n # we don't care about order in the results\r\n self.assertEqual(set(actual), set(expected))", "def target_test_file_content():\n return 'initial content'", "def saveInGit(file_content, file_name, report_date):\n file_path = \"/\".join([crs_reports_dir,file_name])\n existed = os.path.isfile(file_path) \n if existed:\n # TODO Check that this specific version of this file isn't already\n # in the comment history\n pass\n with open(file_path, 'w') as f: \n f.write(file_content)\n f.close()\n gitAdd(file_name, crs_reports_dir)\n if existed:\n # TODO Set the commit date to be the CRS release date\n gitCommit(file_name, crs_reports_dir, '%s was updated' % file_name,\n report_date)\n else:\n gitCommit(file_name, crs_reports_dir, 'Added %s' % file_name,\n report_date)\n \n \n \n # 1.) If file_name exists:\n # 1.)overwrite it, \n # 2.) Commit an update to the file_name\n # else:\n # 1.) Create and save a new file\n # 2.) Commit the new file", "def write_test(test_contents, new_test_host_path):\n with open(new_test_host_path, 'w') as f:\n f.write(test_contents)", "def test_DL_export_create_file(self):\n filepath = '1.txt'\n dl = flow_processing_input.DetectorsLocation(2021)\n dl.detectors_location_dict = createDLDataset(1).dataset\n dl.export_to_file(filepath)\n # Check if file was created at filepath\n self.assertTrue(os.path.exists(filepath))\n os.remove(filepath)", "def create_reference_model(self, config, tmp_path_factory: pytest.TempPathFactory, *args):\n config = copy.deepcopy(config) # ensure the reference model is not passed to tests\n\n save_folder = tmp_path_factory.mktemp('{device}-{precision}'.format(**config))\n config.update({'save_interval': '1ep', 'save_folder': str(save_folder), 'save_filename': 'ep{epoch}.pt'})\n\n trainer = Trainer(**config)\n trainer.fit()\n\n self.reference_model = trainer.state.model\n self.reference_folder = save_folder", "def test_create_json(civic, diff):\n test_date = '19980108'\n civic._create_json(diff, test_date)\n file_name = APP_ROOT / 'data' / 'civic' / 'delta' / f'civic_deltas' \\\n f'_{test_date}.json'\n assert file_name.exists()\n os.remove(file_name)\n assert not file_name.exists()", "def _test_path(self, request, artifact_dir):\n self.test_path = artifact_dir / request.module.__name__ / request.node.name\n self.test_path.mkdir(parents=True, exist_ok=True)\n self.export_path = self.test_path / \"sample_processed.nii.gz\"", "def test_download(self):\n test_file = os.path.join(self._system.get_temporary_path(), \"nusoft.test\")\n self._system.download(\"http://www.github.com\", name=test_file)\n self.assertTrue(os.path.exists(test_file))\n os.remove(test_file)", "def test_make_json_advanced(self):\n resources = get_test_resources()\n output, filename = make_json(commit=\"1245\", exclude=[\"tei:note\", \"tei:orig\"], credit=\"PerseusDL\", **resources)\n output = json.loads(output)\n self.assertEqual(\n output[\"text\"][\"0\"][\"0\"][\"0\"], \"Spero me secutum in libellis meis tale temperamen-\",\n \"Text passages should be parsed correctly\"\n )\n self.assertEqual(\n output[\"text\"][\"1\"][\"0\"][\"1\"], \"Et comites longae quaeris habere viae, \",\n \"Text passages should be parsed correctly and note removed\"\n )\n self.assertEqual(\n output[\"text\"][\"1\"][\"1\"][\"3\"], \"Crede mihi, nimium Martia turba sapit. 
\",\n \"Text passages should be parsed correctly and note removed\"\n )\n self.assertEqual(\n output[\"text\"][\"1\"][\"0\"][\"0\"], \"Qui tecum cupis esse meos ubicumque libellos \",\n \"Text passages should be parsed correctly\"\n )\n self.assertEqual(\n filename, \"textgroup__work__lat.json\",\n \"Filename should be created in a stable and understandable manner\"\n )\n self.assertEqual(\n output[\"original-urn\"], \"urn:cts:latinLit:textgroup.work.version-lat1\",\n \"Original URN should be fed\"\n )\n self.assertEqual(\n output[\"urn\"], \"urn:cts:latinLit:textgroup.work.version-lat1-simple\",\n \"CLTK URN should be suffixed\"\n )\n self.assertEqual(\n output[\"credit\"], \"PerseusDL\",\n \"Credit should be empty by default\"\n )\n self.assertEqual(\n output[\"meta\"], \"book-poem-line\",\n \"meta should reflect the citation scheme\"\n )\n self.assertEqual(\n output[\"author\"], \"textgroup\",\n \"Author name should be the English textgroup name\"\n )\n self.assertEqual(\n output[\"work\"], \"work\",\n \"Work name should be the English work name\"\n )\n self.assertEqual(\n output[\"edition\"], \"description\",\n \"We should have the English description\"\n )\n self.assertEqual(\n output[\"commit\"], \"1245\",\n \"We should have the commit information\"\n )", "def create_response_info(self, response):\n output_path = os.path.join(self.output_folder, self.file_name)\n output_path += \".response.txt\"\n with open(output_path, 'w') as file:\n file.write(json.dumps(response))", "def create_201_response(self, file: str) -> bytes:\n date = datetime.datetime.now(datetime.timezone.utc).strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n location = \"http://\" + self.ipv4 + \":\" + str(HttpServer.PORT) + file\n header = \"HTTP/1.1 201 Created\" + \"\\r\\nDate: \" + date + \"\\r\\nLocation:\" + location + \"\\r\\n\\r\\n\"\n\n print(header)\n return header.encode(HttpServer.FORMAT)", "def test_file_generation(self):\n # For file generation submission, call generate route for D1 and check results\n post_json = {\"submission_id\": self.generation_submission_id, \"file_type\": \"D1\",\n \"start\": \"01/02/2016\", \"end\": \"02/03/2016\"}\n response = self.app.post_json(\"/v1/generate_file/\", post_json, headers={\"x-session-id\": self.session_id})\n\n self.assertEqual(response.status_code, 200)\n json = response.json\n\n # use_aws is true when the PR unit tests run so the date range specified returns no results.\n # checking is in place for \"failed\" until use_aws is flipped to false\n self.assertIn(json[\"status\"], [\"failed\", \"waiting\", \"finished\"])\n self.assertEqual(json[\"file_type\"], \"D1\")\n self.assertIn(\"url\", json)\n self.assertEqual(json[\"start\"], \"01/02/2016\")\n self.assertEqual(json[\"end\"], \"02/03/2016\")\n\n # this is to accommodate for checking for the \"failed\" status\n self.assertIn(json[\"message\"], [\"\", \"D1 data unavailable for the specified date range\"])\n\n # Then call check generation route for D2, E and F and check results\n post_json = {\"submission_id\": self.generation_submission_id, \"file_type\": \"E\"}\n response = self.app.post_json(\"/v1/check_generation_status/\", post_json,\n headers={\"x-session-id\": self.session_id})\n\n self.assertEqual(response.status_code, 200)\n json = response.json\n self.assertEqual(json[\"status\"], \"finished\")\n self.assertEqual(json[\"file_type\"], \"E\")\n self.assertEqual(json[\"url\"], \"#\")\n self.assertEqual(json[\"message\"], \"\")\n\n post_json = {\"submission_id\": self.generation_submission_id, \"file_type\": 
\"D2\"}\n response = self.app.post_json(\"/v1/check_generation_status/\", post_json,\n headers={\"x-session-id\": self.session_id})\n\n self.assertEqual(response.status_code, 200)\n json = response.json\n self.assertEqual(json[\"status\"], \"failed\")\n self.assertEqual(json[\"file_type\"], \"D2\")\n self.assertEqual(json[\"url\"], \"#\")\n self.assertEqual(json[\"message\"], \"Generated file had file-level errors\")\n\n post_json = {\"submission_id\": self.generation_submission_id, \"file_type\": \"F\"}\n response = self.app.post_json(\"/v1/check_generation_status/\", post_json,\n headers={\"x-session-id\": self.session_id})\n\n self.assertEqual(response.status_code, 200)\n json = response.json\n self.assertEqual(json[\"status\"], \"failed\")\n self.assertEqual(json[\"file_type\"], \"F\")\n self.assertEqual(json[\"url\"], \"#\")\n self.assertEqual(json[\"message\"], \"File was invalid\")\n\n # Test permission error\n self.login_user()\n post_json = {\"submission_id\": self.generation_submission_id, \"file_type\": \"D1\",\n \"start\": \"01/02/2016\", \"end\": \"02/03/2016\"}\n response = self.app.post_json(\"/v1/generate_file/\", post_json,\n headers={\"x-session-id\": self.session_id}, expect_errors=True)\n\n self.assertEqual(response.status_code, 403)\n json = response.json\n self.assertEqual(json[\"status\"], \"failed\")\n self.assertEqual(json[\"file_type\"], \"D1\")\n self.assertEqual(json[\"url\"], \"#\")\n self.assertEqual(json[\"start\"], \"\")\n self.assertEqual(json[\"end\"], \"\")\n self.assertEqual(json[\"message\"], \"User does not have permission to view that submission\")", "def test_invalid_pull_request(self):\n with tempfile.TemporaryDirectory() as tmp_dir:\n out_path = os.path.join(tmp_dir, 'out')\n os.mkdir(out_path)\n self.assertTrue(\n cifuzz.build_fuzzers(EXAMPLE_PROJECT,\n 'oss-fuzz',\n tmp_dir,\n pr_ref='ref-1/merge'))", "def setUp(self):\n\n self._tempdir = mkdtemp()\n self._sample_token = \"FVQ__sample_token__QYzzRracgjH\"\n self._sample_url = 'https://api.badgr.io/v2/badgeclasses'\n self.sample_token_file =\\\n os.path.join(self._tempdir, \"sample_token_file.json\")\n\n with open(self.sample_token_file, 'w',\n encoding=\"utf8\", errors=\"surrogateescape\") as stf_h:\n stf_h.write(json.dumps(\n {\"access_token\": self._sample_token,\n \"token_type\": \"Bearer\",\n \"refresh_token\": \"vK__sample_refresh_token__AlPZ\"}\n ))", "def test_multiple_branches(self, tmpgitdir):\n with tmpgitdir.join('file_a.txt').open('w') as handle:\n handle.write('first file')\n\n subprocess.check_call(['git', 'add', '.'])\n subprocess.check_call(['git', 'commit', '-m', 'first'])\n\n subprocess.check_call(['git', 'checkout', '-b', 'testbranch'])\n\n with tmpgitdir.join('file_b.txt').open('w') as handle:\n handle.write('second file')\n\n subprocess.check_call(['git', 'add', '.'])\n subprocess.check_call(['git', 'commit', '-m', 'second'])\n\n assert git_head_ref_name(tmpgitdir) == 'testbranch'", "def _fixture(self):\n fdir = os.path.join(FIXTURES_DIR, 'errata.devel.redhat.com/')\n filename = self._url_with_params.replace(\n 'https://errata.devel.redhat.com/', fdir)\n # If we need to represent this API endpoint as both a directory and a\n # file, check for a \".body\" file.\n if os.path.isdir(filename):\n return filename + '.body'\n return filename", "def create_file(path):\n command = ['touch', TEST_FILE]\n file_operation(path, command)", "def test_get_project_file(self):\n query_string = [('id', 'id_example'),\n ('path', 'path_example')]\n headers = { \n 'Accept': 'application/json',\n 
'Authorization': 'Bearer special-key',\n }\n response = self.client.open(\n '/api/v1/project-files/download',\n method='GET',\n headers=headers,\n query_string=query_string)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_download_to_file_append(req, tmpdir):\n req.get(ENTREZ_URL, text='This works.\\n')\n outdir = tmpdir.mkdir('outdir')\n filename = outdir.join('foo.txt')\n expected = outdir.join('foo.txt')\n config = core.Config(molecule='nucleotide', verbose=False, out='foo.txt')\n\n core.download_to_file('FOO', config, filename=str(filename), append=False)\n core.download_to_file('BAR', config, filename=str(filename), append=True)\n core.download_to_file('BAZ', config, filename=str(filename), append=True)\n\n assert expected.check()\n assert len(expected.readlines()) == 3", "def test_to_file(self):\n fd, fp = mkstemp()\n close(fd)\n st = SampleTemplate.create(self.metadata, self.new_study)\n st.to_file(fp)\n self._clean_up_files.append(fp)\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE)\n\n fd, fp = mkstemp()\n close(fd)\n st.to_file(fp, {'2.Sample1', '2.Sample3'})\n self._clean_up_files.append(fp)\n\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE_FEWER_SAMPLES)", "def test_execution_plan_type_downloable_git(self, mock_makedir, mock_path,\n mock_git):\n mock_makedir.return_value = None\n mock_path.return_value = True\n mock_git.clone.return_value = None\n template = self.get_template_downloable_git()\n files = files_manager.FilesManager(self.get_template_downloable(5))\n files._download_url_file(template.Files['mycoockbook'], \"script\")", "def create_reference2(md5, pos, ref_md5, ref_pos):\n ua = Upload.objects.filter(md5=md5).first()\n ub = Upload.objects.filter(md5=ref_md5).first()\n if not ua or not ub:\n abort(404)\n # Create the reference\n try:\n r = Reference(upload=ua, raw_pos=pos, ref_upload=ub, raw_ref_pos=ref_pos)\n r.ref_thing = Thing.objects.filter(files=r.ref_upload).first()\n r.save()\n return jsonify({'success': True})\n except:\n return jsonify({'failed': True})", "def create_submission_file(\n json_out_file, challenge, submission_url, model_name, model_description, nyu_data_only,\n participants=None, paper_url=None, code_url=None\n):\n\n if challenge not in {'singlecoil', 'multicoil'}:\n raise ValueError(f'Challenge should be singlecoil or multicoil, not {challenge}')\n\n phase_name = f'{challenge}_leaderboard'\n submission_data = dict(\n recon_zip_url=submission_url,\n model_name=model_name,\n model_description=model_description,\n nyudata_only=nyu_data_only,\n participants=participants,\n paper_url=paper_url,\n code_url=code_url\n )\n submission_data = dict(result=[{\n phase_name: submission_data\n }])\n\n with open(json_out_file, 'w') as json_file:\n json.dump(submission_data, json_file, indent=2)", "def test_create_get(self):\n self.shell.onecmd(\"create %s/one 'hello'\" % (self.tests_path))\n self.shell.onecmd(\"get %s/one\" % (self.tests_path))\n self.assertEqual(\"hello\\n\", self.output.getvalue())", "def __gitBisectCreateReplay(self):\n self.vcs.gitBisectCreateReplayFile(self.project.getProjectPath())", "def test_download1(self):\n pass", "def test_version_is_written_into_file_info_file(self):\n # This functionality is only provided by the msvc compiler.\n if not (self.is_visual_studio_config() and self.is_shared_libraries_config()):\n return\n\n self.generate_project()\n 
self.build_target(simpleonelibcpftestprojectfixture.MYLIB_TESTS_TARGET)\n\n # VERIFY\n package = 'MyLib'\n packageType = 'LIB'\n owner = 'Knitschi'\n version = self.get_package_version(package)\n binBaseDir = self.locations.get_full_path_binary_output_folder(testprojectfixture.PARENT_CONFIG, testprojectfixture.COMPILER_CONFIG)\n \n libFile = binBaseDir / self.get_package_shared_lib_path(package, packageType, version)\n shortLibFile = self.get_shared_lib_short_name(package, packageType, version)\n\n print(libFile)\n\n # Read the properties from the binary file.\n props = testprojectfixture.get_file_properties(str(libFile))['StringFileInfo']\n\n # Compare the values\n self.assertEqual(props['CompanyName'], owner)\n self.assertEqual(props['FileDescription'], 'A C++ library used for testing the CPF')\n self.assertEqual(props['FileVersion'], '{0}'.format(version))\n self.assertEqual(props['InternalName'], 'MyLib')\n self.assertEqual(props['LegalCopyright'], 'Copyright {0} {1}'.format(datetime.datetime.now().year, owner) )\n self.assertEqual(props['OriginalFilename'], str(shortLibFile))\n self.assertEqual(props['ProductName'], 'MyLib')\n self.assertEqual(props['ProductVersion'], '{0}'.format(version))", "def test_fs_outputs(self,\n mocker,\n repo,\n github_repo_id,\n gitlab_repo_id,\n exp_display,\n exp_command,\n ):\n mocker.patch(\n 'dockci.models.project.CONFIG',\n namedtuple('Config', ['gitlab_base_url'])(\n 'http://localhost:8000'\n ),\n )\n\n project = Project(\n repo=repo,\n github_repo_id=github_repo_id,\n gitlab_repo_id=gitlab_repo_id,\n )\n\n service = None\n if github_repo_id is not None:\n service = 'github'\n elif gitlab_repo_id is not None:\n service = 'gitlab'\n\n if service is not None:\n project.external_auth_token = OAuthToken(\n key='authkey',\n secret='authsecret',\n service=service,\n )\n\n assert project.display_repo == exp_display\n assert project.command_repo == exp_command", "def test_apiLinking(self):\n version = \"1.2.3\"\n input, output = self.getArbitraryLoreInputAndOutput(version)\n self.howtoDir.child(\"one.xhtml\").setContent(input)\n\n self.builder.build(version, self.howtoDir, self.howtoDir,\n self.templateFile, \"scheme:apilinks/%s.ext\")\n out = self.howtoDir.child('one.html')\n self.assertIn(\n '<a href=\"scheme:apilinks/foobar.ext\" title=\"foobar\">foobar</a>',\n out.getContent())", "def make_testcase(testcase: Dict, dir_path: Text = None) -> Text:\n # ensure compatibility with testcase format v2\n testcase = ensure_testcase_v3(testcase)\n\n # validate testcase format\n load_testcase(testcase)\n\n testcase_abs_path = __ensure_absolute(testcase[\"config\"][\"path\"])\n logger.info(f\"start to make testcase: {testcase_abs_path}\")\n\n testcase_python_abs_path, testcase_cls_name = convert_testcase_path(\n testcase_abs_path\n )\n if dir_path:\n testcase_python_abs_path = os.path.join(\n dir_path, os.path.basename(testcase_python_abs_path)\n )\n\n global pytest_files_made_cache_mapping\n if testcase_python_abs_path in pytest_files_made_cache_mapping:\n return testcase_python_abs_path\n\n config = testcase[\"config\"]\n config[\"path\"] = convert_relative_project_root_dir(testcase_python_abs_path)\n config[\"variables\"] = convert_variables(\n config.get(\"variables\", {}), testcase_abs_path\n )\n\n # prepare reference testcase\n imports_list = []\n teststeps = testcase[\"teststeps\"]\n for teststep in teststeps:\n if not teststep.get(\"testcase\"):\n continue\n\n # make ref testcase pytest file\n ref_testcase_path = __ensure_absolute(teststep[\"testcase\"])\n 
test_content = load_test_file(ref_testcase_path)\n\n if not isinstance(test_content, Dict):\n raise exceptions.TestCaseFormatError(f\"Invalid teststep: {teststep}\")\n\n # api in v2 format, convert to v3 testcase\n if \"request\" in test_content and \"name\" in test_content:\n test_content = ensure_testcase_v3_api(test_content)\n\n test_content.setdefault(\"config\", {})[\"path\"] = ref_testcase_path\n ref_testcase_python_abs_path = make_testcase(test_content)\n\n # override testcase export\n ref_testcase_export: List = test_content[\"config\"].get(\"export\", [])\n if ref_testcase_export:\n step_export: List = teststep.setdefault(\"export\", [])\n step_export.extend(ref_testcase_export)\n teststep[\"export\"] = list(set(step_export))\n\n # prepare ref testcase class name\n ref_testcase_cls_name = pytest_files_made_cache_mapping[\n ref_testcase_python_abs_path\n ]\n teststep[\"testcase\"] = ref_testcase_cls_name\n\n # prepare import ref testcase\n ref_testcase_python_relative_path = convert_relative_project_root_dir(\n ref_testcase_python_abs_path\n )\n ref_module_name, _ = os.path.splitext(ref_testcase_python_relative_path)\n ref_module_name = ref_module_name.replace(os.sep, \".\")\n import_expr = f\"from {ref_module_name} import TestCase{ref_testcase_cls_name} as {ref_testcase_cls_name}\"\n if import_expr not in imports_list:\n imports_list.append(import_expr)\n\n testcase_path = convert_relative_project_root_dir(testcase_abs_path)\n # current file compared to ProjectRootDir\n diff_levels = len(testcase_path.split(os.sep))\n\n data = {\n \"version\": __version__,\n \"testcase_path\": testcase_path,\n \"diff_levels\": diff_levels,\n \"class_name\": f\"TestCase{testcase_cls_name}\",\n \"imports_list\": imports_list,\n \"config_chain_style\": make_config_chain_style(config),\n \"parameters\": config.get(\"parameters\"),\n \"teststeps_chain_style\": [\n make_teststep_chain_style(step) for step in teststeps\n ],\n }\n content = __TEMPLATE__.render(data)\n\n # ensure new file's directory exists\n dir_path = os.path.dirname(testcase_python_abs_path)\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n\n with open(testcase_python_abs_path, \"w\", encoding=\"utf-8\") as f:\n f.write(content)\n\n pytest_files_made_cache_mapping[testcase_python_abs_path] = testcase_cls_name\n __ensure_testcase_module(testcase_python_abs_path)\n\n logger.info(f\"generated testcase: {testcase_python_abs_path}\")\n\n return testcase_python_abs_path", "def test_create(client):\n rv = create(client, reponame='Michael', url='https://github.com/Michael')\n assert json.loads(rv.data.decode())['code'] == 0\n assert json.loads(rv.data.decode())['owner'] == 'Michael'\n assert json.loads(rv.data.decode())['url'] == 'https://github.com/Michael'", "def test_get_file_object(self):\n pass", "def test_download_build_log_file(self, mock_serve, mock_test, mock_os):\n from mod_test.controllers import (TestNotFoundException,\n download_build_log_file)\n\n response = download_build_log_file('1')\n\n self.assertEqual(response, mock_serve())\n mock_test.query.filter.assert_called_once()\n mock_os.path.isfile.assert_called_once()", "def save_response(response, file_name, path='~/tmp/fcb-analyzer'):\n \n path = ensure_path(path)\n f = open(path + '/' + file_name, 'w')\n f.write(response.text)", "def tests_ti_file_create(self):\n indicator_data = {\n 'md5': uuid.uuid4().hex.upper(),\n 'owner': self.owner,\n 'rating': randint(0, 5),\n }\n ti = self.ti.file(**indicator_data)\n r = ti.create()\n\n # assert response\n assert 
r.status_code == 201\n\n # retrieve indicator for asserts\n ti = self.ti.file(**indicator_data)\n r = ti.single()\n response_data = r.json()\n ti_data = response_data.get('data', {}).get(ti.api_entity)\n\n # validate response data\n assert r.status_code == 200\n assert response_data.get('status') == 'Success'\n\n # validate ti data\n assert ti_data.get('confidence') == indicator_data.get('confidence')\n assert ti_data.get(ti.api_entity) == indicator_data.get(ti.api_entity)\n assert ti_data.get('rating') == indicator_data.get('rating')\n\n # cleanup indicator\n r = ti.delete()\n assert r.status_code == 200", "def store_module_result(data):\n gh, repo, branch = connect_to_github()\n remote_path = \"data/%s/%d.data\" % (trojan_id, random.randint(1000, 100000))\n repo.create_file(remote_path, \"Commit message\", base64.b64encode(data))\n return", "def testCreateSymlinkOutput(self): # pylint: disable=no-self-use\n gcs_client = mock.MagicMock(spec=storage.Client)\n blob = prow.create_symlink(gcs_client, \"gs://bucket/symlink\",\n \"gs://bucket/output\")\n\n blob.upload_from_string.assert_called_once_with(\"gs://bucket/output\")", "def test_dump_and_report_workflow(self):\n project = \"\"\"file://result <- file://A\n echo result > result\n error\n \"\"\"\n rcode, output = run_tuttle_file(project)\n assert rcode == 2\n assert path.isfile(path.join(\".tuttle\", \"last_workflow.pickle\"))", "def test_create_files_with_version(self):\n opts = mock.Mock()\n opts.repo = self.repo\n opts.create_version_file = True\n opts.source = 'src'\n opts.version = '0.0.1'\n opts.org = \"ORG\"\n opts.version_file = None\n opts.test_mode = 'False'\n opts.desc = \"DESCRIPTION\"\n opts.templates = ['include steve/*']\n opts.history_file = 'HISTORY.md'\n opts.package = 'unittests'\n opts.requirements = 'requirements.txt'\n opts.pypi_package_name = None\n opts.develop = 'develop'\n opts.python = None\n opts.gitignore_url = \"GIT_IGNORE_URL\"\n opts.add_gitignore = False\n opts.test_requirements = 'test-requirements.txt'\n version = os.path.join(self.repo, 'src', 'unittests', '__init__.py')\n os.system('rm -f {}'.format(version))\n create_files(opts)\n\n dir_list = os.listdir(self.repo)\n self.failUnless('cirrus.conf' in dir_list)\n self.failUnless('HISTORY.md' in dir_list)\n self.failUnless('MANIFEST.in' in dir_list)\n self.failUnless('setup.py' in dir_list)\n\n cirrus_conf = os.path.join(self.repo, 'cirrus.conf')\n config = ConfigParser.RawConfigParser()\n config.read(cirrus_conf)\n self.assertEqual(config.get('package', 'name'), opts.package)\n self.assertEqual(config.get('package', 'version'), opts.version)\n\n history = os.path.join(self.repo, 'HISTORY.md')\n with open(history, 'r') as handle:\n self.failUnless('CIRRUS_HISTORY_SENTINEL' in handle.read())\n\n manifest = os.path.join(self.repo, 'MANIFEST.in')\n with open(manifest, 'r') as handle:\n content = handle.read()\n self.failUnless('include requirements.txt' in content)\n self.failUnless('include cirrus.conf' in content)\n self.failUnless('include steve/*' in content)\n\n version = os.path.join(self.repo, 'src', 'unittests', '__init__.py')\n with open(version, 'r') as handle:\n self.failUnless(opts.version in handle.read())", "def test_valid_pull_request(self):\n with tempfile.TemporaryDirectory() as tmp_dir:\n out_path = os.path.join(tmp_dir, 'out')\n os.mkdir(out_path)\n self.assertTrue(\n cifuzz.build_fuzzers(EXAMPLE_PROJECT,\n 'oss-fuzz',\n tmp_dir,\n pr_ref='refs/pull/1757/merge'))\n self.assertTrue(\n os.path.exists(os.path.join(out_path, 
EXAMPLE_BUILD_FUZZER)))", "def test_prereleases(tmp_path_factory: pytest.TempPathFactory) -> None:\n src, dst = map(tmp_path_factory.mktemp, (\"src\", \"dst\"))\n with local.cwd(src):\n # Build template in v1.0.0\n build_file_tree(\n {\n \"version.txt\": \"v1.0.0\",\n \"[[ _copier_conf.answers_file ]].jinja\": \"[[_copier_answers|to_nice_yaml]]\",\n \"copier.yaml\": (\n f\"\"\"\\\n _envops: {BRACKET_ENVOPS_JSON}\n _migrations:\n - version: v1.9\n before:\n - [python, -c, \"import pathlib; pathlib.Path('v1.9').touch()\"]\n - version: v2.dev0\n before:\n - [python, -c, \"import pathlib; pathlib.Path('v2.dev0').touch()\"]\n - version: v2.dev2\n before:\n - [python, -c, \"import pathlib; pathlib.Path('v2.dev2').touch()\"]\n - version: v2.a1\n before:\n - [python, -c, \"import pathlib; pathlib.Path('v2.a1').touch()\"]\n - version: v2.a2\n before:\n - [python, -c, \"import pathlib; pathlib.Path('v2.a2').touch()\"]\n \"\"\"\n ),\n }\n )\n git(\"init\")\n git(\"add\", \".\")\n git(\"commit\", \"-mv1\")\n git(\"tag\", \"v1.0.0\")\n # Evolve template to v2.0.0.dev1\n build_file_tree({\"version.txt\": \"v2.0.0.dev1\"})\n git(\"commit\", \"-amv2dev1\")\n git(\"tag\", \"v2.0.0.dev1\")\n # Evolve template to v2.0.0.alpha1\n build_file_tree({\"version.txt\": \"v2.0.0.alpha1\"})\n git(\"commit\", \"-amv2a1\")\n git(\"tag\", \"v2.0.0.alpha1\")\n # Copying with use_prereleases=False copies v1\n run_copy(src_path=str(src), dst_path=dst, defaults=True, overwrite=True)\n answers = yaml.safe_load((dst / \".copier-answers.yml\").read_text())\n assert answers[\"_commit\"] == \"v1.0.0\"\n assert (dst / \"version.txt\").read_text() == \"v1.0.0\"\n assert not (dst / \"v1.9\").exists()\n assert not (dst / \"v2.dev0\").exists()\n assert not (dst / \"v2.dev2\").exists()\n assert not (dst / \"v2.a1\").exists()\n assert not (dst / \"v2.a2\").exists()\n with local.cwd(dst):\n # Commit subproject\n git(\"init\")\n git(\"add\", \".\")\n git(\"commit\", \"-mv1\")\n # Update it without prereleases; nothing changes\n run_update(defaults=True, overwrite=True)\n assert not git(\"status\", \"--porcelain\")\n assert not (dst / \"v1.9\").exists()\n assert not (dst / \"v2.dev0\").exists()\n assert not (dst / \"v2.dev2\").exists()\n assert not (dst / \"v2.a1\").exists()\n assert not (dst / \"v2.a2\").exists()\n # Update it with prereleases\n run_update(\n dst_path=dst, defaults=True, overwrite=True, use_prereleases=True, unsafe=True\n )\n answers = yaml.safe_load((dst / \".copier-answers.yml\").read_text())\n assert answers[\"_commit\"] == \"v2.0.0.alpha1\"\n assert (dst / \"version.txt\").read_text() == \"v2.0.0.alpha1\"\n assert (dst / \"v1.9\").exists()\n assert (dst / \"v2.dev0\").exists()\n assert (dst / \"v2.dev2\").exists()\n assert (dst / \"v2.a1\").exists()\n assert not (dst / \"v2.a2\").exists()\n # It should fail if downgrading\n with pytest.raises(UserMessageError):\n run_update(dst_path=dst, defaults=True, overwrite=True)", "def test_repo_to_json(self):\n if path.exists(\"report.csv\"):\n remove(\"report.csv\")\n\n with open(\"test/fixtures/results.json\", 'r') as data:\n data = json.load(data)\n client = CSVReport()\n client.process(data)\n\n self.assertTrue(path.exists(\"report.csv\"))", "async def test_create_and_forget_post_on_reference(fixture_account):\n _ = await create_and_forget_post(fixture_account, REFERENCE_NODE, TARGET_NODE)", "def test_download2(self):\n pass", "def build_url(cls, config, namespace, name):\n return \"hxxp://mock.repo.url/\" + namespace + \"/\" + name + \".git\"", "def 
test_create_files(self):\n opts = mock.Mock()\n opts.repo = self.repo\n opts.source = 'src'\n opts.version = '0.0.1'\n opts.version_file = None\n opts.test_mode = False\n opts.templates = ['include steve/*']\n opts.history_file = 'HISTORY.md'\n opts.package = 'unittests'\n opts.desc = \"DESCRIPTION\"\n opts.org = \"ORG\"\n opts.develop = 'develop'\n opts.requirements = 'requirements.txt'\n opts.test_requirements = 'test-requirements.txt'\n opts.pypi_package_name = None\n opts.python = None\n opts.create_version_file = False\n opts.gitignore_url = \"GIT_IGNORE_URL\"\n opts.add_gitignore = False\n create_files(opts)\n\n dir_list = os.listdir(self.repo)\n self.failUnless('cirrus.conf' in dir_list)\n self.failUnless('HISTORY.md' in dir_list)\n self.failUnless('MANIFEST.in' in dir_list)\n self.failUnless('setup.py' in dir_list)\n\n cirrus_conf = os.path.join(self.repo, 'cirrus.conf')\n config = ConfigParser.RawConfigParser()\n config.read(cirrus_conf)\n self.assertEqual(config.get('package', 'name'), opts.package)\n self.assertEqual(config.get('package', 'version'), opts.version)\n\n history = os.path.join(self.repo, 'HISTORY.md')\n with open(history, 'r') as handle:\n self.failUnless('CIRRUS_HISTORY_SENTINEL' in handle.read())\n\n manifest = os.path.join(self.repo, 'MANIFEST.in')\n with open(manifest, 'r') as handle:\n content = handle.read()\n self.failUnless('include requirements.txt' in content)\n self.failUnless('include cirrus.conf' in content)\n self.failUnless('include steve/*' in content)\n\n version = os.path.join(self.repo, 'src', 'unittests', '__init__.py')\n with open(version, 'r') as handle:\n self.failUnless(opts.version in handle.read())", "def create_IMPACT505_b37_reference_files(apps, schema_editor):\n File = apps.get_model(\"file_system\", \"File\")\n FileMetadata = apps.get_model(\"file_system\", \"FileMetadata\")\n FileGroup = apps.get_model(\"file_system\", \"FileGroup\")\n FileType = apps.get_model(\"file_system\", \"FileType\")\n try:\n file_group = FileGroup.objects.get(name=\"Reference Files\")\n txt = FileType.objects.get(name=\"txt\")\n ilist = FileType.objects.get(name=\"ilist\")\n interval_list = FileType.objects.get(name=\"interval_list\")\n except Exception:\n print(\"No file group or file_types defined\")\n return\n try:\n\n file1 = File.objects.create(\n path=\"/juno/work/ci/resources/genomic_resources/targets/IMPACT505/b37/IMPACT505_FP_tiling_genotypes.txt\",\n file_name=\"IMPACT505_FP_tiling_genotypes.txt\",\n file_group=file_group,\n file_type=txt,\n size=0,\n )\n file_metadata_1 = FileMetadata.objects.create(\n file=file1, version=0, metadata={\"assay\": \"IMPACT505_b37\", \"data_type\": \"FP_genotypes\"}\n )\n print(\"File created\")\n except Exception as e:\n print(\"Fail to create file\")\n print(str(e))\n try:\n file2 = File.objects.create(\n path=\"/juno/work/ci/resources/genomic_resources/targets/IMPACT505/b37/IMPACT505_b37_targets.ilist\",\n file_name=\"IMPACT505_b37_targets.ilist\",\n file_group=file_group,\n file_type=ilist,\n size=0,\n )\n file_metadata_2 = FileMetadata.objects.create(\n file=file2, version=0, metadata={\"assay\": \"IMPACT505_b37\", \"data_type\": \"targets_list\"}\n )\n print(\"File created\")\n except Exception as e:\n print(\"Fail to create file\")\n print(str(e))\n try:\n file3 = File.objects.create(\n path=\"/juno/work/ci/resources/genomic_resources/targets/IMPACT505/b37/IMPACT505_b37_baits.ilist\",\n file_name=\"IMPACT505_b37_baits.ilist\",\n file_group=file_group,\n file_type=ilist,\n size=0,\n )\n file_metadata_3 = 
FileMetadata.objects.create(\n file=file3, version=0, metadata={\"assay\": \"IMPACT505_b37\", \"data_type\": \"baits_list\"}\n )\n print(\"File created\")\n except Exception as e:\n print(\"Fail to create file\")\n print(str(e))\n try:\n file4 = File.objects.create(\n path=\"/juno/work/ci/resources/genomic_resources/targets/IMPACT505/b37/IMPACT505_FP_tiling_intervals.intervals\",\n file_name=\"IMPACT505_FP_tiling_intervals.intervals\",\n file_group=file_group,\n file_type=interval_list,\n size=0,\n )\n file_metadata_4 = FileMetadata.objects.create(\n file=file4, version=0, metadata={\"assay\": \"IMPACT505_b37\", \"data_type\": \"FP_intervals\"}\n )\n print(\"File created\")\n except Exception as e:\n print(\"Fail to create file\")\n print(str(e))", "def test_md(tmp_path) -> Path:\n yield Path(tmp_path)/\"test.md\"", "def generate_test_txt(name, path):\n with open(path + '/test.txt', 'a') as file:\n file.write('data/test/' + name + '\\n')", "def setUp(self):\n\n try:\n os.mkdir(self.pipeline_folder)\n except FileExistsError:\n pass\n\n with open(self.pipeline_spec_file, 'w+') as stream:\n json.dump(self.pipeline_spec, stream)\n\n with open(self.pipeline_source_file, 'w+') as stream:\n json.dump(self.source_description, stream)\n\n self.source = Source(folder=self.pipeline_folder)", "def create_ref(self, commit_id=None):\n pass", "def makeTestFile(text):\n f = tempfile.NamedTemporaryFile()\n f.write(text)\n f.flush()\n return f", "def test_create_files_with_python(self):\n opts = mock.Mock()\n opts.repo = self.repo\n opts.create_version_file = True\n opts.source = 'src'\n opts.version = '0.0.1'\n opts.version_file = None\n opts.org = \"ORG\"\n opts.desc = \"DESCRIPTION\"\n opts.templates = []\n opts.test_mode = False\n opts.history_file = 'HISTORY.md'\n opts.package = 'unittests'\n opts.develop = 'develop'\n opts.requirements = 'requirements.txt'\n opts.pypi_package_name = 'pypi.package.unittest'\n opts.python = 'python3'\n opts.gitignore_url = \"GIT_IGNORE_URL\"\n opts.add_gitignore = False\n opts.test_requirements = 'test-requirements.txt'\n version = os.path.join(self.repo, 'src', 'unittests', '__init__.py')\n os.system('rm -f {}'.format(version))\n create_files(opts)\n\n dir_list = os.listdir(self.repo)\n self.failUnless('cirrus.conf' in dir_list)\n self.failUnless('HISTORY.md' in dir_list)\n self.failUnless('MANIFEST.in' in dir_list)\n self.failUnless('setup.py' in dir_list)\n\n cirrus_conf = os.path.join(self.repo, 'cirrus.conf')\n config = ConfigParser.RawConfigParser()\n config.read(cirrus_conf)\n self.assertEqual(config.get('package', 'name'), opts.pypi_package_name)\n self.assertEqual(config.get('package', 'version'), opts.version)\n self.assertEqual(config.get('build', 'python'), 'python3')", "def __write_file(path, link_name, context):\n file_name = ''\n for strng in ['/', 'http:']:\n if not file_name:\n file_name = link_name.replace(strng, '')\n else:\n file_name = file_name.replace(strng, '')\n\n full_path = os.path.join(path)\n if not os.path.exists(full_path):\n os.makedirs(full_path)\n open(full_path + '/' + file_name, 'w').write(context.encode('utf-8'))\n logging.info('File: %s Created', full_path + '/' + file_name)", "def test_successful(self, mock_create, mock_msg_mgr):\n\n json_data = {\n \"input\" : {\n 'version': '6',\n 'files': {'input_a': [self.source_file.id]},\n 'json': {}\n },\n \"job_type_id\" : self.job_type1.pk\n }\n\n url = '/%s/jobs/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n 
self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.content)\n\n result = json.loads(response.content)\n\n #Response should be new v6 job detail response\n self.assertEqual(result['execution'], None)\n self.assertEqual(result['max_tries'], 3)\n self.assertTrue('/%s/jobs/' % self.api in response['location'])\n mock_create.assert_called_once()", "def test_tmp_file_content(self, mocker):\n payload = dict(id=\"B\", data={\"some\": \"data\"}, ai_service='A')\n headers = {'x-rh-identity': 'ABC'}\n\n with mocker.mock_module.patch.object(os, 'remove') as mock:\n self.client.post(self.url, json=payload, headers=headers)\n filename = mock.call_args[0][0]\n\n with tarfile.open(filename, 'r:gz') as tar:\n content = tar.extractfile('A_B.json').read()\n assert content == b'{\"some\": \"data\"}'\n\n os.remove(filename)", "def test_create_files_with_gitignore(self, mock_get):\n\n mock_resp = mock.Mock()\n mock_resp.raise_for_status = mock.Mock()\n mock_resp.content = \"IGNORE ME\\n\"\n mock_get.return_value = mock_resp\n\n opts = mock.Mock()\n opts.repo = self.repo\n opts.create_version_file = True\n opts.source = 'src'\n opts.version = '0.0.1'\n opts.version_file = None\n opts.org = \"ORG\"\n opts.desc = \"DESCRIPTION\"\n opts.templates = []\n opts.test_mode = False\n opts.history_file = 'HISTORY.md'\n opts.package = 'unittests'\n opts.develop = 'develop'\n opts.requirements = 'requirements.txt'\n opts.pypi_package_name = 'pypi.package.unittest'\n opts.python = 'python3'\n opts.gitignore_url = \"GIT_IGNORE_URL\"\n opts.add_gitignore = True\n opts.test_requirements = 'test-requirements.txt'\n version = os.path.join(self.repo, 'src', 'unittests', '__init__.py')\n os.system('rm -f {}'.format(version))\n create_files(opts)\n\n dir_list = os.listdir(self.repo)\n self.failUnless('cirrus.conf' in dir_list)\n self.failUnless('HISTORY.md' in dir_list)\n self.failUnless('MANIFEST.in' in dir_list)\n self.failUnless('setup.py' in dir_list)\n self.failUnless('.gitignore' in dir_list)\n\n gitignore = os.path.join(self.repo, '.gitignore')\n with open(gitignore, 'r') as handle:\n content = handle.read()\n self.assertEqual(content.strip(), \"IGNORE ME\")", "def get_test_file(self, test_name: str) -> str:\n test_files_location = self._resource_config.test_files_location\n\n if not test_files_location:\n raise BPRunnerException(self.__class__.__name__, \"Test Files Location attribute is not defined\")\n if not os.path.exists(test_files_location) or os.access(test_files_location, os.W_OK) is not True:\n raise BPRunnerException(\n self.__class__.__name__,\n 'The location of the test files \"{}\" does not exist or is not writable'.format(test_files_location),\n )\n reservation_files = os.path.join(test_files_location, self.reservation_id)\n if not os.path.exists(reservation_files):\n os.makedirs(reservation_files)\n test_file_path = os.path.join(\n reservation_files,\n test_name + \".bpt\",\n )\n test_file_content = self._download_test_file_flow.download_test_file(test_name)\n with open(test_file_path, \"w\") as f:\n f.write(test_file_content.decode(\"utf-8\"))\n return test_file_path", "def target_repo(\n vcs, vcs_commands, repo_base_dir, target_repo_name, target_repo_branch, target_test_file_name,\n target_test_file_content):\n path = repo_base_dir.join(target_repo_name)\n os.makedirs(path.strpath)\n subprocess.check_call(vcs_commands['init'] + [path.strpath])\n if 'config' in vcs_commands:\n for commands in vcs_commands['config']:\n subprocess.check_call(commands, cwd=path.strpath)\n if vcs == 'bzr':\n 
path = path.join(target_repo_branch)\n subprocess.check_call(vcs_commands['init-branch'] + [path.strpath])\n path.join(target_test_file_name).open('w').write(target_test_file_content)\n subprocess.check_call(vcs_commands['add'], cwd=path.strpath)\n subprocess.check_call(vcs_commands['commit'], cwd=path.strpath)\n if vcs == 'git':\n subprocess.check_call(vcs_commands['bare'], cwd=path.strpath)\n return path", "def _create_file(content=''):\r\n sjson_file = tempfile.NamedTemporaryFile(prefix=\"subs_\", suffix=\".srt.sjson\")\r\n sjson_file.content_type = 'application/json'\r\n sjson_file.write(textwrap.dedent(content))\r\n sjson_file.seek(0)\r\n return sjson_file", "def test_05_resource_create(self, Mock):\r\n pkg_request = FakeRequest(json.dumps(self.pkg_json_found), 200,\r\n {'content-type': 'application/json'})\r\n\r\n rsrc_request = FakeRequest(json.dumps(\r\n self.pkg_json_found['result']['resources'][0]),\r\n 200,\r\n {'content-type': 'text/html'})\r\n Mock.return_value = pkg_request\r\n with self.flask_app.test_request_context('/'):\r\n # Resource that exists\r\n app = App(short_name='urbanpark', name='Urban Parks')\r\n user = User(fullname='Daniel Lombrana Gonzalez')\r\n self.ckan.package_create(app=app, user=user, url=\"http://something.com\")\r\n Mock.return_value = rsrc_request\r\n out = self.ckan.resource_create(name='task')\r\n err_msg = \"It should create the task resource\"\r\n assert out[\"id\"] == self.task_resource_id, err_msg\r\n Mock.return_value = self.server_error\r\n try:\r\n self.ckan.resource_create(name='something-goes-wrong')\r\n except Exception as out:\r\n type, msg, status_code = out.args\r\n assert \"Server Error\" in msg, msg\r\n assert 500 == status_code, status_code\r\n assert \"CKAN: the remote site failed! resource_create failed\" == type, type", "def test_get_file_content(self):\n pass", "def github_link(self):\n if self.test_type == TestType.commit:\n test_type = 'commit'\n test_id = self.commit\n else:\n test_type = 'pull'\n test_id = self.pr_nr\n\n return f\"{self.fork.github_url}/{test_type}/{test_id}\"", "def makeFile(self, relativePath, content):\n baseDirectory = FilePath(self.mktemp())\n directory, filename = os.path.split(relativePath)\n directory = baseDirectory.preauthChild(directory)\n directory.makedirs()\n file = directory.child(filename)\n directory.child(filename).setContent(content)\n return file", "def test_get_file_with_git_and_revision(self):\n self._test_get_file(\n tool_name='Git',\n revision='123',\n base_commit_id=None,\n expected_revision='123')", "def test_export(self):\n structure = {\n \"README.rst\": \"Hi this is 1.0.0.\",\n \"twisted\": {\n \"newsfragments\": {\"README\": \"Hi this is 1.0.0\"},\n \"_version.py\": genVersion(\"twisted\", 1, 0, 0),\n \"web\": {\n \"newsfragments\": {\"README\": \"Hi this is 1.0.0\"},\n \"_version.py\": genVersion(\"twisted.web\", 1, 0, 0),\n },\n },\n }\n reposDir = self.makeRepository(self.tmpDir)\n self.createStructure(reposDir, structure)\n self.commitRepository(reposDir)\n\n exportDir = FilePath(self.mktemp()).child(\"export\")\n self.createCommand.exportTo(reposDir, exportDir)\n self.assertStructure(exportDir, structure)" ]
[ "0.65603554", "0.62640995", "0.62635124", "0.62011635", "0.6008765", "0.5936762", "0.5884765", "0.5857535", "0.5827133", "0.5797445", "0.5793405", "0.5725234", "0.57189417", "0.571022", "0.5687655", "0.5664678", "0.5638524", "0.56348014", "0.5630147", "0.5594338", "0.55792326", "0.55502665", "0.55477107", "0.5520945", "0.54971486", "0.5490254", "0.54767", "0.5470865", "0.54565525", "0.54542285", "0.5419388", "0.5404234", "0.5404147", "0.5385603", "0.53795505", "0.53731513", "0.5365318", "0.53647196", "0.53573734", "0.53542304", "0.5349417", "0.534077", "0.53388625", "0.53376555", "0.5336613", "0.5333481", "0.53282905", "0.53270537", "0.5319865", "0.53143716", "0.5302715", "0.52986425", "0.5296514", "0.52949554", "0.5292361", "0.5289019", "0.52873135", "0.5286698", "0.5285636", "0.5282886", "0.5271637", "0.52686715", "0.5267339", "0.52656776", "0.526418", "0.52639323", "0.52638185", "0.52542615", "0.5248561", "0.52479005", "0.52474993", "0.5245418", "0.52425605", "0.52394664", "0.52380216", "0.5227116", "0.5226006", "0.52237695", "0.52193725", "0.5204505", "0.5201587", "0.5194134", "0.5192596", "0.5192344", "0.51911926", "0.5189624", "0.51828086", "0.51826113", "0.51818925", "0.51809406", "0.5180895", "0.5174117", "0.51719636", "0.51657134", "0.516454", "0.51643497", "0.5161195", "0.51605487", "0.51578337", "0.51555926" ]
0.70232326
0
Compare the response (which is a dictionary) to the reference. First, the function retrieves the reference, then filters both ref and resp. Finally, it compares them.
def compare_with_ref(
    self, response, response_checker=default_checker.default_journey_checker
):
    def ref_resp2files(output_file, output_json):
        """
        Create a file for the filtered response and for the filtered reference
        """
        with open(output_file, "w") as reference_text:
            reference_text.write(output_json)

    def print_diff(ref_file, resp_file):
        """
        Print differences between reference and response in console
        """
        # open reference
        with open(ref_file) as reference_text:
            reference = reference_text.readlines()
        # open response
        with open(resp_file) as response_text:
            response = response_text.readlines()

        # Print failed test name
        print_color("\n\n" + str(file_name) + " failed :" + "\n\n", Colors.PINK)

        symbol2color = {"+": Colors.GREEN, "-": Colors.RED}
        for line in difflib.unified_diff(reference, response):
            print_color(line, symbol2color.get(line[0], Colors.DEFAULT))

    # Filtering the answer. (We compare to a reference also filtered with the same filter)
    filtered_response = response_checker.filter(response)

    # Get the reference

    # Create the file name
    filename = self.get_file_name()
    filepath = os.path.join(config["REFERENCE_FILE_PATH"], filename)

    assert os.path.isfile(filepath), "{} is not a file".format(filepath)

    with open(filepath, "r") as f:
        raw_reference = f.read()

    # Transform the string into a dictionary
    dict_ref = json.loads(raw_reference)

    # Get only the full_response part from the ref
    ref_full_response = dict_ref["full_response"]

    # Filtering the reference
    filtered_reference = response_checker.filter(ref_full_response)

    # Compare response and reference
    try:
        response_checker.compare(filtered_response, filtered_reference)
    except AssertionError as e:
        # print the assertion error message
        logging.error("Assertion Error: %s" % str(e))
        # find name of test
        file_name = filename.split("/")[-1]
        file_name = file_name[:-5]

        # create a folder
        dir_path = config["RESPONSE_FILE_PATH"]
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        # create path to ref and resp
        full_file_name_ref = dir_path + "/reference_" + file_name + ".txt"
        full_file_name_resp = dir_path + "/response_" + file_name + ".txt"

        json_filtered_reference = json.dumps(filtered_reference, indent=4)
        json_filtered_response = json.dumps(filtered_response, indent=4)

        # Save resp and ref as txt files in folder named outputs
        ref_resp2files(full_file_name_ref, json_filtered_reference)
        ref_resp2files(full_file_name_resp, json_filtered_response)

        # Print difference in console
        print_diff(full_file_name_ref, full_file_name_resp)

        raise
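A small self-contained sketch of the filter-then-compare contract that compare_with_ref expects of its checker object: filter() strips volatile fields from both payloads and compare() asserts the remainders are equal, with a unified diff produced on mismatch. The MinimalChecker class and the sample payloads below are invented purely for illustration and are not part of the test suite or of default_checker.

import difflib
import json


class MinimalChecker(object):
    # Hypothetical stand-in for a journey checker:
    # filter() drops volatile fields, compare() asserts equality of the rest.
    def filter(self, payload):
        return {k: v for k, v in payload.items() if k != "request_id"}

    def compare(self, resp, ref):
        assert resp == ref, "filtered response differs from reference"


checker = MinimalChecker()
reference = {"journeys": [{"duration": 600}], "request_id": "abc"}
response = {"journeys": [{"duration": 660}], "request_id": "xyz"}

filtered_resp = checker.filter(response)
filtered_ref = checker.filter(reference)
try:
    checker.compare(filtered_resp, filtered_ref)
    print("response matches reference")
except AssertionError:
    # On mismatch, print a unified diff of the two filtered payloads,
    # mirroring what print_diff() does with the saved reference_*/response_* files.
    ref_lines = json.dumps(filtered_ref, indent=4).splitlines(True)
    resp_lines = json.dumps(filtered_resp, indent=4).splitlines(True)
    print("".join(difflib.unified_diff(ref_lines, resp_lines)))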
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_original_response_with_copy(context):\n original = context.response.json()\n copy = context.response_copy\n\n def compare_top_level_values():\n # get the list of fields that are JSON values not arrays\n keys = [val for val in original.iterkeys() if not isinstance(original[val], (dict, list, set))]\n assert keys, ('Expected at least 1 field key to compare but got none!')\n logging.debug('List of top tier field keys to compare: %s', keys)\n for key in keys:\n assert original[key] == copy[key]\n logging.debug(\n 'All top level fields in the response copy have the same values as'\n ' in the original response. Here is a list of compared fields:\\n%s',\n ', '.join(keys))\n\n def compare_items():\n original_items = original['items']\n copy_items = copy['items']\n skip = ['title', 'last_activity_date']\n for original_item in original_items:\n # get all item field keys\n keys = [val for val in original_item.iterkeys()]\n # remove the keys that need to be skipped\n keys = [x for x in keys if x not in skip]\n for copy_item in copy_items:\n # find matching items\n if original_item['question_id'] == copy_item['question_id']:\n # compare original an copied items\n for key in keys:\n assert original_item[key] == copy_item[key]\n logging.debug(\n 'All fields in the copied item ID: %s'\n ' have the same values as in in the original items',\n copy_item['question_id'])\n\n compare_top_level_values()\n compare_items()", "def validate_get_response(response, status, count, job_templates, keys=None):\n assert (response[\"status\"]) == status\n json_response = json.loads(response[\"body\"])\n assert (json_response[\"count\"]) == count\n results = json_response[\"results\"]\n for item in results:\n matching_item = find_by_id(item[\"id\"], job_templates)\n if not keys:\n keys = list(matching_item.keys())\n assert sorted(keys) == sorted(list(item.keys()))\n compare(item, matching_item, keys)", "def compare_result_with_reference(parsed_result:dict, parsed_reference:dict, tolerance:dict, debug_mode=False):\n\n result = [{key: value} for key, value in get_hashable_entries(parsed_result)]\n reference = [{key: value} for key, value in get_hashable_entries(parsed_reference)]\n\n if debug_mode:\n check_key_consistency(reference, result)\n\n def key_from_single_entry(a:dict):\n return [x for x in a.keys()][0]\n\n def value_from_single_entry(a:dict):\n return [x for x in a.values()][0]\n\n for i in range(0, len(result)):\n\n result_key = key_from_single_entry(result[i])\n reference_key = key_from_single_entry(reference[i])\n assert result_key == reference_key\n\n result_value = value_from_single_entry(result[i])\n reference_value = value_from_single_entry(reference[i])\n\n assert_equal(result_value, reference_value, tolerance[reference_key], message=reference_key)", "def test_fetch_related_data_valid(self):\n resp = requests.post(\n _CONF[\"re_api_url\"] + \"/api/v1/query_results\",\n params={\"stored_query\": \"ws_fetch_related_data\", \"show_public\": True},\n data=json.dumps({\"obj_key\": \"1:1:1\"}),\n ).json()\n self.assertEqual(resp[\"count\"], 1)\n self.assertEqual(resp[\"has_more\"], False)\n res = resp[\"results\"][0]\n # Check the root object results\n self.assertEqual(res[\"obj\"][\"_key\"], \"1:1:1\")\n self.assertEqual(res[\"obj_type\"][\"_key\"], \"Module.Type1-1.0\")\n # Check the copy results\n self.assertEqual(res[\"copies\"][\"count\"], 1)\n self.assertEqual(len(res[\"copies\"][\"data\"]), 1)\n self.assertEqual(\n res[\"copies\"][\"data\"][0][\"data\"][\"_id\"], \"ws_object_version/1:2:1\"\n 
)\n self.assertEqual(res[\"copies\"][\"data\"][0][\"hops\"], 1)\n self.assertEqual(\n res[\"copies\"][\"data\"][0][\"type\"][\"_id\"], \"ws_type_version/Module.Type1-1.0\"\n )\n # Check the provenance results\n self.assertEqual(res[\"prov\"][\"count\"], 1)\n self.assertEqual(len(res[\"prov\"][\"data\"]), 1)\n self.assertEqual(\n res[\"prov\"][\"data\"][0][\"data\"][\"_id\"], \"ws_object_version/1:3:1\"\n )\n self.assertEqual(res[\"prov\"][\"data\"][0][\"hops\"], 1)\n self.assertEqual(\n res[\"prov\"][\"data\"][0][\"type\"][\"_id\"], \"ws_type_version/Module.Type1-1.0\"\n )\n # Check the ref results\n self.assertEqual(res[\"refs\"][\"count\"], 1)\n self.assertEqual(len(res[\"refs\"][\"data\"]), 1)\n self.assertEqual(\n res[\"refs\"][\"data\"][0][\"data\"][\"_id\"], \"ws_object_version/1:4:1\"\n )\n self.assertEqual(res[\"refs\"][\"data\"][0][\"hops\"], 1)\n self.assertEqual(\n res[\"refs\"][\"data\"][0][\"type\"][\"_id\"], \"ws_type_version/Module.Type1-1.0\"\n )", "def _slack_get_value(slack_response, search_value, search_field, return_field, classifier):\n if not slack_response['ok']:\n return False\n for item in slack_response[classifier]:\n if search_field in item and search_value == item[search_field] and return_field in item:\n return item[return_field]", "def http_get_and_compare_resp(url, expected_get_json_resp, check_util=default_full_compare):\n get_resp_obj = RestClientApis.http_get_and_check_success(url)\n if get_resp_obj.success:\n get_resp_json = get_resp_obj.json_body\n success = check_util(json.loads(expected_get_json_resp), get_resp_json)\n message, return_code = assign_message_code(success)\n else:\n return get_resp_obj\n\n rest_return_obj = RestReturn(success=success, message=message, http_status=return_code,\n json_body=get_resp_json,\n response_object=get_resp_obj.response_object)\n return rest_return_obj", "def filter_pro_matches(resp):\n\n return [x for x in resp if x[\"dire_name\"] and x[\"radiant_name\"]]", "def fusion_api_validate_response(self, respDict, valDict):\n success = True\n returnDict = {}\n keys = []\n for key in valDict:\n if not valDict[key]:\n continue\n # logger._log_to_console_and_log_file('key: %s' % (key))\n keyDict = {'key': key, 'expected': valDict[\n key], 'actual': respDict[key], 'success': True}\n if key in respDict:\n pattern = re.compile(str(valDict[key]))\n # if not re.search(str(valDict[key]), str(respDict[key])):\n # t = re.compile('(?i)Warning|Unknown|Terminated|Killed|Error|Completed')\n\n if not re.search(pattern, str(respDict[key])):\n\n success = False\n keyDict['success'] = False\n else:\n success = False\n keyDict['success'] = False\n keys.append(keyDict)\n\n returnDict['success'] = success\n returnDict['keys'] = keys\n return returnDict", "def compare():\n body: t.Any = request.json\n check_error({'input': {'old': {}, 'new': {}}}, body)\n response_new = rpc_search({'input': body['input']['new']})\n response_old = rpc_search({'input': body['input']['old']})\n\n modules_new = response_new['yang-catalog:modules']['module']\n modules_old = response_old['yang-catalog:modules']['module']\n\n if len(modules_new) == 0 or len(modules_old) == 0:\n abort(404, description='No hits found either in old or new input')\n\n new_mods = []\n for mod_new in modules_new:\n new_rev = mod_new['revision']\n new_name = mod_new['name']\n found = False\n new_rev_found = False\n for mod_old in modules_old:\n old_rev = mod_old['revision']\n old_name = mod_old['name']\n if new_name == old_name and new_rev == old_rev:\n found = True\n break\n if new_name == 
old_name and new_rev != old_rev:\n new_rev_found = True\n if not found:\n mod_new['reason-to-show'] = 'New module'\n new_mods.append(mod_new)\n if new_rev_found:\n mod_new['reason-to-show'] = 'Different revision'\n new_mods.append(mod_new)\n if len(new_mods) == 0:\n abort(404, description='No new modules or modules with different revisions found')\n output = {'output': new_mods}\n return output", "def compare(self, base, head):\r\n url = '{0}/compare/{1}...{2}'.format(self.parent.get_url(), base, head)\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def _referencedChecker(self, entity, params):\n\n if 'ref_logic' not in params:\n return False\n\n logic = self.helper.getLogicForItem(params, 'ref_logic')\n filter = {\n params['ref_field']: entity.key()\n }\n ref_entity = logic.getForFields(filter=filter, unique=True)\n\n result = ref_entity is not None\n\n no_ref = params.get('no_ref')\n if no_ref:\n result = not result\n\n return result", "def _matcher(r1: vcr.request.Request, r2: vcr.request.Request) -> None:\n assert r1.uri == r2.uri and r1.body == r2.body and r1.headers == r2.headers", "def _check_response(self, response_contents, correct_jsons):\r\n for username, content in response_contents.items():\r\n\r\n # Used in debugger for comparing objects.\r\n # self.maxDiff = None\r\n\r\n # We should compare top_words for manually,\r\n # because they are unsorted.\r\n keys_to_compare = set(content.keys()).difference(set(['top_words']))\r\n self.assertDictEqual(\r\n {k: content[k] for k in keys_to_compare},\r\n {k: correct_jsons[username][k] for k in keys_to_compare})\r\n\r\n # comparing top_words:\r\n top_words_content = sorted(\r\n content['top_words'],\r\n key=itemgetter('text')\r\n )\r\n top_words_correct = sorted(\r\n correct_jsons[username]['top_words'],\r\n key=itemgetter('text')\r\n )\r\n self.assertListEqual(top_words_content, top_words_correct)", "def _test_single_prerecorded_api_call(app, path, prerecorded, contexts={}):\n rv = app.get(path)\n assert rv.status_code == 200\n response = json.loads(rv.get_data().decode('utf8'))\n if type(prerecorded) is list:\n response = response['items']\n compare_objects(contexts, '', prerecorded, response)\n return False", "def _filter_entries_by_response(self, urls, har=None):\r\n if not har:\r\n har = self.har\r\n \r\n matches = []\r\n if len(har[\"log\"][\"entries\"]) > 1:\r\n for entry in har[\"log\"][\"entries\"]:\r\n for url in urls:\r\n if url in entry[\"request\"][\"url\"]:\r\n tempObject = {}\r\n if entry[\"response\"][\"status\"] == 200 and entry[\"response\"][\"content\"].get(\"text\") and entry[\"response\"][\"content\"][\"text\"] != \"\":\r\n tempObject['url'] = entry[\"request\"][\"url\"]\r\n tempObject['response'] = entry[\"response\"][\"content\"][\"text\"].encode('ascii', 'ignore')\r\n matches.append(tempObject)\r\n return matches", "def check_recommendation_in_result(context):\n json_data = context.response.json()\n result = json_data[\"recommendation\"]\n assert result == {}", "def verify(self, response):", "def references(md5):\n u = Upload.objects.filter(md5=md5).first()\n if not u:\n abort(404)\n # first, is this searchable?\n is_searchable = False\n count = elastic.count('page', filter={'md5': md5})\n if count > 0:\n is_searchable = True\n #annotations = Reference.objects.filter(upload=u, ref_url__exists=True)\n annotations = Reference.objects.filter(upload=u).order_by('ref_pos')\n # create a list of referenced things\n references = {'references':[], 'searchable': is_searchable}\n for a in annotations:\n try:\n 
references['references'].append({\n 'pos_x': a.pos_x, \n 'pos': a.pos, \n 'ref': a.ref_upload.md5, \n 'ref_pos': a.ref_pos\n })\n except:\n pass\n return jsonify(references)", "def get_matching_citizens():\n try:\n volunteer = request.headers.get('X-volunteer')\n except:\n return jsonify(\"X-volunteer header is missing\")\n logger.info(\"X-volunteer header is missing\")\n \n vaibhav_interests = ['sleeping','home building','garden walks']\n arsalan_interests = ['music','politics','science','reading']\n senior_list = table.scan()[\"Items\"]\n if request.headers['X-volunteer'] == \"Vaibhav\":\n dummy_volunteer_interest_list = vaibhav_interests\n matching_list = []\n for senior in senior_list:\n match = len(set(dummy_volunteer_interest_list) & set(senior['interests'])) / float(len(set(dummy_volunteer_interest_list) | set(senior['interests']))) * 100\n if match >= 20:\n matching_list.append(senior)\n if len(matching_list) == 0:\n return(jsonify(\"No matches found!\"))\n logger.info(\"Vaibhav Matching citizens returned\")\n elif request.headers['X-volunteer'] == \"Arsalan\":\n dummy_volunteer_interest_list = arsalan_interests\n matching_list = []\n # senior_list = [post for post in posts.find()]\n for senior in senior_list:\n match = len(set(dummy_volunteer_interest_list) & set(senior['interests'])) / float(len(set(dummy_volunteer_interest_list) | set(senior['interests']))) * 100\n if match >= 20:\n matching_list.append(senior)\n if len(matching_list) == 0:\n return jsonify(\"No matches found!\")\n logger.info(\"Arsalan Matching citizens returned\")\n else:\n return jsonify(\"Send a valid user header!\")\n return jsonify(matching_list)", "def test_get_data_success(monkeypatch):\n\n class MockResponse(object):\n def __init__(self):\n self.status_code = 200\n\n def json(self):\n return {\n \"continue\": {\"excontinue\": 1, \"continue\": \"||info\"},\n \"query\": {\n \"pages\": {\n \"151688\": {\n \"pageid\": 151688,\n \"ns\": 0,\n \"title\": \"Naantali\",\n \"index\": -1,\n \"extract\": \"Naantali (en suédois Nådendal, en latin Vallis Gratiae - la vallée de grâce) est une ville du sud-ouest de la Finlande. 
Cette petite ville, qui compte une population de 19 000 habitants, se situe dans la province de Finlande occidentale et la région de Finlande du Sud-Ouest, à 15 km à l'ouest de Turku, la capitale provinciale.\", # noqa: E501\n \"contentmodel\": \"wikitext\",\n \"pagelanguage\": \"fr\",\n \"pagelanguagehtmlcode\": \"fr\",\n \"pagelanguagedir\": \"ltr\",\n \"touched\": \"2020-10-08T00:35:54Z\",\n \"lastrevid\": 169716755,\n \"length\": 14393,\n \"fullurl\": \"https://fr.wikipedia.org/wiki/Naantali\", # noqa: E501\n \"editurl\": \"https://fr.wikipedia.org/w/index.php?title=Naantali&action=edit\", # noqa: E501\n \"canonicalurl\": \"https://fr.wikipedia.org/wiki/Naantali\", # noqa: E501\n },\n \"2709037\": {\n \"pageid\": 2709037,\n \"ns\": 0,\n \"title\": \"Muumimaailma\",\n \"index\": 0,\n \"contentmodel\": \"wikitext\",\n \"pagelanguage\": \"fr\",\n \"pagelanguagehtmlcode\": \"fr\",\n \"pagelanguagedir\": \"ltr\",\n \"touched\": \"2020-10-04T16:19:49Z\",\n \"lastrevid\": 168229306,\n \"length\": 1447,\n \"fullurl\": \"https://fr.wikipedia.org/wiki/Muumimaailma\", # noqa: E501\n \"editurl\": \"https://fr.wikipedia.org/w/index.php?title=Muumimaailma&action=edit\", # noqa: E501\n \"canonicalurl\": \"https://fr.wikipedia.org/wiki/Muumimaailma\", # noqa: E501\n },\n \"5751499\": {\n \"pageid\": 5751499,\n \"ns\": 0,\n \"title\": \"Kultaranta\",\n \"index\": 1,\n \"contentmodel\": \"wikitext\",\n \"pagelanguage\": \"fr\",\n \"pagelanguagehtmlcode\": \"fr\",\n \"pagelanguagedir\": \"ltr\",\n \"touched\": \"2020-10-08T00:43:32Z\",\n \"lastrevid\": 164009230,\n \"length\": 11889,\n \"fullurl\": \"https://fr.wikipedia.org/wiki/Kultaranta\", # noqa: E501\n \"editurl\": \"https://fr.wikipedia.org/w/index.php?title=Kultaranta&action=edit\", # noqa: E501\n \"canonicalurl\": \"https://fr.wikipedia.org/wiki/Kultaranta\", # noqa: E501\n },\n \"7700543\": {\n \"pageid\": 7700543,\n \"ns\": 0,\n \"title\": \"Port de Naantali\",\n \"index\": 2,\n \"contentmodel\": \"wikitext\",\n \"pagelanguage\": \"fr\",\n \"pagelanguagehtmlcode\": \"fr\",\n \"pagelanguagedir\": \"ltr\",\n \"touched\": \"2020-10-04T16:25:50Z\",\n \"lastrevid\": 162923416,\n \"length\": 2675,\n \"fullurl\": \"https://fr.wikipedia.org/wiki/Port_de_Naantali\", # noqa: E501\n \"editurl\": \"https://fr.wikipedia.org/w/index.php?title=Port_de_Naantali&action=edit\", # noqa: E501\n \"canonicalurl\": \"https://fr.wikipedia.org/wiki/Port_de_Naantali\", # noqa: E501\n },\n }\n },\n }\n\n parameters = {\n \"action\": \"query\",\n \"prop\": \"extracts|info\",\n \"inprop\": \"url\",\n \"explaintext\": True,\n \"exsentences\": 2,\n \"exlimit\": 1,\n \"generator\": \"geosearch\",\n \"ggsradius\": 10000,\n \"ggscoord\": f\"{00000}|{00000}\",\n \"format\": \"json\",\n }\n\n headers = {\n \"date\": \"10/10/2020\",\n \"user-agent\": '\"MoominPappaBot/fake_version',\n }\n\n def mock_get(url, params=parameters, headers=headers, timeout=10):\n return MockResponse()\n\n monkeypatch.setattr(requests, \"get\", mock_get)\n\n request = MediawikiApi(\"user_input1\", \"user_input2\")\n result = request.get_data()\n expected_result = {\n \"title\": \"Naantali\",\n \"extract\": \"Naantali (en suédois Nådendal, en latin Vallis Gratiae - la vallée de grâce) est une ville du sud-ouest de la Finlande. 
Cette petite ville, qui compte une population de 19 000 habitants, se situe dans la province de Finlande occidentale et la région de Finlande du Sud-Ouest, à 15 km à l'ouest de Turku, la capitale provinciale.\", # noqa: E501\n \"fullurl\": \"https://fr.wikipedia.org/wiki/Naantali\",\n }\n assert result == expected_result", "def test_list_referrals_by_desc_object(self):\n user = factories.UserFactory()\n referrals = [\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"First by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"Second by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n ]\n\n self.setup_elasticsearch()\n response = self.client.get(\n f\"/api/referrallites/?user={user.id}&sort=object.keyword&sort_dir=desc\",\n HTTP_AUTHORIZATION=f\"Token {Token.objects.get_or_create(user=user)[0]}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"count\"], 2)\n self.assertEqual(response.json()[\"results\"][0][\"id\"], referrals[1].id)\n self.assertEqual(response.json()[\"results\"][1][\"id\"], referrals[0].id)", "def rpc_match():", "def pare_dict(d, ref, strict_b=False, **kw):\n strict_b = kw.get(\"strict\", strict_b)\n if strict_b:\n return {k: v for k, v in d.items() if k in ref and v != ref.get(k)}\n return {k: v for k, v in d.items() if k not in ref or v != ref.get(k)}", "def compare_response_to_model_instance(self, response, model_instance):\n parsed_response = json_decode(response)\n headers = parsed_response['headers']\n data = parsed_response['data']\n self.assertEquals(len(data), len(model_instance))\n for i in range(len(data)):\n datum = self.deserialize(headers, data[i])\n self.compare_model_instance(datum, model_instance[i])", "def compare_promises(fi, se, en, ref) -> bool:\n same = False\n if (fi == ref):\n return True\n if (fi != None):\n if (fi == ref) or (format_promise(fi) == format_promise(ref)):\n return True\n if (se == ref):\n return True\n if (en == ref):\n return True\n if (se != None):\n if (se == ref) or (format_promise(se) == format_promise(ref)):\n return True\n return same", "def compareTwoReco(reference, new, histos, debug=1):\n\n # Tracks with index False are the ones that have been matched to the reference track collection\n new_valid = [True for i in new]\n\n # Tracks with index False are the ones that have been matched to the comparison track collection\n original_valid = [True for i in reference]\n print \" \".join(\"%10s\" % k for k in variables)\n debug_verbose = checkDebug(debug, 'Verbose')\n debug_ordinary = checkDebug(debug, 'Ordinary')\n debug_recovery = checkDebug(debug, 'Recovery')\n debug_lost = checkDebug(debug, 'Lost')\n debug_fake = checkDebug(debug, 'Fake')\n\n for original_index, original in enumerate(reference):\n # Fill in cumulative plots for the reference sample first\n histos['reference_hits_vs_algo'].Fill(original.algo, original.hits)\n histos['reference_hits_vs_orialgo'].Fill(original.orialgo, original.hits)\n histos['reference_hits_vs_pt'].Fill(original.pt, original.hits)\n histos['den'].Fill(original.pt)\n histos['den_eta'].Fill(original.eta)\n histos['den_phi'].Fill(original.phi)\n histos['den_hits'].Fill(original.hits)\n histos['den_algo'].Fill(original.algo)\n histos['den_orialgo'].Fill(original.orialgo)\n\n # Now start 
to look for a matching track in the comparison track collection\n window_depth = 400 # elements to span to look for best candidate\n iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch = -1, 100, 100\n if debug_verbose:\n print original\n for i,j in enumerate(new):\n if new_valid[i] == True:\n if debug_verbose:\n print \" \", i, j\n if window_depth == 0:\n break\n dr_squared, dPt_over_pt = match(original, j)\n if dr_squared < bestDeltaRMatch*bestDeltaRMatch and dPt_over_pt < DELTA_PT_OVER_PT_CUT:\n iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch = i, dr_squared, dPt_over_pt\n if debug_verbose:\n print \" \", window_depth, iBest, bestDeltaRMatch, dr_squared, bestDeltaPt_over_PtMatch, dPt_over_pt\n if bestDeltaRMatch <= 0.0001 or bestDeltaPt_over_PtMatch == 0.0001:\n break\n window_depth -= 1\n if iBest != -1 and bestDeltaRMatch < DELTA_R_CUT:\n # These are the tracks in the reference track collection\n # that have been matched to a track in the comparison\n # track collection\n new_valid[iBest] = False\n original_valid[original_index] = False\n assert original.run == new[iBest].run, \"run mismatch\"\n assert original.ls == new[iBest].ls, \"ls mismatch\"\n assert original.event == new[iBest].event, \"event mismatch\"\n if debug_ordinary:\n print original\n print new[iBest]\n print iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch, '\\n'\n histos['num'].Fill(original.pt)\n histos['num_eta'].Fill(original.eta)\n histos['num_phi'].Fill(original.phi)\n histos['num_hits'].Fill(original.hits)\n histos['num_algo'].Fill(original.algo)\n histos['num_orialgo'].Fill(original.orialgo)\n histos['fake_num'].Fill(new[iBest].pt)\n histos['fake_num_eta'].Fill(new[iBest].eta)\n histos['fake_num_phi'].Fill(new[iBest].phi)\n histos['fake_num_hits'].Fill(new[iBest].hits)\n histos['fake_num_algo'].Fill(new[iBest].algo)\n histos['fake_num_orialgo'].Fill(new[iBest].orialgo)\n histos['comparison_algo_vs_reference_algo'].Fill(original.algo, new[iBest].algo)\n histos['comparison_orialgo_vs_reference_orialgo'].Fill(original.orialgo, new[iBest].orialgo)\n histos['comparison_hits_vs_reference_hits'].Fill(original.hits, new[iBest].hits)\n\n # Let's try a recovery loop with somewhat lesser stringent cuts\n for original_index, original in enumerate(reference):\n if original_valid[original_index]:\n # Now start to look for a matching track in the comparison track collection\n window_depth = 300 # elements to span to look for best candidate\n iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch = -1, 100, 100\n if debug_verbose:\n print \"Recovery \", original\n for i,j in enumerate(new):\n if new_valid[i] == True:\n if debug_verbose:\n print \"Recovery \", i, j\n if window_depth == 0:\n break\n dr_squared, dPt_over_pt = match(original, j)\n if dr_squared < bestDeltaRMatch*bestDeltaRMatch and dPt_over_pt < DELTA_PT_OVER_PT_CUT*6:\n iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch = i, dr_squared, dPt_over_pt\n if debug_verbose:\n print \"Recovery \", window_depth, iBest, bestDeltaRMatch, dr_squared, bestDeltaPt_over_PtMatch, dPt_over_pt\n if bestDeltaRMatch <= 0.0001 or bestDeltaPt_over_PtMatch == 0.0001:\n break\n window_depth -= 1\n if iBest != -1 and bestDeltaRMatch < DELTA_R_CUT*10: # inflate cut on DeltaR to recover some good-medium matching\n # These are the tracks in the reference track collection\n # that have been matched to a track in the comparison\n # track collection\n new_valid[iBest] = False\n original_valid[original_index] = False\n if debug_recovery:\n print \"Recovery \", original\n print \"Recovery \", 
new[iBest]\n print \"Recovery \", iBest, bestDeltaRMatch, bestDeltaPt_over_PtMatch\n histos['num'].Fill(original.pt)\n histos['num_eta'].Fill(original.eta)\n histos['num_phi'].Fill(original.phi)\n histos['num_hits'].Fill(original.hits)\n histos['num_algo'].Fill(original.algo)\n histos['num_orialgo'].Fill(original.orialgo)\n histos['fake_num'].Fill(new[iBest].pt)\n histos['fake_num_eta'].Fill(new[iBest].eta)\n histos['fake_num_hits'].Fill(new[iBest].hits)\n histos['fake_num_algo'].Fill(new[iBest].algo)\n histos['fake_num_orialgo'].Fill(new[iBest].orialgo)\n histos['comparison_algo_vs_reference_algo'].Fill(original.algo, new[iBest].algo)\n histos['comparison_orialgo_vs_reference_orialgo'].Fill(original.orialgo, new[iBest].orialgo)\n histos['comparison_hits_vs_reference_hits'].Fill(original.hits, new[iBest].hits)\n\n\n # These are the tracks in the reference track collection\n # that have *not* been associated to any track in the\n # comparison collection == > LOST TRACKS\n reference_not_assigned = [j for i,j in enumerate(reference) if original_valid[i]]\n reference_not_assigned.sort(key=lambda tr: tr.algo)\n if debug_lost:\n print \"**** Lost tracks **** %d\" % len(reference_not_assigned)\n for j in reference_not_assigned:\n histos['lost_hits_vs_algo'].Fill(j.algo, j.hits)\n histos['lost_hits_vs_orialgo'].Fill(j.orialgo, j.hits)\n histos['lost_hits_vs_pt'].Fill(j.pt, j.hits)\n histos['lost_eta'].Fill(j.eta)\n if debug:\n print j\n if debug_lost:\n print \"**** End of Lost tracks ****\"\n\n # Fake Tracks\n for i, j in enumerate(new):\n # Fill in the cumulative plots related to tracks in the comparison track collection\n histos['comparison_hits_vs_algo'].Fill(j.algo, j.hits)\n histos['comparison_hits_vs_orialgo'].Fill(j.orialgo, j.hits)\n histos['comparison_hits_vs_pt'].Fill(j.pt, j.hits)\n histos['fake_den'].Fill(j.pt)\n histos['fake_den_eta'].Fill(j.eta)\n histos['fake_den_phi'].Fill(j.phi)\n histos['fake_den_hits'].Fill(j.hits)\n histos['fake_den_algo'].Fill(j.algo)\n histos['fake_den_orialgo'].Fill(j.orialgo)\n\n # These are the tracks in the comparison track collection\n # that have *not* been associated to any track in the\n # reference collection ==> FAKE TRACKS\n new_not_assigned = [j for i,j in enumerate(new) if new_valid[i]]\n new_not_assigned.sort(key=lambda tr: tr.algo)\n if debug_fake:\n print \"**** Fake tracks **** %d\" % len(new_not_assigned)\n for j in new_not_assigned:\n histos['fake_hits_vs_algo'].Fill(j.algo, j.hits)\n histos['fake_hits_vs_orialgo'].Fill(j.orialgo, j.hits)\n histos['fake_hits_vs_pt'].Fill(j.pt, j.hits)\n if debug:\n print j\n if debug_fake:\n print \"**** End of Fake tracks ****\"", "def test_get_with_filter_person_factoid(mockclient_cl1):\n r = mockclient_cl1.get(TEST_URL + \"?size=100&f=F00062&p=P00063\")\n assert r.status_code == 200\n assert r.json[\"statements\"][0][\"@id\"] == \"Stmt00184\"\n r = mockclient_cl1.get(TEST_URL + \"?size=100&f=F00062&p=P00064\")\n assert r.status_code == 404", "def compare(self, **kwargs):\n\n source_params = {'sid': kwargs.get('source_sid'),\n 'did': kwargs.get('source_did'),\n 'scid': kwargs.get('source_scid')\n }\n\n target_params = {'sid': kwargs.get('target_sid'),\n 'did': kwargs.get('target_did'),\n 'scid': kwargs.get('target_scid')\n }\n\n if 'source_tid' in kwargs:\n source_params['tid'] = kwargs['source_tid']\n if 'target_tid' in kwargs:\n target_params['tid'] = kwargs['target_tid']\n\n source = self.fetch_objects_to_compare(**source_params)\n\n target = self.fetch_objects_to_compare(**target_params)\n\n # If 
both the dict have no items then return None.\n if not (source or target) or (\n len(source) <= 0 and len(target) <= 0):\n return None\n\n return compare_dictionaries(source, target,\n self.node_type,\n self.blueprint.COLLECTION_LABEL,\n self.keys_to_ignore)", "def isResp(obxDict):\n readingCode = getReadingCode(obxDict)\n return readingCode == '76270-8'", "def __eq__(self, other):\n if not isinstance(other, ClientDetailResponseResponse):\n return False\n\n return self.__dict__ == other.__dict__", "def compare_json(json1, json2):\r\n return JsonType.eq(json1, json2)", "def compare(isamAppliance1, isamAppliance2):\n ret_obj1 = get_all(isamAppliance1)\n ret_obj2 = get_all(isamAppliance2)\n\n for obj in ret_obj1['data']:\n del obj['id']\n for obj in ret_obj2['data']:\n del obj['id']\n\n return tools.json_compare(ret_obj1, ret_obj2, deleted_keys=['id'])", "def verify_object(self, data):\n rv = self.get(data[self.id_field])\n result = not is_404(rv)\n if result:\n for key, value in data:\n if not in_response(rv, value):\n return False\n return result", "def verify_response_dict(api_key, response):\n LOGGER.debug('Verifying WSAPI response signature')\n\n # Remove signature from the response\n r = dict(response)\n del r['h']\n\n # Convert to HTML query as that is used by Yubico to sign the response\n query = sorted_urlencode(list(r.iteritems()))\n\n # We unquote it because it's not the HTTP quoted version\n query = urllib.unquote_plus(query)\n\n status = sign(api_key, query) == response['h']\n LOGGER.debug('Signature result ' + str(status))\n return status", "def dict_match(left, right, res=None):\n if res is None:\n res = [True, ]\n if res[0] == False:\n return False\n for k in right.keys():\n if (k in left):\n if (isinstance(left[k], dict) and isinstance(right[k], dict)):\n dict_match(left[k], right[k], res=res)\n else:\n res[0] = res[0] and left[k] == right[k]\n if res[0] == False:\n break\n return res[0]", "def _processGETResp(self, output, request):\r\n msg = {'key' : output}\r\n\r\n self._render_GET(request, httplib.OK,\r\n 'application/json; charset=utf-8', json.dumps(msg))", "def print_diff(ref_file, resp_file):\n # open reference\n with open(ref_file) as reference_text:\n reference = reference_text.readlines()\n # open response\n with open(resp_file) as response_text:\n response = response_text.readlines()\n\n # Print failed test name\n print_color(\"\\n\\n\" + str(file_name) + \" failed :\" + \"\\n\\n\", Colors.PINK)\n\n symbol2color = {\"+\": Colors.GREEN, \"-\": Colors.RED}\n for line in difflib.unified_diff(reference, response):\n print_color(line, symbol2color.get(line[0], Colors.DEFAULT))", "def test_list_referrals_by_desc_units_requesters(self):\n user = factories.UserFactory(unit_name=\"a_unite\")\n referrals = [\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n post__users=[factories.UserFactory(unit_name=\"c_unite\"), user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n post__users=[factories.UserFactory(unit_name=\"b_unite\"), user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n ]\n\n self.setup_elasticsearch()\n response = self.client.get(\n f\"/api/referrallites/?user={user.id}&sort=users_unit_name_sorting&sort_dir=asc\",\n HTTP_AUTHORIZATION=f\"Token {Token.objects.get_or_create(user=user)[0]}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"count\"], 
2)\n self.assertEqual(response.json()[\"results\"][0][\"id\"], referrals[0].id)\n self.assertEqual(response.json()[\"results\"][1][\"id\"], referrals[1].id)", "def __eq__(self, other):\n if not isinstance(other, InlineResponse200):\n return False\n\n return self.to_dict() == other.to_dict()", "def _api_call(self, url, response_checker):\n self.request_compare(url)", "def compare_json(js1, js2):\n return js1.items() <= js2.items()", "def _get_res(cls, res_df, output_request):\n\n out_req_cleaned = copy.deepcopy(output_request)\n res_out = None\n\n res_reqs = []\n ti = res_df.index\n for req in out_req_cleaned:\n if req in res_df:\n res_reqs.append(req)\n if res_out is None:\n res_out = {}\n res_out[req] = cls.ensure_res_len(res_df[req].values, ti)\n\n for req in res_reqs:\n out_req_cleaned.remove(req)\n\n return res_out, out_req_cleaned", "def find_book_dois_in_crossref(isbn_list):\n ret_value = {\n \"success\": False,\n \"dois\": []\n }\n if type(isbn_list) != type([]) or len(isbn_list) == 0:\n ret_value['error_msg'] = \"Parameter must be a non-empty list!\"\n return ret_value\n filter_list = [\"isbn:\" + isbn.strip() for isbn in isbn_list]\n filters = \",\".join(filter_list)\n api_url = \"https://api.crossref.org/works?filter=\"\n url = api_url + filters + \"&rows=500\"\n request = Request(url)\n request.add_header(\"User-Agent\", USER_AGENT)\n try:\n ret = urlopen(request)\n content = ret.read()\n data = json.loads(content)\n if data[\"message\"][\"total-results\"] == 0:\n ret_value[\"success\"] = True\n else:\n for item in data[\"message\"][\"items\"]:\n if item[\"type\"] in [\"monograph\", \"book\"] and item[\"DOI\"] not in ret_value[\"dois\"]:\n ret_value[\"dois\"].append(item[\"DOI\"])\n if len(ret_value[\"dois\"]) == 0:\n msg = \"No monograph/book DOI type found in Crossref ISBN search result ({})!\"\n raise ValueError(msg.format(url))\n else:\n ret_value[\"success\"] = True\n except HTTPError as httpe:\n ret_value['error_msg'] = \"HTTPError: {} - {}\".format(httpe.code, httpe.reason)\n except URLError as urle:\n ret_value['error_msg'] = \"URLError: {}\".format(urle.reason)\n except ValueError as ve:\n ret_value['error_msg'] = str(ve)\n return ret_value", "def test_list_referrals_no_sorting_specified(self):\n user = factories.UserFactory()\n referrals = [\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n ]\n\n self.setup_elasticsearch()\n response = self.client.get(\n f\"/api/referrallites/?user={user.id}\",\n HTTP_AUTHORIZATION=f\"Token {Token.objects.get_or_create(user=user)[0]}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"count\"], 2)\n self.assertEqual(response.json()[\"results\"][0][\"id\"], referrals[1].id)\n self.assertEqual(response.json()[\"results\"][1][\"id\"], referrals[0].id)", "def compare_ao3_rec(db_rec, current_rec, report_gen):\r\n tests = [\r\n 'hits', 'kudos',\r\n 'comments', 'bookmarks', 'ref']\r\n current_dict = current_rec\r\n db_dict = db_rec.__dict__\r\n changed = []\r\n for value_key in tests:\r\n if report_gen.compare_and_print(\r\n current_rec[\"title\"],\r\n value_key,\r\n current_dict,\r\n db_dict) > 0:\r\n setattr(db_rec, value_key, current_dict[value_key])\r\n changed.append(value_key)\r\n return 
changed", "def test_handle_response_value_results_in_node_lookup_callback(self):\n lookup = Lookup(FindValue, self.target, self.node, self.event_loop)\n uuids = [uuid for uuid in lookup.pending_requests.keys()]\n uuid = uuids[0]\n contact = lookup.shortlist[0]\n other_request1 = lookup.pending_requests[uuids[1]]\n other_request2 = lookup.pending_requests[uuids[2]]\n msg = Value(uuid, self.node.network_id, self.node.network_id,\n self.reply_port, self.version, self.seal, self.target,\n 'value', time.time(), time.time() + 99999, self.version,\n PUBLIC_KEY, 'name', 'signature')\n response = asyncio.Future()\n response.set_result(msg)\n lookup._handle_response(uuid, contact, response)\n self.event_loop.run_until_complete(blip())\n # Check the lookup has fired correctly.\n self.assertTrue(lookup.done())\n self.assertEqual(lookup.result(), msg)\n # Check the other requests are cancelled.\n self.assertTrue(other_request1.cancelled())\n self.assertTrue(other_request2.cancelled())\n # Make sure the pending_requests dict is empty.\n self.assertEqual(0, len(lookup.pending_requests))\n # Ensure the contact that provided the result is NOT in the shortlist.\n self.assertNotIn(contact, lookup.shortlist)", "def test_book_related(self):\n client = APIClient()\n client.login(username=self.students[0].username, password=\"salam*123\")\n response = client.get(\"/books/4/related/\")\n json = response.json()\n self.assertEqual(json[\"count\"], 2)\n self.assertEqual(json[\"results\"][0][\"id\"], 5)\n self.assertEqual(json[\"results\"][1][\"id\"], 2)", "def test_get_with_filter_person(mockclient_cl1):\n r = mockclient_cl1.get(TEST_URL + \"?size=100&p=P00022\")\n assert r.status_code == 200\n assert len(r.json[\"statements\"]) == 6", "def validate_post_response(response, status, job, keys=None):\n assert (response[\"status\"]) == status\n json_response = json.loads(response[\"body\"])\n if not keys:\n keys = list(job.keys())\n assert sorted(keys) == sorted(list(json_response.keys()))\n compare(json_response, job, keys)", "def test_build_reference_dupes(self):\n items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]\n obs1, obs2 = build_reference(items, 3)\n self.assertEqual(len(obs1), 3)\n self.assertEqual(len(obs2), 7)\n #check that the ref and nonref are same\n finals = set([])\n for item in obs1:\n if item in finals:\n raise AssertionError(\"Duplicate in reference!\")\n finals.add(item)\n for item in obs2:\n if item in finals:\n raise AssertionError(\"Duplicate in nonreference!\")\n finals.add(item)", "def diff_json(response_data, assert_data):\n if isinstance(response_data, dict):\n \"\"\" dict format \"\"\"\n for key in assert_data:\n if key not in response_data:\n info = \"❌ Response data has no key: {}\".format(key)\n print(info)\n AssertInfo.data.append(info)\n for key in response_data:\n if key in assert_data:\n \"\"\" recursion \"\"\"\n diff_json(response_data[key], assert_data[key])\n else:\n info = \"💡 Assert data has not key: {}\".format(key)\n print(info)\n elif isinstance(response_data, list):\n \"\"\" list format \"\"\"\n if len(response_data) == 0:\n print(\"response is []\")\n if len(response_data) != len(assert_data):\n print(\"list len: '{}' != '{}'\".format(len(response_data), len(assert_data)))\n\n if isinstance(response_data[0], dict):\n response_data = sorted(response_data, key=lambda x: x[list(response_data[0].keys())[0]])\n else:\n response_data = sorted(response_data)\n if isinstance(assert_data[0], dict):\n assert_data = sorted(assert_data, key=lambda x: x[list(assert_data[0].keys())[0]])\n else:\n 
assert_data = sorted(assert_data)\n\n for src_list, dst_list in zip(response_data, assert_data):\n \"\"\" recursion \"\"\"\n diff_json(src_list, dst_list)\n else:\n if str(response_data) != str(assert_data):\n info = \"❌ Value are not equal: {}\".format(response_data)\n print(info)\n AssertInfo.data.append(info)", "def test_intersection(self, client):\n\n expected = {\n 'a': [0,2,4,6,8],\n 'b': [4,6,8,10,12,14,16],\n 'result': [4,6,8]\n }\n\n res = client.post('/api/v1/intersection', json={'a': expected['a'], 'b': expected['b'] })\n assert res.status_code == 200\n assert res.json['data'] == expected['result']\n assert res.json['status'] == 2000", "def is_match(self, response):\n return response.find(' Matched') != -1", "def __eq__(self, other):\n if not isinstance(other, PrefetchResponse):\n return False\n\n return self.__dict__ == other.__dict__", "def fusion_api_validate_response_follow(self, expected, response, uriCache={}, wordy=False, depth=0,\n disable_dict_sorting=False,\n disable_list_sorting=False,\n called_by_logged=False):\n\n tabs = '\\t' * depth\n\n try:\n TEST_NAME = BuiltIn().get_variable_value(\"${TEST NAME}\")\n except:\n TEST_NAME = \"Suite Setup\"\n\n SUITE_NAME = BuiltIn().get_variable_value(\"${SUITE NAME}\")\n\n keyValueErrors = 0\n if BuiltIn().get_variable_value(\"${VALIDATE_ENTIRE_DTO}\"):\n VALIDATE_ENTIRE_DTO = BuiltIn().get_variable_value(\"${VALIDATE_ENTIRE_DTO}\")\n else:\n VALIDATE_ENTIRE_DTO = False\n\n CALLED_BY_LOGGED_MESSAGE = \"First fail in this Fusion API Validate Response Follow.\\nSuite Name: %s, TEST CASE: %s at key: %s, depth: %s\"\n\n for key in expected.keys():\n if wordy:\n logger.info((\"%sKey: %s\" % (tabs, key)), also_console=False)\n\n if expected[key] is None and response[key] is None:\n logger.info((\"%sExpected and response are None: Key %s\" % (tabs, key)), also_console=False)\n elif expected[key] is None and response[key] is not None:\n if (isinstance(response[key], str) or isinstance(response[key], unicode)) and response[key] == '':\n logger.info((\"%sExpected is None and response is empty string: Key %s\" % (tabs, key)), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected is None but something returned in response: Key %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif expected[key] is not None and response[key] is None:\n if (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and expected[key] == '':\n logger.info((\n \"%sExpected is empty string and response is None: Key %s\" % (tabs, key)), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected something but response is None: Key %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if isinstance(expected[key], list):\n if len(expected[key]) == 0 and len(response[key]) == 0:\n continue\n elif len(expected[key]) == 0 and len(response[key]) != 0:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sFor key %s, Expected is empty but actual is not\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n 
else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif len(expected[key]) != 0 and len(response[key]) == 0:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sFor key %s, Actual is empty but expcted is not\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if (key in response) and isinstance(response[key], list) and (len(expected[key]) == len(response[key])):\n # Lists of dictionaries can return in any order. Try to sort\n if isinstance(expected[key][0], dict):\n # logger.info((\"Pre sort Res: %s\" % response[key][0])\n # logger.info((\"Pre sort Exp: %s\" % expected[key][0])\n if not disable_dict_sorting:\n if \"name\" in expected[key][0] and expected[key][0][\"name\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: name\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('name'))\n expected[key] = sorted(expected[key], key=itemgetter('name'))\n if \"userName\" in expected[key][0] and expected[key][0][\"userName\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: userName\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('userName'))\n expected[key] = sorted(expected[key], key=itemgetter('userName'))\n elif \"portName\" in expected[key][0] and expected[key][0][\"portName\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: portName\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('portName'))\n expected[key] = sorted(expected[key], key=itemgetter('portName'))\n elif \"bayNumber\" in expected[key][0] and expected[key][0][\"bayNumber\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: bayNumber\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('bayNumber'))\n expected[key] = sorted(expected[key], key=itemgetter('bayNumber'))\n elif \"enclosureIndex\" in expected[key][0] and expected[key][0][\"enclosureIndex\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: enclosureIndex\") % tabs, also_console=False)\n # First sort on logicalLocation as a dict if it exists, then enclosureIndex.\n # Order of first sort is maintained in the second sort\n if \"logicalLocation\" in expected[key][0] and expected[key][0][\"logicalLocation\"] is not None:\n response[key] = sorted(response[key], key=itemgetter('logicalLocation'))\n expected[key] = sorted(expected[key], key=itemgetter('logicalLocation'))\n response[key] = sorted(response[key], key=itemgetter('enclosureIndex'))\n expected[key] = sorted(expected[key], key=itemgetter('enclosureIndex'))\n elif \"connectionId\" in expected[key][0] and expected[key][0][\"connectionId\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: connectionId\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('connectionId'))\n expected[key] = sorted(expected[key], key=itemgetter('connectionId'))\n elif \"id\" in expected[key][0] and expected[key][0][\"id\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: id\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('id'))\n expected[key] = sorted(expected[key], key=itemgetter('id'))\n elif \"relativeValue\" in expected[key][0] and expected[key][0][\"relativeValue\"] 
is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: relativeValue\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('relativeValue'))\n expected[key] = sorted(expected[key], key=itemgetter('relativeValue'))\n elif \"serialNumber\" in expected[key][0] and expected[key][0][\"serialNumber\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: serialNumber\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('serialNumber'))\n expected[key] = sorted(expected[key], key=itemgetter('serialNumber'))\n elif \"deviceSlot\" in expected[key][0] and expected[key][0][\"deviceSlot\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: deviceSlot\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('deviceSlot'))\n expected[key] = sorted(expected[key], key=itemgetter('deviceSlot'))\n elif \"type\" in expected[key][0] and expected[key][0][\"type\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: type\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('type'))\n expected[key] = sorted(expected[key], key=itemgetter('type'))\n elif \"iSCSIBootAttemptInstance\" in expected[key][0] and expected[key][0][\"iSCSIBootAttemptInstance\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: iSCSIBootAttemptInstance\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('iSCSIBootAttemptInstance'))\n expected[key] = sorted(expected[key], key=itemgetter('iSCSIBootAttemptInstance'))\n elif \"iSCSIAttemptInstance\" in expected[key][0] and expected[key][0][\"iSCSIAttemptInstance\"] is not None:\n if wordy:\n logger.info((\"%sSorting List of Dict by: iSCSIAttemptInstance\") % tabs, also_console=False)\n response[key] = sorted(response[key], key=itemgetter('iSCSIAttemptInstance'))\n expected[key] = sorted(expected[key], key=itemgetter('iSCSIAttemptInstance'))\n else:\n # sort on a key with \"name\" in it, if it has an actual value\n randomkey = \"changeme\"\n for namekey in expected[key][0].keys():\n if (re.match(r'.*name', namekey, re.I)) and (expected[key][0][namekey] is not None):\n randomkey = namekey\n break\n\n # if randomkey not changed then just sort on a random key and hope for the best\n if randomkey == \"changeme\":\n randomkey = random.choice(expected[key][0].keys())\n if wordy:\n logger.info((\"%sSorting List of Dict by random: %s\" % (tabs, namekey)), also_console=False)\n response[key] = sorted(response[key], key=itemgetter(randomkey))\n expected[key] = sorted(expected[key], key=itemgetter(randomkey))\n\n for i in xrange(0, len(expected[key])):\n if isinstance(expected[key][i], dict) or isinstance(expected[key][i], list):\n results, called_by_logged = self.fusion_api_validate_response_follow(expected[key][i], response[key][i], uriCache, wordy, depth + 1, called_by_logged=called_by_logged)\n if not results:\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif isinstance(expected[key][i], int):\n if expected[key][i] == response[key][i]:\n if wordy:\n logger.info((\"%ssimple %s == %s\" % (tabs, expected[key][i], response[key][i])), also_console=False)\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple %s != %s\" % (tabs, expected[key][i], response[key][i]))\n if 
VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n words = expected[key][i].split(\":\")\n if len(words) < 2:\n match = False\n if not disable_list_sorting:\n for j in xrange(0, len(response[key])):\n if expected[key][i] == response[key][j]:\n logger.info((\"%sfound item in list. Will remove 1 matching item: [%s]\" % (tabs, expected[key][i])), also_console=False)\n response[key].pop(j)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't find item in list: [%s]\" % (tabs, expected[key][i]))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif disable_list_sorting:\n if expected[key][i] == response[key][i]:\n logger.info((\"%sFound matching item: [%s]\" % (tabs, expected[key][i])), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sActual value [%s] doesn't match expected value [%s]\" % (tabs, response[key][i], expected[key][i]))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif words[0] == \"REGEX\":\n match = False\n exp = \":\".join(words[1:])\n for j in xrange(0, len(response[key])):\n if re.search(exp, response[key][j], re.M | re.I):\n logger.info((\"%sfound item in list: [%s]\" % (tabs, exp)), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't match item in list: [%s]\" % (tabs, exp))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n match = False\n for j in xrange(0, len(response[key])):\n if re.search('/rest/', response[key][j]):\n resp = self.fusion_api_get_resource(str(response[key][j]))\n if resp['name'] == words[1]:\n logger.info((\"%sfound item in list: [%s]\" % (tabs, words[1])), also_console=False)\n match = True\n break\n elif expected[key][i] == response[key][j]:\n if wordy:\n logger.info((\"%ssimple %s == %s\" % (tabs, expected[key][i], response[key][i])), also_console=False)\n match = True\n break\n if not match:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDidn't find item via uri lookup %s: [exp: %s != ret: %s]\" % (tabs, str(response[key][j]), words[1], resp['name']))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sList item not in Res or diff len list: %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n continue\n\n if isinstance(expected[key], dict):\n if key in response:\n results, called_by_logged = self.fusion_api_validate_response_follow(expected[key], response[key], uriCache, wordy, depth + 1, called_by_logged=called_by_logged)\n if not 
results:\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n continue\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sDict item not in Res: %s\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n # only str, int, bool, unicode left\n if key in response:\n if (isinstance(response[key], str) or isinstance(response[key], unicode)) and re.search(r'/rest/', response[key], re.I):\n words = expected[key].split(\":\")\n compare_as_is = False\n compare_as_regex = False\n if len(words) < 2:\n if wordy:\n logger.info((\"%sExpected did not split into type,name: %s\" % (tabs, expected[key])), also_console=False)\n logger.info((\"%swill compare as is.\") % tabs, also_console=False)\n compare_as_is = True\n exp_name = words[0]\n else:\n if key == 'serverHardwareTypeUri':\n if words[0] == \"SHT\":\n logger.info((\"%sSHT lookup. Call 'Get Server Hardware Type URI By Name And Mezz' for: %s\" % (tabs, expected[key])), also_console=False)\n sht_uri = BuiltIn().run_keyword(\"Get Server Hardware Type URI By Name And Mezz\", \":\".join(words[1:]))\n else: # support for SHT Uri lookup by ServerHardware (SH:wpst14, bay 1)\n logger.info((\"SH lookup for SHT: %s\" % expected[key]), also_console=False)\n sh_resp = BuiltIn().run_keyword(\"Get Resource\", expected[key])\n sht_uri = sh_resp['serverHardwareTypeUri']\n\n sht_resp = self.fusion_api_get_resource(sht_uri)\n exp_name = sht_resp['name']\n else:\n if words[0] == \"REGEX\":\n compare_as_regex = True\n exp_name = \":\".join(words[1:])\n\n if wordy:\n logger.info((\"%sResponse has URI, get uri: %s\" % (tabs, response[key])), also_console=False)\n logger.info((\"%sExpecting name: %s\" % (tabs, exp_name)), also_console=False)\n\n if compare_as_regex:\n found = re.search(exp_name, response[key], re.M | re.I)\n msg = \"[\" + key + \"] \" + exp_name + \" vs \" + response[key]\n if found:\n if wordy:\n logger.info((\"%sregex match %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sregex not match %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif compare_as_is:\n msg = \"[\" + key + \"] \" + expected[key] + \" vs \" + response[key]\n if expected[key] != response[key]:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple != %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if wordy:\n logger.info((\"%ssimple == %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n if response[key] in uriCache:\n if wordy:\n msg = \"[\" + key + \"] \" + response[key] + \" --> \" + uriCache[response[key]]\n logger.info((\"%suriCache lookup %s\" % (tabs, msg)), also_console=False)\n resp_name = uriCache[response[key]]\n else:\n resp = self.fusion_api_get_resource(str(response[key]))\n resp_name = resp['name']\n uriCache[response[key]] = resp_name\n if wordy:\n msg = response[key] + \" --> \" + resp_name\n logger.info((\"%sGET uri and 
save in cache %s\" % (tabs, msg)), also_console=False)\n\n if resp_name != exp_name:\n msg = \"[\" + key + \"] \" + exp_name + \" vs \" + resp_name\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sExpected Name does not match URI name: %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and (expected[key].find(\"REGEX:\") > -1):\n words = expected[key].split(\":\")\n pattern = \":\".join(words[1:])\n found = re.search(pattern, str(response[key]), re.M | re.I)\n msg = \"[\" + key + \"] \" + pattern + \" vs \" + str(response[key])\n if found:\n if wordy:\n logger.info((\"%sregex match %s\" % (tabs, msg)), also_console=False)\n continue\n else:\n logger.warn(\"%sregex not match %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n elif (isinstance(expected[key], str) or isinstance(expected[key], unicode)) and (expected[key].find(\"RANGE:\") > -1):\n words = expected[key].split(\":\")\n wmin = words[1]\n wmax = words[2]\n msg = wmin + \" - \" + wmax + \":\" + str(response[key])\n if (int(response[key]) >= int(wmin)) and (int(response[key]) <= int(wmax)):\n if wordy:\n logger.info((\"%s%s Value in Range: %s\" % (tabs, key, msg)), also_console=False)\n continue\n else:\n logger.warn(\"%s%s Value NOT in Range: %s\" % (tabs, key, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n elif str(expected[key]) != str(response[key]):\n msg = \"[\" + key + \"] \" + str(expected[key]) + \" vs \" + str(response[key])\n if \"name\" != key and \"name\" in expected:\n msg = \"@ dict name=%s : %s\" % (str(expected[\"name\"]), msg)\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%ssimple != %s\" % (tabs, msg))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n else:\n if wordy:\n logger.info((\"%sExpected match response: %s\" % (tabs, expected[key])), also_console=False)\n\n else:\n if not called_by_logged:\n logger.warn(CALLED_BY_LOGGED_MESSAGE % (SUITE_NAME, TEST_NAME, key, depth))\n called_by_logged = True\n logger.warn(\"%sResponse does not have key %s:\" % (tabs, key))\n if VALIDATE_ENTIRE_DTO:\n keyValueErrors += 1\n else:\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n if keyValueErrors:\n logger.warn(\"%sDTO had %s failing keys:\" % (tabs, keyValueErrors))\n if depth == 0:\n return False\n else:\n return False, called_by_logged\n\n else:\n if depth == 0:\n return True\n else:\n return True, called_by_logged", "def __eq__(self, other):\n if not isinstance(other, TravelRecordResponse):\n return False\n\n return self.__dict__ == other.__dict__", "def compare_dicts(dict1, dict2, dict1_name=\"d1\", dict2_name=\"d2\", path=\"\"):\n # Setup paths to track key exploration. 
The path parameter is used to allow\n # recursive comparisions and track what's being compared.\n result = True\n for key in dict1.keys():\n dict1_path = \"{}{}[{}]\".format(dict1_name, path, key)\n dict2_path = \"{}{}[{}]\".format(dict2_name, path, key)\n if key not in dict2.keys():\n log.debug(\"%s not a valid key in %s.\", dict1_path, dict2_path)\n result = False\n elif isinstance(dict1[key], dict) and isinstance(dict2[key], dict):\n log.debug(\n \"%s and %s contain dictionary. Evaluating.\", dict1_path,\n dict2_path\n )\n result = compare_dicts(\n dict1[key], dict2[key], dict1_name, dict2_name,\n path=\"[{}]\".format(key)\n )\n elif isinstance(dict1[key], list) and isinstance(dict2[key], list):\n log.debug(\n \"%s and %s key '%s' contains list. Validating dict1 items \"\n \"exist in dict2.\", dict1_path, dict2_path, key\n )\n if not all([bool(item in dict2[key]) for item in dict1[key]]):\n log.debug(\n \"Mismatch: %s value is '%s' while %s value is '%s'.\",\n dict1_path, dict1[key], dict2_path, dict2[key]\n )\n result = False\n # Hack for NetBox v2.6.7 requiring integers for some values\n elif key in [\"status\", \"type\"]:\n if dict1[key] != dict2[key][\"value\"]:\n log.debug(\n \"Mismatch: %s value is '%s' while %s value is '%s'.\",\n dict1_path, dict1[key], dict2_path, dict2[key][\"value\"]\n )\n result = False\n elif dict1[key] != dict2[key]:\n log.debug(\n \"Mismatch: %s value is '%s' while %s value is '%s'.\",\n dict1_path, dict1[key], dict2_path, dict2[key]\n )\n # Allow the modification of device sites by ignoring the value\n if \"site\" in path and key == \"name\":\n log.debug(\"Site mismatch is allowed. Moving on.\")\n else:\n result = False\n if result:\n log.debug(\"%s and %s values match.\", dict1_path, dict2_path)\n else:\n log.debug(\"%s and %s values do not match.\", dict1_path, dict2_path)\n return result\n log.debug(\"Final dictionary compare result: %s\", result)\n return result", "def calculate_exact_match(pred_dict, ref_dict):\n num, em = 0, 0\n for key in pred_dict.keys():\n num += 1\n if ' '.join(pred_dict[key]).strip() == ' '.join(ref_dict[key]).strip():\n em += 1\n return em / num * 100", "def test_v1_alert_ref_list_get(self):\n pass", "def find_real_ref(ref, ref_list):\n for r in reversed(ref_list):\n if r == ref:\n return r\n return None", "def test_get_request_body(self):\n\n batch = ReferenceBatchRequest()\n\n # no references\n expected_return = []\n body = batch.get_request_body()\n self.assertEqual(body, expected_return)\n\n # add a reference\n batch.add(\"fd5af656-7d86-40da-9577-845c98e75543\", \"Griptape\", \"color\",\n \"1c51b14d-1652-4225-8dfc-7f4079616f65\")\n body = batch.get_request_body()\n expected_return.append({\n \"from\": \"weaviate://localhost/Griptape/fd5af656-7d86-40da-9577-845c98e75543/color\",\n \"to\": \"weaviate://localhost/1c51b14d-1652-4225-8dfc-7f4079616f65\"\n })\n self.assertEqual(body, expected_return)\n\n # add another reference\n batch.add(\"fd5af656-7d86-40da-9577-845c98e75511\", \"Griptape\", \"length\",\n \"1c51b14d-1652-4225-8dfc-7f4079616f66\")\n body = batch.get_request_body()\n expected_return.append({\n \"from\": \"weaviate://localhost/Griptape/fd5af656-7d86-40da-9577-845c98e75511/length\",\n \"to\": \"weaviate://localhost/1c51b14d-1652-4225-8dfc-7f4079616f66\"\n })\n self.assertEqual(body, expected_return)", "def is_ref(frag):\n return isinstance(frag, dict) and \\\n frag.get('type') and \\\n frag.get('id')", "def __find_correlations(self, results):\n\n for result in results[:self.__result_limit]:\n\n # pub without 
venue\n if len(result['ven']) == 0:\n result['alternative'] = []\n\n with self.vix.searcher(weighting=Frequency) as vs:\n vq_parse = QueryParser('key', self.vix.schema).parse(result['pub']['crossref'])\n tresult = vs.search(vq_parse, limit=None, )\n if len(tresult) != 0:\n result['ven'] = {}\n result['added'] = 1\n for attr in tresult[0].items():\n result['ven'][attr[0]] = attr[1]\n\n self.__output.append(result)\n\n # venue without pub or venue with a list of pubs\n elif len(result['pub']) == 0 or (\n isinstance(result['pub'], list) and len(result['pub']) > 1):\n result['alternative'] = []\n\n with self.pix.searcher(weighting=Frequency) as ps:\n pq_parse = QueryParser('crossref', self.pix.schema).parse(result['ven']['key'])\n tresult = ps.search(pq_parse, limit=None, )\n\n if len(tresult):\n plist = []\n tmp = dict()\n for el in tresult:\n for attr in el.items():\n if attr[0] == 'title' and attr[1] not in [x['title'] for x in result['pub']]:\n plist.append(attr[1])\n break\n\n result['alternative'] = plist\n self.__output.append(result)\n\n # mixed case\n elif len(self.__output) == 0 or not result['ven']['key'] in [x['key'] for x in self.__output]:\n lis = [x for x in results if len(x['ven']) and x['ven']['key'] == result['ven']['key']]\n tmp = {}\n if len(lis) <= 1:\n tmp = {'key': result['pub']['key'],\n 'score': result['score'],\n 'pub': [x['pub'] for x in lis],\n 'ven': result['ven'],\n 'alternative': list()}\n else:\n tmp = {'key': result['ven']['key'],\n 'score': result['score'],\n 'pub': [x['pub'] for x in lis],\n 'ven': result['ven'],\n 'alternative': list()}\n plist = []\n with self.pix.searcher() as ps:\n pq_parse = QueryParser('crossref', self.pix.schema).parse(tmp['key'])\n tresult = ps.search(pq_parse, limit=None, )\n if len(tresult):\n for el in tresult:\n for attr in el.items():\n if attr[0] == 'title' and attr[1] not in [x['title'] for x in tmp['pub']]:\n plist.append(attr[1])\n break\n\n tmp['alternative'] = plist\n self.__output.append(tmp)", "def test_response(self):\n for i, response in enumerate(RESPONSES):\n with self.subTest(i=i):\n self.assertDictContainsSubset(response, dict(self.responses[i].data))", "def test_list_referrals_by_desc_state(self):\n user = factories.UserFactory()\n referrals = [\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.ANSWERED,\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n ]\n\n self.setup_elasticsearch()\n response = self.client.get(\n f\"/api/referrallites/?user={user.id}&sort=state_number&sort_dir=desc\",\n HTTP_AUTHORIZATION=f\"Token {Token.objects.get_or_create(user=user)[0]}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"count\"], 2)\n self.assertEqual(response.json()[\"results\"][0][\"id\"], referrals[0].id)\n self.assertEqual(response.json()[\"results\"][1][\"id\"], referrals[1].id)", "def test_response_reusage(self):\n\n post1 = self._create_db_post(content=\"@test I need a foo.\",\n channel=self.sc.inbound,\n demand_matchables=True,\n user_profile={'screen_name': 'customer'})\n self.assertTrue(self.sc.inbound_channel.is_assigned(post1))\n\n conv1 = self.sc.upsert_conversation(post1)\n post2 = self._create_db_post(content=\"I still need a foo!\",\n channel=self.sc.inbound,\n demand_matchables=True,\n user_profile={'screen_name': 
'customer'})\n conv2 = self.sc.upsert_conversation(post2)\n\n resp1 = Response.objects.upsert_from_post(post1)\n resp2 = Response.objects.upsert_from_post(post2)\n self.assertEqual(conv1.id, conv2.id)\n self.assertEqual(resp1.id, resp2.id)\n self.assertTrue(resp2.post_date > resp1.post_date)", "def compare_event(a: dict, b: dict):\n\n for key, value in a.items():\n assert key in b\n\n if key not in b:\n continue\n\n if key == \"Detail\" and isinstance(value, str):\n value = json.loads(value)\n b[key] = json.loads(b[key])\n\n if isinstance(value, dict):\n compare_event(value, b[key])\n else:\n assert value == b[key]", "def test_difference(self, client):\n\n expected = {\n 'a': [0,2,4,6,8],\n 'b': [4,6,8,10,12,14,16],\n 'result': [0, 2]\n }\n\n res = client.post('/api/v1/difference', json={'a': expected['a'], 'b': expected['b'] })\n assert res.status_code == 200\n assert res.json['data'] == expected['result']\n assert res.json['status'] == 2000", "def __eq__(self, other):\n if not isinstance(other, ChannelReturnResponse):\n return False\n\n return self.to_dict() == other.to_dict()", "def __eq__(self, other):\n if not isinstance(other, Response):\n return False\n\n return self.__dict__ == other.__dict__", "def match(self, data_instance: Dict[str, Any]) -> bool:", "def test_list_referrals_by_asc_object(self):\n user = factories.UserFactory()\n referrals = [\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"A - first by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"b - second by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"é - third by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n factories.ReferralFactory(\n state=models.ReferralState.RECEIVED,\n object=\"G - fourth by alphabetical order\",\n post__users=[user],\n urgency_level=models.ReferralUrgency.objects.get(\n duration=timedelta(days=1)\n ),\n ),\n ]\n\n self.setup_elasticsearch()\n response = self.client.get(\n f\"/api/referrallites/?user={user.id}&sort=object.keyword&sort_dir=asc\",\n HTTP_AUTHORIZATION=f\"Token {Token.objects.get_or_create(user=user)[0]}\",\n )\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.json()[\"count\"], 4)\n self.assertEqual(response.json()[\"results\"][0][\"id\"], referrals[0].id)\n self.assertEqual(response.json()[\"results\"][1][\"id\"], referrals[1].id)\n self.assertEqual(response.json()[\"results\"][2][\"id\"], referrals[2].id)\n self.assertEqual(response.json()[\"results\"][3][\"id\"], referrals[3].id)", "def test_get_with_filter_person_statement(mockclient_cl1):\n r = mockclient_cl1.get(TEST_URL + \"?size=100&s=S00063&p=P00063\")\n #assert r.status_code == 400\n assert r.status_code == 200\n assert r.json[\"statements\"][0][\"@id\"] == \"S00063\"\n r = mockclient_cl1.get(TEST_URL + \"?size=100&s=S00036&p=P00064\")\n assert r.status_code == 404", "def compare(isamAppliance1, isamAppliance2):\n ret_obj1 = get_all(isamAppliance1)\n ret_obj2 = get_all(isamAppliance2)\n\n return tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])", "def compare(isamAppliance1, isamAppliance2):\n ret_obj1 = get_all(isamAppliance1)\n ret_obj2 = 
get_all(isamAppliance2)\n\n return tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])", "def get_mex_status(request):\n flag = False #flag that is used for indicating successful search of MEx in MEx list\n response = GetMexStatusResponse() #create response object \n for i in mex_list: #search for MEx in MEx list\n if i.id == request.mex_id:\n response.mex_status = i.status.name #MEx status query 'name' method from enum object returns text\n response.job_id = str(i.job_id)\n flag = True\n if flag == True: #if we found requested Mex in the list, together with status and job id we return bool success \n response.success = True\n return response\n else:\n response.success = False\n return response", "def dereference(\n self, val: Dict[str, Any], loop_set: Optional[Set[int]] = None\n ) -> Dict[str, Any]:\n if not isinstance(val, Dict):\n return val\n\n if len(val) == 1 and \"$ref\" in val:\n if loop_set is None:\n loop_set = set()\n\n if id(val) in loop_set:\n raise ValueError(\"$ref loop detected\")\n\n val = dereference_json_pointer(self.data, val[\"$ref\"])\n loop_set.add(id(val))\n return self.dereference(val, loop_set)\n\n return val", "def _retrieve_html_diff(self, oldrevid, newrevid):\n req = requests.get(self.endpoint,\n {\n 'action': 'compare',\n 'fromrev': oldrevid,\n 'torev': newrevid,\n 'uselang': 'en',\n 'format': 'json',\n })\n req.raise_for_status()\n return req.json().get('compare', {}).get('*')", "def test_process_response(self):\n t = self.create_request_object()\n response_content = u\"\"\"<ODM FileType=\"Snapshot\" FileOID=\"\" CreationDateTime=\"\" ODMVersion=\"1.3\"\nxmlns:mdsol=\"http://www.mdsol.com/ns/odm/metadata\" xmlns=\"http://www.cdisc.org/ns/odm/v1.3\">\n <Study OID=\"Lab Test\">\n <GlobalVariables>\n <StudyName>Lab Test</StudyName>\n <StudyDescription />\n <ProtocolName>Lab Test</ProtocolName>\n </GlobalVariables>\n </Study>\n <Study OID=\"Mediflex\">\n <GlobalVariables>\n <StudyName>Mediflex</StudyName>\n <StudyDescription />\n <ProtocolName>Mediflex</ProtocolName>\n </GlobalVariables>\n </Study>\n</ODM>\"\"\"\n req = mock.Mock(requests.Request, text=response_content)\n response = t.result(req)\n self.assertTrue(isinstance(response, RWSStudies))\n for study in response:\n self.assertTrue(study.oid in ['Lab Test', 'Mediflex'])", "def __eq__(self, other):\n if not isinstance(other, AttachmentResponse):\n return False\n\n return self.__dict__ == other.__dict__", "def compare_results(self, benchmark_result, reference_benchmark_result):\n pass", "def _resolve_dict_entry(self, doc_uri, main_doc, obj):\n # Interpret '$ref' key if present in obj\n if '$ref' in obj:\n result = self._load_ref(doc_uri, main_doc, obj['$ref'])\n else:\n result = self.dict_class()\n # Merge values from obj with result\n for k, v in obj.items():\n if k != '$ref':\n result[k] = self._resolve(doc_uri, main_doc, v)\n return result", "def sparql_compare_ont(obj):\n if not 'ontology_purl' in obj:\n return\n purl = obj['ontology_purl']\n id = obj['id']\n # this could be made more declarative, or driven by the context.jsonld mapping;\n # however, for now this is relatively simple and easy to understand:\n license = obj['license']['url'] if 'license' in obj else ''\n run_sparql(obj, 'license', license, \"SELECT DISTINCT ?license WHERE {<\"+purl+\"> <http://purl.org/dc/elements/1.1/license> ?license}\")\n run_sparql(obj, 'title', obj['title'] if 'title' in obj else '', \"SELECT DISTINCT ?title WHERE {<\"+purl+\"> <http://purl.org/dc/elements/1.1/title> ?title}\")\n run_sparql(obj, 'description', 
obj['description'] if 'description' in obj else '', \"SELECT DISTINCT ?description WHERE {<\"+purl+\"> <http://purl.org/dc/elements/1.1/description> ?description}\")\n run_sparql(obj, 'homepage', obj['homepage'] if 'homepage' in obj else '', \"SELECT DISTINCT ?homepage WHERE {<\"+purl+\"> <http://xmlns.com/foaf/0.1/homepage> ?homepage}\")", "def test_do_get__success(self, mock_get_approvers):\n mock_get_approvers.return_value = ['reviewer1@example.com']\n\n with test_app.test_request_context(self.request_path):\n actual = self.handler.do_get(feature_id=self.feature_id)\n\n expected = {\n \"gates\": [\n {\n \"id\": 1,\n \"feature_id\": self.feature_id,\n \"stage_id\": 1,\n \"gate_type\": 1,\n \"team_name\": \"API Owners\",\n \"gate_name\": \"Intent to Prototype\",\n \"state\": 1,\n \"requested_on\": None,\n \"responded_on\": None,\n \"owners\": [],\n \"next_action\": None,\n \"additional_review\": False,\n 'slo_initial_response': 5,\n 'slo_initial_response_took': None,\n 'slo_initial_response_remaining': None,\n },\n ],\n \"possible_owners\": {\n 1: [\"reviewer1@example.com\"],\n 2: [\"reviewer1@example.com\"],\n 3: [\"reviewer1@example.com\"],\n 4: [\"reviewer1@example.com\"],\n 32: [\"reviewer1@example.com\"],\n 34: [\"reviewer1@example.com\"],\n 42: [\"reviewer1@example.com\"],\n 44: [\"reviewer1@example.com\"],\n 54: [\"reviewer1@example.com\"],\n 62: [\"reviewer1@example.com\"],\n 64: [\"reviewer1@example.com\"],\n 74: [\"reviewer1@example.com\"],\n }}\n\n self.assertEqual(actual, expected)", "def is_item_in_the_response(key, value, jsonResponse):\n flag = False\n for item in jsonResponse:\n if type(jsonResponse[item]) == int:\n if item == key and jsonResponse[item] == int(value):\n flag = True\n\n if type(jsonResponse[item]) == str:\n if item == key and jsonResponse[item] == str(value):\n flag = True\n\n if type(jsonResponse[item]) == bool:\n if item == key and jsonResponse[item] == bool(value):\n flag = True\n else:\n #log and error\n pass\n return flag", "def test_get_filtered_list(self):\n flexmock(errata).should_receive(\"Advisory\").and_return(None)\n\n response = flexmock(status_code=200)\n response.should_receive(\"json\").and_return(test_structures.example_erratum_filtered_list)\n\n flexmock(errata.requests).should_receive(\"get\").and_return(response)\n\n res = errata.get_filtered_list()\n self.assertEqual(2, len(res))", "def check_valid_report(context):\n response = context.response.json()\n if context.history:\n assert(isinstance(response['objects'], list))\n else:\n assert(isinstance(response, dict))", "def test_handle_response_all_shortlist_contacted_value_not_found(self):\n lookup = Lookup(FindValue, self.target, self.node, self.event_loop)\n lookup._lookup = mock.MagicMock()\n uuids = [uuid for uuid in lookup.pending_requests.keys()]\n uuid = uuids[0]\n contact = lookup.shortlist[0]\n # Only one item in pending_requests\n for i in range(1, len(uuids)):\n del lookup.pending_requests[uuids[i]]\n self.assertEqual(1, len(lookup.pending_requests))\n # Add K items from shortlist to the contacted set.\n for contact in lookup.shortlist:\n lookup.contacted.add(contact)\n # Cause the lookup to fire.\n msg = Nodes(uuid, self.node.network_id, self.node.network_id,\n self.reply_port, self.version, self.seal,\n self.contacts)\n response = asyncio.Future()\n response.set_result(msg)\n lookup._handle_response(uuid, contact, response)\n # The _lookup method should not be called.\n self.assertEqual(lookup._lookup.call_count, 0)\n # The lookup task has fired.\n 
self.assertTrue(lookup.done())\n with self.assertRaises(ValueNotFound) as result:\n lookup.result()\n self.assertIsInstance(result.exception, ValueNotFound)\n self.assertEqual(result.exception.args[0],\n \"Unable to find value for key: {}\"\n .format(self.target))", "def compare(har1_path, har2_path):\n har_file_paths1 = get_paths(har1_path)\n har_file_paths2 = get_paths(har2_path)\n\n encoded_urls1 = set([get_encoded_url(har) for har in har_file_paths1])\n encoded_urls2 = set([get_encoded_url(har) for har in har_file_paths2])\n\n url_intersection = list(encoded_urls1.intersection(encoded_urls2))\n urls_not_in_har1 = encoded_urls2 - encoded_urls1\n urls_not_in_har2 = encoded_urls1 - encoded_urls2\n\n if urls_not_in_har1:\n print 'URLs missing from {0}:'.format(har1_path)\n print urls_not_in_har1\n if urls_not_in_har2:\n print 'URLs missing from {0}:'.format(har2_path)\n print urls_not_in_har2\n\n print '\\n'\n print 'Comparing {0} urls (num urls in both dirs)'.format(\n str(len(url_intersection)))\n\n total_har1_resource_count = Counter()\n total_har2_resource_count = Counter()\n\n for encoded_url in url_intersection:\n decoded_url = urlsafe_b64decode(encoded_url)\n\n har1_dict = get_har_dict_from_file(\n path.join(har1_path, encoded_url + '.har'))\n har2_dict = get_har_dict_from_file(\n path.join(har2_path, encoded_url + '.har'))\n\n har1_status_code_count = get_status_code_count(har1_dict)\n har2_status_code_count = get_status_code_count(har2_dict)\n\n har1_response_size = get_total_body_size(har1_dict)\n har2_response_size = get_total_body_size(har2_dict)\n\n har1_resource_type_count = get_resource_count(har1_dict)\n har2_resource_type_count = get_resource_count(har2_dict)\n\n total_har1_resource_count.update(har1_resource_type_count)\n total_har2_resource_count.update(har2_resource_type_count)\n\n print 'Count for {0}:'.format(decoded_url)\n print har1_status_code_count\n print har2_status_code_count\n print 'Resource type counts for {0}:'.format(decoded_url)\n print har1_resource_type_count\n print har2_resource_type_count\n print 'Resource count difference: {0}'.format(\n abs(\n sum(har1_resource_type_count.values()) -\n sum(har2_resource_type_count.values())))\n print 'Response body sizes'\n print har1_response_size\n print har2_response_size\n print '======================================================='\n\n print 'Total resource counts'\n print total_har1_resource_count\n print total_har2_resource_count", "def process_response(self, response: Dict) -> Iterator[dict]:", "def check(self):\n out = self.LeggiDocumento('000')\n if 'success' in out and 'result' in out:\n out.pop('result')\n else:\n out.pop('request')\n return out", "def test_get_data(self):\n remote_data = get_data('http://www.mocky.io/v2/5e539b332e00007c002dacbe')\n with open('response.json') as fin:\n local_data = json.load(fin)\n self.assertDictEqual(remote_data, local_data)", "def __eq__(self, other):\n if not isinstance(other, QuickSearchResponse):\n return False\n\n return self.to_dict() == other.to_dict()", "def is_project_in_the_response(projectComponent, response):\n for project in response:\n if response[project] == projectComponent:\n return True\n return False", "def verify_response(self, system_name, expected_api_response,\n expected_response_type, comparison_mode,\n request_id=None, generate_output_diff_file=\"Yes\"):\n arguments = {'system_name': system_name,\n 'expected_api_response': expected_api_response,\n 'expected_response_type': expected_response_type,\n 'comparison_mode': comparison_mode,\n 
'request_id': request_id,\n 'generate_output_diff_file': generate_output_diff_file}\n wdesc = \"Verify API response with the expected API response\"\n pNote(wdesc)\n output_file = self.logsdir+\"/difference_output.log\"\n output_file = Utils.file_Utils.addTimeDate(output_file)\n generate_output_diff_file = Utils.rest_Utils.\\\n resolve_value_of_verify(generate_output_diff_file)\n\n try:\n arguments[\"expected_api_response\"] = Utils.rest_Utils.\\\n check_ext_get_abspath(arguments[\"expected_api_response\"],\n self.tc_path)\n\n credentials = Utils.data_Utils.\\\n get_user_specified_tag_values_in_tc(self.datafile, **arguments)\n\n credentials[\"expected_api_response\"] = Utils.rest_Utils.\\\n check_ext_get_abspath(credentials[\"expected_api_response\"],\n os.path.dirname(self.datafile))\n\n if request_id:\n response = Utils.data_Utils.get_object_from_datarepository(\n \"{0}_{1}_api_response_object\".format(system_name,\n credentials['request_id']))\n else:\n response = Utils.data_Utils.get_object_from_datarepository(\n \"{0}_api_response_object\".format(system_name))\n except Exception as exception:\n pNote(exception, \"error\")\n return False\n if any([x in credentials[\"comparison_mode\"] for x in [\"xpath=\", \"jsonpath=\", \"regex=\"]]) \\\n or credentials[\"comparison_mode\"] == \"\":\n status = self.rest_object.cmp_content_response(self.datafile, system_name, response,\n credentials['expected_api_response'],\n credentials['expected_response_type'],\n credentials['comparison_mode'])\n else:\n status = self.rest_object.cmp_response(response,\n credentials['expected_api_response'],\n credentials['expected_response_type'],\n output_file,\n credentials['generate_output_diff_file'])\n return status", "def handle_ref(self, data, **kwargs):\n\n # Backward compatibility: branch and ref were both used. 
Let's keep branch as the exposed field\n # even if interally it gets converted to \"ref\" later.\n if data.get(\"ref\"):\n data[\"branch\"] = data[\"ref\"]\n del data[\"ref\"]\n\n return data", "def verify_queue_stats(*get_response):\n\n test_result_flag = True\n headers = get_response[0]\n body = json.loads(get_response[1])\n\n keys_in_body = body.keys()\n keys_in_body.sort()\n\n if (keys_in_body == [\"actions\", \"messages\"]):\n stats = body[\"messages\"]\n keys_in_stats = stats.keys()\n keys_in_stats.sort()\n if (keys_in_stats == [\"claimed\", \"free\"]) :\n try:\n int(stats[\"claimed\"])\n int(stats[\"free\"])\n except:\n test_result_flag = False\n else:\n test_result_flag = False\n else:\n test_result_flag = False\n\n if test_result_flag:\n return test_result_flag\n else:\n print headers\n print body\n assert test_result_flag, \"Get Request stats failed\"", "def __call__(self, json_res):\r\n id2hyps = {\r\n res['clip_id']: [_remove_nonascii(res['descs'][0]['desc'].strip())]\r\n for res in json_res\r\n }\r\n id2hyps = self.tokenizer.tokenize(id2hyps)\r\n assert len(id2hyps) == len(self.id2refs)\r\n\r\n ret_scores = {}\r\n for scorer, method in self.scorers:\r\n print(f\"Computing {method} score...\")\r\n score, scores = scorer.compute_score(self.id2refs, id2hyps)\r\n if isinstance(method, list):\r\n for sc, scs, m in zip(score, scores, method):\r\n ret_scores[m] = sc * 100\r\n else:\r\n ret_scores[method] = score * 100\r\n\r\n return ret_scores", "def match(self, result: dict):\n if self._matchStatus(result['Status']):\n if self._comparator['Length']:\n return self._matchLength(int(result['Length']))\n if self._comparator['Time']:\n return self._matchTime(result['Time Taken'])\n return True\n return False", "def compare_contract_results(sub, d1, contract, sub_contract, parent_duns, duns, dom_country, int_country, created_at,\n updated_at, debug=False):\n attr = {\n 'created_at': created_at,\n 'updated_at': updated_at,\n 'id': sub.id,\n\n 'unique_award_key': d1.unique_award_key,\n 'award_id': contract.contract_number,\n 'parent_award_id': contract.idv_reference_number,\n 'award_amount': contract.dollar_obligated,\n 'action_date': str(contract.date_signed),\n 'fy': 'FY{}'.format(fy(contract.date_signed)),\n 'awarding_agency_code': d1.awarding_agency_code,\n 'awarding_agency_name': d1.awarding_agency_name,\n 'awarding_sub_tier_agency_c': contract.contracting_office_aid,\n 'awarding_sub_tier_agency_n': contract.contracting_office_aname,\n 'awarding_office_code': contract.contracting_office_id,\n 'awarding_office_name': contract.contracting_office_name,\n 'funding_agency_code': d1.funding_agency_code,\n 'funding_agency_name': d1.funding_agency_name,\n 'funding_sub_tier_agency_co': contract.funding_agency_id,\n 'funding_sub_tier_agency_na': contract.funding_agency_name,\n 'funding_office_code': contract.funding_office_id,\n 'funding_office_name': contract.funding_office_name,\n 'awardee_or_recipient_uniqu': contract.duns,\n 'awardee_or_recipient_legal': contract.company_name,\n 'dba_name': contract.dba_name,\n 'ultimate_parent_unique_ide': contract.parent_duns,\n 'ultimate_parent_legal_enti': contract.parent_company_name,\n 'legal_entity_country_code': contract.company_address_country,\n 'legal_entity_country_name': dom_country.country_name,\n 'legal_entity_address_line1': contract.company_address_street,\n 'legal_entity_city_name': contract.company_address_city,\n 'legal_entity_state_code': contract.company_address_state,\n 'legal_entity_state_name': contract.company_address_state_name,\n 
'legal_entity_zip': contract.company_address_zip,\n 'legal_entity_congressional': contract.company_address_district,\n 'legal_entity_foreign_posta': None,\n 'business_types': contract.bus_types,\n 'place_of_perform_city_name': contract.principle_place_city,\n 'place_of_perform_state_code': contract.principle_place_state,\n 'place_of_perform_state_name': contract.principle_place_state_name,\n 'place_of_performance_zip': contract.principle_place_zip,\n 'place_of_perform_congressio': contract.principle_place_district,\n 'place_of_perform_country_co': contract.principle_place_country,\n 'place_of_perform_country_na': int_country.country_name,\n 'award_description': d1.award_description,\n 'naics': contract.naics,\n 'naics_description': d1.naics_description,\n 'cfda_numbers': None,\n 'cfda_titles': None,\n\n 'subaward_type': 'sub-contract',\n 'subaward_report_year': contract.report_period_year,\n 'subaward_report_month': contract.report_period_mon,\n 'subaward_number': sub_contract.subcontract_num,\n 'subaward_amount': sub_contract.subcontract_amount,\n 'sub_action_date': str(sub_contract.subcontract_date),\n 'sub_awardee_or_recipient_uniqu': sub_contract.duns,\n 'sub_awardee_or_recipient_legal': sub_contract.company_name,\n 'sub_dba_name': sub_contract.dba_name,\n 'sub_ultimate_parent_unique_ide': sub_contract.parent_duns,\n 'sub_ultimate_parent_legal_enti': sub_contract.parent_company_name,\n 'sub_legal_entity_country_code': sub_contract.company_address_country,\n 'sub_legal_entity_country_name': int_country.country_name,\n 'sub_legal_entity_address_line1': sub_contract.company_address_street,\n 'sub_legal_entity_city_name': sub_contract.company_address_city,\n 'sub_legal_entity_state_code': sub_contract.company_address_state,\n 'sub_legal_entity_state_name': sub_contract.company_address_state_name,\n 'sub_legal_entity_zip': None,\n 'sub_legal_entity_congressional': sub_contract.company_address_district,\n 'sub_legal_entity_foreign_posta': sub_contract.company_address_zip,\n 'sub_business_types': sub_contract.bus_types,\n 'sub_place_of_perform_city_name': sub_contract.principle_place_city,\n 'sub_place_of_perform_state_code': sub_contract.principle_place_state,\n 'sub_place_of_perform_state_name': sub_contract.principle_place_state_name,\n 'sub_place_of_performance_zip': sub_contract.principle_place_zip,\n 'sub_place_of_perform_congressio': sub_contract.principle_place_district,\n 'sub_place_of_perform_country_co': sub_contract.principle_place_country,\n 'sub_place_of_perform_country_na': dom_country.country_name,\n 'subaward_description': sub_contract.overall_description,\n 'sub_high_comp_officer1_full_na': sub_contract.top_paid_fullname_1,\n 'sub_high_comp_officer1_amount': sub_contract.top_paid_amount_1,\n 'sub_high_comp_officer2_full_na': sub_contract.top_paid_fullname_2,\n 'sub_high_comp_officer2_amount': sub_contract.top_paid_amount_2,\n 'sub_high_comp_officer3_full_na': sub_contract.top_paid_fullname_3,\n 'sub_high_comp_officer3_amount': sub_contract.top_paid_amount_3,\n 'sub_high_comp_officer4_full_na': sub_contract.top_paid_fullname_4,\n 'sub_high_comp_officer4_amount': sub_contract.top_paid_amount_4,\n 'sub_high_comp_officer5_full_na': sub_contract.top_paid_fullname_5,\n 'sub_high_comp_officer5_amount': sub_contract.top_paid_amount_5,\n\n 'prime_id': contract.id,\n 'internal_id': contract.internal_id,\n 'date_submitted': contract.date_submitted.strftime('%Y-%m-%d %H:%M:%S.%f'),\n 'report_type': contract.report_type,\n 'transaction_type': contract.transaction_type,\n 'program_title': 
contract.program_title,\n 'contract_agency_code': contract.contract_agency_code,\n 'contract_idv_agency_code': contract.contract_idv_agency_code,\n 'grant_funding_agency_id': None,\n 'grant_funding_agency_name': None,\n 'federal_agency_name': None,\n 'treasury_symbol': contract.treasury_symbol,\n 'dunsplus4': None,\n 'recovery_model_q1': str(contract.recovery_model_q1).lower(),\n 'recovery_model_q2': str(contract.recovery_model_q2).lower(),\n 'compensation_q1': None,\n 'compensation_q2': None,\n 'high_comp_officer1_full_na': contract.top_paid_fullname_1,\n 'high_comp_officer1_amount': contract.top_paid_amount_1,\n 'high_comp_officer2_full_na': contract.top_paid_fullname_2,\n 'high_comp_officer2_amount': contract.top_paid_amount_2,\n 'high_comp_officer3_full_na': contract.top_paid_fullname_3,\n 'high_comp_officer3_amount': contract.top_paid_amount_3,\n 'high_comp_officer4_full_na': contract.top_paid_fullname_4,\n 'high_comp_officer4_amount': contract.top_paid_amount_4,\n 'high_comp_officer5_full_na': contract.top_paid_fullname_5,\n 'high_comp_officer5_amount': contract.top_paid_amount_5,\n 'sub_id': sub_contract.id,\n 'sub_parent_id': sub_contract.parent_id,\n 'sub_federal_agency_id': None,\n 'sub_federal_agency_name': None,\n 'sub_funding_agency_id': sub_contract.funding_agency_id,\n 'sub_funding_agency_name': sub_contract.funding_agency_name,\n 'sub_funding_office_id': sub_contract.funding_office_id,\n 'sub_funding_office_name': sub_contract.funding_office_name,\n 'sub_naics': sub_contract.naics,\n 'sub_cfda_numbers': None,\n 'sub_dunsplus4': None,\n 'sub_recovery_subcontract_amt': sub_contract.recovery_subcontract_amt,\n 'sub_recovery_model_q1': str(sub_contract.recovery_model_q1).lower(),\n 'sub_recovery_model_q2': str(sub_contract.recovery_model_q2).lower(),\n 'sub_compensation_q1': None,\n 'sub_compensation_q2': None,\n }\n if debug and not (attr.items() <= sub.__dict__.items()):\n print(sorted(attr.items()))\n print(sorted(sub.__dict__.items()))\n return attr.items() <= sub.__dict__.items()" ]
[ "0.6025328", "0.57968277", "0.57027745", "0.56979775", "0.5373415", "0.532791", "0.5307729", "0.52962", "0.52779275", "0.5272271", "0.51982015", "0.5154221", "0.514683", "0.5144544", "0.51169497", "0.5078921", "0.5072906", "0.5058429", "0.5058264", "0.505702", "0.5056039", "0.5030337", "0.50278306", "0.5023779", "0.49947494", "0.4987356", "0.49872717", "0.4980991", "0.49746358", "0.49706483", "0.49679944", "0.49606493", "0.49581385", "0.49415997", "0.4923652", "0.4913109", "0.49058825", "0.49054247", "0.48980242", "0.489429", "0.4891458", "0.4887395", "0.48702392", "0.48666814", "0.48568878", "0.4853028", "0.48429355", "0.48425838", "0.4834391", "0.48296332", "0.48269042", "0.48232007", "0.48230913", "0.48192304", "0.48184165", "0.4809105", "0.48087198", "0.4806947", "0.48055285", "0.48050115", "0.48042655", "0.48040408", "0.48026523", "0.4800339", "0.478819", "0.47644988", "0.4763969", "0.47580224", "0.4754825", "0.47527167", "0.4744529", "0.47442254", "0.47426373", "0.4741268", "0.4741268", "0.47412142", "0.47280934", "0.47279066", "0.4723681", "0.4720122", "0.47169644", "0.47160298", "0.4705535", "0.46991327", "0.46973288", "0.46968907", "0.4693064", "0.4692073", "0.46894473", "0.46870515", "0.4685188", "0.46845555", "0.46815073", "0.4679479", "0.46755657", "0.46744674", "0.4674457", "0.46738517", "0.46729007", "0.4671863" ]
0.7237854
0
Create a file for the filtered response and for the filtered reference
def ref_resp2files(output_file, output_json):
    with open(output_file, "w") as reference_text:
        reference_text.write(output_json)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def export_file(self):\n if self.args.keyfilter:\n self.filter_keys()\n if self.args.datafilter:\n self.filter_values()\n json.dump(self.outputdata, self.outfile, indent=self.args.indent)\n self.outfile.write('\\n')", "def create_reference(\n self, response_checker=default_checker.default_journey_checker\n ):\n # Check that the file doesn't already exist\n filename = self.get_file_name()\n filepath = os.path.join(config[\"REFERENCE_FILE_PATH\"], filename)\n\n if os.path.isfile(filepath):\n logger.warning(\n \"NO REF FILE CREATED - {} is already present\".format(filepath)\n )\n else:\n # Concatenate reference file info\n reference_text = OrderedDict()\n reference_text[\"query\"] = self.query.replace(\n config[\"URL_JORMUN\"][7:], \"localhost\"\n )\n logger.warning(\"Query: {}\".format(self.query))\n reference_text[\"response\"] = response_checker.filter(\n json.loads(self.full_resp)\n )\n reference_text[\"full_response\"] = json.loads(\n self.full_resp.replace(config[\"URL_JORMUN\"][7:], \"localhost\")\n )\n\n # Write reference file directly in the references folder\n with open(filepath, \"w\") as ref:\n ref.write(json.dumps(reference_text, indent=4))\n logger.info(\"Created reference file : {}\".format(filepath))", "def create_response_info(self, response):\n output_path = os.path.join(self.output_folder, self.file_name)\n output_path += \".response.txt\"\n with open(output_path, 'w') as file:\n file.write(json.dumps(response))", "def create_file_output(self, results):\n for key, value in results.table_output.items():\n name_timestamp = key.split('&')\n _name = name_timestamp[0]\n timestamp = name_timestamp[1]\n file_name = output_file_prefix + \"-\" + _name + \".csv\"\n if file_name not in self.file_creation_set:\n self._header_written = False\n self.file_creation_set.update([file_name])\n for row in value:\n with open(file_name, 'a+') as file_to_write:\n row.update({'Timestamp': timestamp})\n _keys = row.keys()\n file_output = csv.DictWriter(file_to_write, _keys)\n if not self._header_written:\n file_output.writeheader()\n self._header_written = True\n file_output.writerow(row)\n file_to_write.close()\n return results", "def create_filtered_network_file(network_file_prefix, filtered_network_file, ueids):\n network_file_method_attribute = network_file_prefix + \"_method_id.eda\"\n network_file_source_attribute = network_file_prefix + \"_source.eda\"\n #biana_output_converter.filter_network_by_interaction_type(network_attribute_file_name = network_file_method_attribute, network_out_file_name = network_file_prefix + \"_y2h.sif\", interaction_type=\"y2h\")\n #biana_output_converter.filter_network_by_interaction_type(network_attribute_file_name = network_file_method_attribute, network_out_file_name = network_file_prefix + \"_tap.sif\", interaction_type=\"tap\")\n #biana_output_converter.filter_network_by_interaction_type(network_attribute_file_name = network_file_method_attribute, network_out_file_name = network_file_prefix + \"_no_tap.sif\", interaction_type=\"tap\", reverse_selection=True)\n #biana_output_converter.filter_network_by_interaction_type(network_attribute_file_name = network_file_method_attribute, network_out_file_name = filtered_network_file + \".no_tap\", interaction_type=\"tap\", reverse_selection=True)\n valid_ids = set([0,4,96,676,729,19,6,7,858,59,109]) # TAP\n biana_output_converter.filter_network_by_interaction_attribute_value(network_attribute_file_name = network_file_method_attribute, network_out_file_name = filtered_network_file + \".no_tap\", 
accept_attribute_value = lambda x: int(x) not in valid_ids)\n\n #interaction_to_sources = get_interaction_sources(network_file_source_attribute)\n with open(filtered_network_file, 'w') as f:\n for line in open(filtered_network_file + \".no_tap\"):\n id1, dummy, id2 = line.split()\n # Filter self interactions\n if id1 == id2:\n continue\n # Remove singleton interacions (that has evidence only from one database)\n #id_pair = sorted([id1, id2])\n #if is_singleton(interaction_to_sources[(id_pair[0], id_pair[1])]):\n # continue\n # Do not include ambigous user entities\n if id1 in ueids and id2 in ueids:\n f.write(line)\n return", "def save_response(response, file_name, path='~/tmp/fcb-analyzer'):\n \n path = ensure_path(path)\n f = open(path + '/' + file_name, 'w')\n f.write(response.text)", "def create_file(self):\n for data_element in self.data:\n title = data_element['title']\n anchor = data_element['href']\n example = data_element['example']\n content = data_element['content']\n if example:\n abstract = '<section class=\"prog__container\">{}<br>{}</section>'.format(content, example)\n\n list_of_data = [\n title, # api title\n 'A', # type is article\n '', # no redirect data\n '', # ignore\n '', # no categories\n '', # ignore\n '', # no related topics\n '', # ignore\n '', # no external link\n '', # no disambiguation\n '', # images\n abstract, # abstract\n anchor # url to doc\n ]\n self.output_file.write('{}\\n'.format('\\t'.join(list_of_data)))", "def create_exclusions_file(output_file: str, verbosity: int) -> None:\n set_log_level(verbosity)\n\n with open(output_file, \"a\") as file_obj:\n for line in EXCLUSIONS_TEMPLATE:\n file_obj.write(line)\n utils.print_green(f\"Success! Exclusions template file written to: {output_file}\")\n print(\n \"Make sure you download your account authorization details before running the scan.\"\n \"Set your AWS access keys as environment variables then run: \"\n )\n print(\"\\tcloudsplaining download\")\n print(\"You can use this with the scan command as shown below: \")\n print(\n \"\\tcloudsplaining scan --exclusions-file exclusions.yml --input-file default.json\"\n )", "def write_filter_spec(filters, filename):\n data = export_filters(filters)\n with open(filename, 'w') as fp:\n json.dump(data, fp, indent = 4)", "def process_output_file_write(output_file, response):\n\n with open(output_file, \"w\") as output_file:\n output_file.write(response)", "def _toFile(self):\n pass", "def _get_file_objects(self, build_results=True):\n file_obj = None\n writer = None\n if self.filename is not None:\n file_obj = open(self.filename, \"w\")\n writer = csv.writer(file_obj, lineterminator=\"\\n\")\n\n header = [\n \"Interaction index\",\n \"Player index\",\n \"Opponent index\",\n \"Repetition\",\n \"Player name\",\n \"Opponent name\",\n \"Actions\",\n ]\n if build_results:\n header.extend(\n [\n \"Score\",\n \"Score difference\",\n \"Turns\",\n \"Score per turn\",\n \"Score difference per turn\",\n \"Win\",\n \"Initial cooperation\",\n \"Cooperation count\",\n \"CC count\",\n \"CD count\",\n \"DC count\",\n \"DD count\",\n \"CC to C count\",\n \"CC to D count\",\n \"CD to C count\",\n \"CD to D count\",\n \"DC to C count\",\n \"DC to D count\",\n \"DD to C count\",\n \"DD to D count\",\n \"Good partner\",\n ]\n )\n\n writer.writerow(header)\n return file_obj, writer", "def autoSaveFilter(filename):", "def save_file(E_Filtered, output_path):\n os.makedirs(os.path.dirname(output_path), exist_ok=True)\n with open(output_path, 'w+') as f:\n for k, v in E_Filtered.items():\n 
f.write(\"%s\\t%s\\n\" % (list(k), v))", "def CreateOutFile(pdb_file): \n if(pdb_file[-4:] == '.pdb'):\n OutFile = open(pdb_file[:-4].replace('../','')+'_PredictedSites.xyz', 'w') #overwrite file if already present\n elif(pdb_file[-3:] == '.gz'):\n OutFile = open(pdb_file[:-7].replace('../','')+'_PredictedSites.xyz', 'w') #overwrite file if already present\n else:\n OutFile = open(pdb_file.replace('../','')+'_PredictedSites.xyz', 'w') #overwrite file if already present", "def render_to_file(properties,file):\n properties['tempfile']=None\n properties['remove_temp']=True\n properties['outfile']=file", "def write_data_files(self):\n # build our strings\n header_string = \"\"\n data_string = \"\"\n for value in self.data.values():\n header_string += value[2] + \",\"\n if value[0] != None:\n data_string += value[1].format(value[0])\n else:\n data_string += \",\"\n # remove the extra comma and replace with a newline\n header_string = header_string[:-1]\n header_string += \"\\n\"\n data_string = data_string[:-1]\n data_string += \"\\n\"\n \n # show what we built\n #print(header_string)\n #print(data_string)\n \n # open a temp file\n with open(\"{:s}\\\\VWSInput\\\\temp_data.csv\".format(self.path), \"w\") as temp_file:\n #temp_file.write(header_string)\n temp_file.write(data_string)\n \n # move to the input file\n filetools.mv(\"{:s}\\\\VWSInput\\\\temp_data.csv\".format(self.path), \"{:s}\\\\VWSInput\\\\data.csv\".format(self.path))\n \n return", "def _create_file(content=''):\r\n sjson_file = tempfile.NamedTemporaryFile(prefix=\"subs_\", suffix=\".srt.sjson\")\r\n sjson_file.content_type = 'application/json'\r\n sjson_file.write(textwrap.dedent(content))\r\n sjson_file.seek(0)\r\n return sjson_file", "def tofile(self, f):\n raise NotImplementedError(\"ScalableRedisLocalBloomFilter not support tofile\")", "def test_output_source_file(self):\n response = self.client.open(\n '/v1/control/file/{id}'.format(id='id_example'),\n method='GET',\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def writeResponse(response):", "def createMetadata(request, datafile):\n samples = []\n datafile = datafile.split(',')\n for f in datafile:\n filename = f.replace('[', '').replace(']', '').replace('\"', '').replace(' ', '')\n cont = subprocess.Popen(\n [\"curl -u \" + request.session.get('username') + \":\" + request.session.get('password') + \" -k -s \" + filename[1:]],\n stdout=subprocess.PIPE, shell=True).communicate()[0]\n with open(request.session.get('username') + \"/data.txt\", \"w\") as datafile:\n datafile.write(cont)\n with open(datafile.name, \"r\") as tfile:\n for line in tfile:\n if \"!Sample_geo_accession\" in line:\n line = line.split('\\t')\n for x in range(0, len(line)):\n samples.append(line[x].replace('\\n', ''))\n samples = filter(None, samples)\n tfile.seek(0)\n with open(request.session.get('username') + \"/meta.txt\", \"w\") as meta:\n for i in range(0, len(samples)):\n for line in tfile:\n if \"!Sample\" in line:\n line = line.split('\\t')\n line[i] = line[i].replace(\"!Sample_\", \"\").replace(\"\\n\", \"\").replace(\"'\", \"\").replace(\",\", \"\").replace(\"\\\"\", \"\")\n if line[i] == \"geo_accession\":\n line[i] = \"sample_id\"\n elif line[1] == \"\\\"female\\\"\" or line[1] == \"\\\"male\\\"\":\n line[0] = \"sex\"\n if \"title\" not in line[0]:\n meta.write(re.sub(r'[^\\x00-\\x7F]+', ' ', line[i]) + '\\t')\n meta.write('\\n')\n tfile.seek(0)\n meta.close()\n datafile.close()\n call([\"rm\", 
request.session.get('username') + \"/data.txt\"])\n return meta", "def download_pickle(self, filename, context=None, filter=[]):\n request = context.REQUEST\n RESPONSE = request.RESPONSE\n RESPONSE.setHeader('Content-Type', 'text/plain; charset=utf-8')\n RESPONSE.setHeader('Content-Disposition', 'attachment; filename=%s' % filename)\n return self.to_pickle(filter=filter)", "def __save_response(self, method, extras, data):\n\n import os, re\n to = \"/tmp/lex/\"\n if not os.path.exists(to):\n os.mkdir(to)\n\n removeables = re.compile('[/&?:]')\n filename = method + '-' + '_'.join(\"%s=%s\" % kv for kv in extras.iteritems())\n filename = os.path.join(to, removeables.sub('_', filename))\n with open(filename, 'w') as f:\n f.write(data)", "def pipe_to_file(response, path):\n # TODO: Indicate progress.\n with open(path, 'wb') as file:\n while True:\n chunk = response.read(4096)\n if not chunk:\n break\n file.write(chunk)", "def write_to_vcf(self):\n\n # 1. Generate header info\n date_for_vcf = datetime.now().strftime('%Y%m%d')\n header_info = [\n '##fileformat=VCFv4.2',\n '##fileDate=%s' % date_for_vcf,\n '##source=%s' % self.get_analyser_name(),\n '##reference=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/bigZips/hg38.fa.gz',\n '##contig=<ID=chr1,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr1.fa.gz>',\n '##contig=<ID=chr2,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr2.fa.gz>',\n '##contig=<ID=chr3,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr3.fa.gz>',\n '##contig=<ID=chr4,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr4.fa.gz>',\n '##contig=<ID=chr5,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr5.fa.gz>',\n '##contig=<ID=chr6,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr6.fa.gz>',\n '##contig=<ID=chr7,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr7.fa.gz>',\n '##contig=<ID=chr8,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr8.fa.gz>',\n '##contig=<ID=chr9,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr9.fa.gz>',\n '##contig=<ID=chr10,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr10.fa.gz>',\n '##contig=<ID=chr11,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr11.fa.gz>',\n '##contig=<ID=chr12,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr12.fa.gz>',\n '##contig=<ID=chr13,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr13.fa.gz>',\n '##contig=<ID=chr14,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr14.fa.gz>',\n '##contig=<ID=chr15,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr15.fa.gz>',\n '##contig=<ID=chr16,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr16.fa.gz>',\n '##contig=<ID=chr17,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr17.fa.gz>',\n '##contig=<ID=chr18,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr18.fa.gz>',\n '##contig=<ID=chr19,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr19.fa.gz>',\n '##contig=<ID=chr20,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr20.fa.gz>',\n '##contig=<ID=chr21,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr21.fa.gz>',\n '##contig=<ID=chr22,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chr22.fa.gz>',\n '##contig=<ID=chrM,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chrM.fa.gz>',\n 
'##contig=<ID=chrX,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chrX.fa.gz>',\n '##contig=<ID=chrY,URL=https://hgdownload.soe.ucsc.edu/goldenPath/hg38/chromosomes/chrY.fa.gz>',\n ]\n header_parameters = [\n '##FORMAT=<ID=GT,Number=1,Type=String,Description=\"Genotype\">',\n '##FORMAT=<ID=MTQ,Number=1,Type=String,Description=\"MassArray Typer quality value for SNP call. '\n 'A=Conservative, B=Moderate, C=Aggressive, D=Low Probability, E=User Call, i=Low Intensity. A and B are considered high '\n 'quality scores.\">',\n '##INFO=<ID=PCR,Number=2,Type=String,Description=\"PCR sequences used in assay.\">',\n '##INFO=<ID=AF,Number=A,Type=Float,Description=\"Minor allele frequency from population data.\">',\n '##INFO=<ID=Gene,Number=A,Type=String,Description=\"HGNC Gene Name for gene containing SNP.\">',\n '##INFO=<ID=Build,Number=A,Type=String,Description=\"Genome build used to determine SNP position for assay.\">',\n '##FILTER=<ID=LowCallRate,Description=\"SNP not called in at least 30% of samples in assay.\">',\n ]\n\n # 2. Extract info from XML file\n results = self.get_results()\n snps = self.get_snps()\n pcr_sequences = self.get_pcr_sequences()\n call_rates = self.get_snp_call_rate()\n\n # 3. For each sample, create VCF, add headers, determine genotype of each SNP and write to file.\n for sample, variants in results.items():\n\n with open(os.path.join(self.output, '%s.vcf' % sample), 'w+') as outfile:\n\n header_fields = ['CHROM', 'POS', 'ID', 'REF', 'ALT', 'QUAL', 'FILTER', 'INFO', 'FORMAT', str(sample)]\n\n outfile.write('%s\\n' % '\\n'.join(header_info))\n outfile.write('%s\\n' % '\\n'.join(header_parameters))\n outfile.write('#%s\\n' % '\\t'.join(header_fields))\n\n # for each variant, make a line to add to the file which will\n # then be sorted\n lines_to_write = []\n for snp, info in variants.items():\n\n ref_allele = snps[snp]['ref']\n alt_alleles = snps[snp]['alt']\n alt_list = alt_alleles.split(',')\n\n # Genotype formatting matches VCF v4.0 spec where ./. is no call.\n gt_list = []\n called_genotype = info['genotype']\n if not called_genotype:\n gt_list = ['.', '.']\n elif len(called_genotype) == 1:\n called_genotype += called_genotype\n for allele in list(called_genotype):\n if allele == ref_allele:\n gt_list.append(0)\n else:\n if allele in alt_list:\n idx = alt_list.index(allele)\n gt_list.append(idx + 1)\n else:\n raise ValueError(\n 'Called genotype %s not provided as possible alt in bed file. Sample %s and SNP '\n '%s %s.' 
% (called_genotype, sample, snp, alt_alleles)\n )\n gt = '/'.join([str(x) for x in gt_list])\n\n # Threshold currently set to 0.3 (70% results have a call).\n snp_call_rate = call_rates[snp]\n if snp_call_rate >= 0.3:\n vcf_filter = 'LowCallRate'\n else:\n vcf_filter = 'PASS'\n\n snp_pcr_seqs = pcr_sequences[snp]\n\n lines_to_write.append(\n '{chr}\\t{pos}\\t{id}\\t{ref}\\t{alt}\\t.\\t{filter}\\tAF={af};PCR={pcr};Gene={gene};Build={build}\\t'\n 'GT:MTQ\\t{gt}:{qual}\\n'.format(\n chr=snps[snp]['chrom'],\n pos=snps[snp]['pos'],\n id=snp,\n ref=ref_allele,\n alt=alt_alleles,\n filter=vcf_filter,\n af=snps[snp]['maf'],\n pcr=','.join(snp_pcr_seqs),\n gene=snps[snp]['gene'],\n build=snps[snp]['genome_build'],\n gt=gt,\n qual=','.join(info['quality'])\n )\n )\n\n sorted_lines_to_write = sorted(\n lines_to_write,\n key=lambda x: (\n # first key for sorting is the int value of chr\n int(x.split('\\t')[0][3:]),\n # second key for sorting is the position of the variant\n int(x.split('\\t')[1])\n )\n )\n\n for line in sorted_lines_to_write:\n outfile.write(line)", "def save_response_to_file(self, response, format=None, annotation=''):\n \n if format is None:\n logging.error(\"Specify a format\")\n return None\n\n # Build filename, choosing extension carefully\n url = response.url\n _name, _ext = os.path.splitext(url.split('/')[-1])\n name = remove_reserved_chars(_name)\n if format in ['html', 'pdf']:\n # HTML files might originally have no extension;\n # PDF files may have a non-PDF extension but PDFMiner requires them to have a .pdf extension\n ext = f'.{format}'\n if _ext != '':\n logging.warning(f\"Overwriting file extension from url ({_ext}) with expected extension ({ext}) for {url}\")\n else:\n if _ext == '':\n # Look up extension from dictionary. Note that Google Sheets are assumed to be exported as CSV files.\n ext = todf.get_ext(format)\n logging.warning(\"No extension in original url for {format} data: using expected extension {ext}\")\n else:\n ext = _ext.split('?')[0] # Remove query portion of URL, if any \n file_name = f\"{self.state_abbrev}{annotation}{name}{ext}\"\n\n # Save HTML and CSV as text, other formats as binary\n file_path = os.path.join(TMPDIR, file_name)\n if ext == '.html' or ext == '.csv':\n try:\n with open(file_path, 'w') as f:\n f.write(response.text)\n except UnicodeEncodeError:\n with open(file_path, \"w\", encoding=\"utf-8\") as f:\n f.write(response.text)\n except AttributeError as e:\n logging.error(f\"{e}. 
Check if the format of the content at this URL is html as expected; if not, update the code to specify the correct format (e.g., pdf).\")\n else:\n with open(file_path, 'wb') as f:\n f.write(response.body) \n\n return file_path", "def write_response_to_lib_folder(self, label: Optional[str], response: Response) -> None:\n cleaned_label = label.replace(\"/\", \"|\") if label else \"response\"\n file_name = cleaned_label + \" \" + str(datetime.now())\n file_ending = \".json\"\n if not os.path.exists(RECORD_PATH):\n os.mkdir(RECORD_PATH)\n proposed_file_name = os.path.join(RECORD_PATH, file_name + file_ending)\n # Cover files with the same name case\n while os.path.exists(proposed_file_name):\n length_of_file_type = len(file_ending)\n proposed_file_name = proposed_file_name[:-length_of_file_type] + \" (1)\" + file_ending\n with open(proposed_file_name, 'w') as f:\n f.write(response.text)\n if 'X-Trace-Id' in response.headers:\n log.info(cleaned_label + ' | X-Trace-Id: ' + response.headers['X-Trace-Id'])", "def write_data(tech_id, tech_name, sentence, source, date_crawled):\n with open('PDF_data.txt', 'a') as f:\n # text = match[\"tid\"] + '\\n' + match[\"name\"] + '\\n' + sent + '\\n' + source + '\\n' + date_crawled + '\\n\\n'\n text = tech_id + '\\n' + tech_name + '\\n' + sentence + '\\n' + source + '\\n' + date_crawled + '\\n\\n'\n f.write(text)", "def dwn_saved_result_csv(request):\n source_id = request.GET.get('source_id')\n data = []\n objs = ExtractedRelation.objects.filter(source=source_id)\n s = Source.objects.filter(source_id=source_id)[0]\n for i in objs:\n data.append((i.sentence, i.head, i.tail, i.pred_relation, i.sentiment, i.conf, s.source, i.rel_id, os.path.basename(i.ckpt)))\n \n df = pd.DataFrame(data, columns=['Sentence', 'Head', 'Tail', 'Predicted Relation', 'Predicted Sentiment', 'Confidence', 'Source', 'rel_id', 'Checkpoint'])\n df.to_csv(\"temp/analysis_results.csv\", index=False)\n \n return FileResponse(open('temp/analysis_results.csv','rb'))", "def render_file(self, context, result):\n\t\tif __debug__:\n\t\t\tlog.debug(\"Processing file-like object.\", extra=dict(request=id(context), result=repr(result)))\n\t\t\n\t\tresponse = context.response\n\t\tresponse.conditional_response = True\n\t\t\n\t\tmodified = mktime(gmtime(getmtime(result.name)))\n\t\t\n\t\tresponse.last_modified = datetime.fromtimestamp(modified)\n\t\tct, ce = guess_type(result.name)\n\t\tif not ct: ct = 'application/octet-stream'\n\t\tresponse.content_type, response.content_encoding = ct, ce\n\t\tresponse.etag = unicode(modified)\n\t\t\n\t\tresult.seek(0, 2) # Seek to the end of the file.\n\t\tresponse.content_length = result.tell()\n\t\t\n\t\tresult.seek(0) # Seek back to the start of the file.\n\t\tresponse.body_file = result\n\t\t\n\t\treturn True", "def write_file(self):\n rl_df, lift_df = self.create_df()\n\n number = re.findall('\\d+', self.url)[0]\n\n if self.write is True:\n with open('house_{}.csv'.format(number), 'w',\n encoding='utf-8-sig') as file:\n rl_df.to_csv(file, sep=';')\n with open('house_lifts_{}.csv'.format(number), 'w',\n encoding='utf-8-sig') as file2:\n lift_df.to_csv(file2, sep=';')", "def initialize_response(self, filename):\n key = 'Content-Disposition'\n self.response = HttpResponse(content_type='text/csv')\n self.response[key] = f'attachment; filename=\"{filename}\"'\n self.writer = UnicodeCsvWriter(self.response)", "def filter_by_IDs(\n self, record_ids: list, output_file: Path = None, point_to_new_file: bool = True\n ) -> None:\n if output_file is None:\n output_file = 
(\n Path(self._input_file.parent)\n / f\"{self._input_file.stem}_filtered{self._input_file.suffix}\"\n )\n else:\n output_file = Path(output_file)\n with tempfile.NamedTemporaryFile(mode=\"w+t\") as tmp_ids:\n tmp_ids.writelines(\"\\n\".join(record_ids))\n tmp_ids.flush()\n tmp_ids_path = tmp_ids.name\n cmd_str = (\n f\"seqkit grep -i -f {tmp_ids_path} {self._input_file} -o {output_file}\"\n )\n utils.terminal_execute(cmd_str, suppress_shell_output=True)\n if point_to_new_file:\n self.file_path = output_file", "def on_get(self, req, resp):\n resp.set_header('Content-Type', 'text/json')\n diaries_paths = encode.get_files_in_directory(DIARIES_TO_CREATE_DIR, \".pdf\")\n\n def extract_file_name(path): return os.path.basename(path)\n resp.body = json.dumps({\"templates_file_names\": list(map(extract_file_name, diaries_paths)),\n \"templates_paths\": diaries_paths})", "def initialize_response(self, filename):\n self.writer = UnicodeCsvWriter(self.csv_buffer)\n self.filename = filename\n self.archive = ZipFile(self.zip_buffer, 'w', compression=ZIP_DEFLATED)", "def to_output_file(self, content):\n self.__log(f'Starting to write response content to output file.')\n if self.output_file_exists() and not self.config['FORCE_OVERWRITE']:\n self.__log(f'Cannot write to file. Selected output file exists and FORCE_OVERWRITE is disabled.', 'error')\n raise FileExistsError\n file = self.config['OUT_FOLDER'] + '/' + self.config['OUTPUT_FOLDER'] + '/' + self.output_filename + '.' \\\n + self.options['image_format'].lower()\n with open(file, 'w') as f:\n f.writelines(content)\n self.__log(f'Successfully wrote response content to \"{file}\".', 'success')", "def result_file(accession_list):\n with open(\"../accessions_list.txt\", 'w') as file:\n file.write(accession_list)", "def report_result(result, filter_name, outf):\n s = (f\"{result['lambda_c']:.5e} {result['lambda_mean']:.5e} \"\n f\"{result['lambda_1']:.5e} \"\n f\"{result['lambda_pivot']:.5e} \"\n f\"{result['lambda_eff']:.5e} \"\n f\"{result['lambda_eff_jv']:.5e} {result['isophotal_wt']:.5e} \"\n f\"{result['width']:.5e} {result['response']:.5e} \"\n f\"{result['flux_mean']:.5e} {result['flux_nu_mean']:.5e} \"\n f\"{result['color_term_k0']:.5e} \"\n f\"{result['color_term_k1']:.5e} {result['source_rate']:.5e} \"\n f\"{result['source_size']:.5e} \"\n f\"{result['source_fwhm']:.5e} {result['background_power']:.5e} \"\n f\"{result['nep']:.5e} \"\n f\"{result['nefd']:.5e} {result['mdcf']:.5e} \"\n f\"{result['npix_mean']:.5e} {result['lambda_prime']:.5e} \"\n f\"{result['lamcorr']:.5e} \"\n f\"{os.path.basename(filter_name)}\")\n print(s)\n outf.write(s + '\\n')", "def tofile(self, f):\n raise NotImplementedError(\"RedisLocalBloomFilter not support tofile\")", "def make_file(self):\n\n f = open(get_output_path(), \"w\")\n \n f.write(self.export())\n \n f.close()\n\n return self", "def compare_with_ref(\n self, response, response_checker=default_checker.default_journey_checker\n ):\n\n def ref_resp2files(output_file, output_json):\n \"\"\"\n Create a file for the filtered response and for the filtered reference\n \"\"\"\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)\n\n def print_diff(ref_file, resp_file):\n \"\"\"\n Print differences between reference and response in console\n \"\"\"\n # open reference\n with open(ref_file) as reference_text:\n reference = reference_text.readlines()\n # open response\n with open(resp_file) as response_text:\n response = response_text.readlines()\n\n # Print failed test name\n 
print_color(\"\\n\\n\" + str(file_name) + \" failed :\" + \"\\n\\n\", Colors.PINK)\n\n symbol2color = {\"+\": Colors.GREEN, \"-\": Colors.RED}\n for line in difflib.unified_diff(reference, response):\n print_color(line, symbol2color.get(line[0], Colors.DEFAULT))\n\n # Filtering the answer. (We compare to a reference also filtered with the same filter)\n filtered_response = response_checker.filter(response)\n\n # Get the reference\n\n # Create the file name\n filename = self.get_file_name()\n filepath = os.path.join(config[\"REFERENCE_FILE_PATH\"], filename)\n\n assert os.path.isfile(filepath), \"{} is not a file\".format(filepath)\n\n with open(filepath, \"r\") as f:\n raw_reference = f.read()\n\n # Transform the string into a dictionary\n dict_ref = json.loads(raw_reference)\n\n # Get only the full_response part from the ref\n ref_full_response = dict_ref[\"full_response\"]\n\n # Filtering the reference\n filtered_reference = response_checker.filter(ref_full_response)\n\n # Compare response and reference\n try:\n response_checker.compare(filtered_response, filtered_reference)\n except AssertionError as e:\n # print the assertion error message\n logging.error(\"Assertion Error: %s\" % str(e))\n # find name of test\n file_name = filename.split(\"/\")[-1]\n file_name = file_name[:-5]\n\n # create a folder\n dir_path = config[\"RESPONSE_FILE_PATH\"]\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n\n # create path to ref and resp\n full_file_name_ref = dir_path + \"/reference_\" + file_name + \".txt\"\n full_file_name_resp = dir_path + \"/response_\" + file_name + \".txt\"\n\n json_filtered_reference = json.dumps(filtered_reference, indent=4)\n json_filtered_response = json.dumps(filtered_response, indent=4)\n\n # Save resp and ref as txt files in folder named outputs\n ref_resp2files(full_file_name_ref, json_filtered_reference)\n ref_resp2files(full_file_name_resp, json_filtered_response)\n\n # Print difference in console\n print_diff(full_file_name_ref, full_file_name_resp)\n\n raise", "def write_result(file_name, name, entries, extra_includes, src_file_names):\r\n\r\n with open(file_name, 'w', newline='\\n') as f:\r\n f.write('// Generated by %s\\n' % os.path.basename(__file__))\r\n f.write('// Based on %s: %s\\n' %\r\n ((\"this file\" if len(src_file_names) < 2 else\r\n \"these files\"), \", \".join(src_file_names)))\r\n methods = entries[0]\r\n if len(methods) != 0:\r\n f.write(to_PyMethodDef(name, methods, extra_includes))\r\n f.write('\\n')\r\n\r\n properties = entries[1]\r\n if len(properties) != 0:\r\n f.write('\\n')\r\n f.write(to_PyGetSetDef(name, properties))", "def create_submission_file(img_idx, img_pred, file_path, set_threshold=0.5, apply_crf=False):\n # Inference, and collect to dictionary\n n_samples = len(img_idx)\n report_every = int(float(n_samples) / 100)\n print(\"Found %d samples\" % n_samples)\n rle = FastRle(4)\n for i, (idx, pred) in enumerate(zip(img_idx, img_pred)):\n rle.add(idx, resize_down(pred) > set_threshold, None, apply_crf)\n if i % report_every == 0:\n print(\"\\t -> [%d/%d]\" % (i, n_samples))\n\n result_dict = rle.get_results()\n n_results = len(result_dict)\n if n_results != n_samples:\n raise Exception('Number of results in result_dict [{}] differs from actual samples [{}]'.format(n_results, n_samples))\n sub = pd.DataFrame.from_dict(result_dict, orient='index')\n sub.index.names = ['id']\n sub.columns = ['rle_mask']\n sub.to_csv(file_path)", "def generate_expected_file(self, expected_file, xml_name):\n\t\tlogging.info('Gerando arquivo de 
documentos esperados')\n\t\tcontent = self.read_xml(xml_name)\n\n\t\twith open(expected_file, 'w', newline='') as csvfile:\n\t\t\tfieldnames = ['QueryNumber', 'DocNumber', 'DocVotes']\n\t\t\twriter = csv.DictWriter(csvfile, fieldnames=fieldnames)\n\n\t\t\twriter.writeheader()\n\t\t\tfor index in range(0, len(content['QueryNumber'])):\n\t\t\t\tcount_results = 0\n\t\t\t\tlogging.info('Escrevendo documentos da consulta '+str(index+1)+'/'+str(len(content['QueryNumber'])))\n\t\t\t\tfor result in content['Records'][index]:\n\t\t\t\t\twriter.writerow({'QueryNumber': content['QueryNumber'][index], 'DocNumber': result[0], \n\t\t\t\t\t\t\t\t\t 'DocVotes': result[1]})\n\t\t\t\t\tcount_results += 1\n\t\t\t\t\tif count_results == int(content['Results'][index]): break", "def query_into_file(self, query, fname=\"\", fields=None, parameters=None):\n target_url = self.build_query(query, fields=fields, parameters=parameters)\n\n with urllib.request.urlopen(target_url) as url:\n content = url.read()\n\n with open(fname, 'wb') as ofs:\n ofs.write(content)", "def create_ref_file(self):\n id = self.task_record.create_published_output_name()\n ctx = self.block_store.make_local_output(id)\n self.open_ref_contexts[ctx.get_filename()] = ctx\n return ctx.get_filename()", "def create_submission_file(df):\n\n # Find file number for new file\n file_num = 0\n while path.isfile('submission-matched-{}.csv'.format(file_num)):\n file_num += 1\n\n # Write final submission\n df.to_csv('submission-matched-{}.csv'.format(file_num), index = False)", "def filter_stream(self, req, method, filename, stream, data):\n\n if filename == 'prefs_general.html' and req.authname != 'anonymous':\n stream |= Transformer('.//table').append(\n tag.tr(\n tag.th(\n tag.label('Ticket notifications opt-out:', **{'for': 'ticket-notification-optout'}),\n ),\n tag.td(\n tag.input(type=\"hidden\", name=\"ticket-notification-optout_cb\", value=\"\"),\n tag.input(type=\"checkbox\", id=\"ticket-notification-optout\", name=\"ticket-notification-optout\", checked=req.session.get('ticket-notification-optout') or None),\n ),\n **{'class': 'field'}\n ),\n )\n return stream", "def test_to_file(self):\n fd, fp = mkstemp()\n close(fd)\n st = SampleTemplate.create(self.metadata, self.new_study)\n st.to_file(fp)\n self._clean_up_files.append(fp)\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE)\n\n fd, fp = mkstemp()\n close(fd)\n st.to_file(fp, {'2.Sample1', '2.Sample3'})\n self._clean_up_files.append(fp)\n\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE_FEWER_SAMPLES)", "def file_output(matches: list, output_file_name: str = 'matches.txt'):\n with open(\"test/Matches/\" + output_file_name, 'w') as f:\n for match in matches:\n for event in match.events:\n f.write(\"%s\\n\" % event.payload)\n f.write(\"\\n\")", "def output_handler(response, context):\n print(\"Output handler\")\n \n if response.status_code != 200:\n _return_error(response.status_code, response.content.decode('utf-8'))\n response_content_type = context.accept_header\n content = response.content\n\n predictions = json.loads(content.decode('UTF-8'))\n predictions = np.array(predictions[\"predictions\"])\n res = []\n for pred in predictions:\n top3 = (-pred).argsort()[:3]\n res.append({'file_name': 'no-filename', 'path': 'no-path', 'cls': 'actual', 'prediction':top3[0], 'proba_1':pred[top3[0]], 'prediction2':top3[1], 'proba_2':pred[top3[1]], 'prediction3':top3[2], 'proba_3':pred[top3[2]]})\n\n image_index = 
pd.DataFrame(res)\n image_index['prediction'] = image_index.prediction.map(cls_map)\n image_index['prediction2'] = image_index.prediction2.map(cls_map)\n image_index['prediction3'] = image_index.prediction3.map(cls_map)\n return image_index.to_csv(index=False, header=False), response_content_type", "def __init__(self, output_file):\n self.file = open(output_file, \"w\")", "def main(output_filepath):\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw data')\n\n baseurl = 'http://codeandbeer.org/virtual/BigData/Labs/'\n files = ['Booking-20151012-1322.csv', 'Booking-20181025-1232.csv']\n for filename in files:\n r = requests.get(baseurl+filename, stream=True)\n if r.status == 200:\n with open(output_filepath+\"/\"+filename, \"wb\") as f:\n f.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)", "def create_file(self, file_name=\"result\", extension=\"html\"):\n with open(f\"{file_name}.{extension}\", \"a\") as f:\n f.write(\"<!DOCTYPE html>\\n<html>\\n<head>\\n <meta charset='utf-8'>\")\n for head_element in self.head:\n f.write(head_element)\n f.write(\"\\n</head>\\n<body>\")\n for body_element in self.body:\n f.write(body_element)\n f.write(\"\\n</body>\\n</html>\")", "def createFileResponse(context, name, archiveName, classModuleName, classModule, readme=None):\n retObject = {}\n with tempfile.TemporaryDirectory() as tmpdir:\n datadir = os.path.join(tmpdir, 'data')\n os.mkdir(datadir)\n with open(os.path.join(datadir, 'context.json'), 'w') as outfile:\n json.dump(context, outfile)\n if readme:\n with open(os.path.join(datadir, 'readme.txt'), 'w') as outfile:\n outfile.write(readme)\n with zipfile.ZipFile(os.path.join(tmpdir, 'data.zip'), 'a') as datazip:\n datazip.write(os.path.join(datadir, 'readme.txt'), arcname='readme.txt')\n with zipfile.ZipFile(os.path.join(tmpdir, 'data.zip'), 'a') as datazip:\n datazip.write(os.path.join(datadir, 'context.json'), arcname='context.json')\n with zipfile.ZipFile(os.path.join(tmpdir, 'data.zip'), 'a') as datazip:\n datazip.write(classModule, arcname=classModuleName+'.py')\n retObject = FileResponse(open(os.path.join(tmpdir, 'data.zip'), 'rb'))\n retObject['Content-Disposition'] = ('attachment; filename=\"'+name+'.zip\"')\n return retObject", "def test_DL_export_create_file(self):\n filepath = '1.txt'\n dl = flow_processing_input.DetectorsLocation(2021)\n dl.detectors_location_dict = createDLDataset(1).dataset\n dl.export_to_file(filepath)\n # Check if file was created at filepath\n self.assertTrue(os.path.exists(filepath))\n os.remove(filepath)", "def save_filter(self, filename, overwrite=False):\n hdu = fits.PrimaryHDU(self.filter, self.header)\n hdu.writeto(filename, clobber=overwrite)\n fits.append(filename, self.approx, self.header)\n fits.append(filename, self.filter + self.approx, self.header)\n fits.append(filename, self.max_scale_image(), self.header)", "def main(index, output_file, **kwargs):\n\n output_jsonl = None\n output_text = None\n if 'json' in kwargs['output_format']:\n fname = output_file if len(kwargs['output_format']) == 1 else kwargs['output_format'] + '.jsonl'\n output_jsonl = open(fname, 'w')\n if 'text' in kwargs['output_format']:\n fname = output_file if len(kwargs['output_format']) == 1 else kwargs['output_format'] + '.txt'\n output_text = open(fname, 'w')\n\n if kwargs.get('query') is not None:\n query = json.load(kwargs.get('query'))\n else:\n query = {\n \"sort\": [\"warc_id\"],\n \"size\": 200,\n \"query\": {\n \"bool\": {\n \"filter\": {\n \"bool\": {\n \"must_not\": [\n {\n 
\"query_string\": {\n \"analyze_wildcard\": True,\n \"default_field\": \"*\",\n \"query\": \"\"\"group:(*.patches OR *.commits* OR\n *.dist-commits* OR *.version-control* OR *.git* OR *.cvs* OR *.svn*\n OR *.trunk* OR *.scm* OR *.pkg*) OR (group:(*.bugs* OR *.issues*\n OR *.bugzilla* OR *.codereview*) OR \n headers.subject.keyword:(*jira* OR *bugzilla*) OR\n headers.from_email.keyword:(*bugs* OR *bugzilla* OR *jira* OR *jboss*))\"\"\"\n }\n }\n ],\n \"must\": {\"term\": {\"lang\": \"en\"}},\n \"minimum_should_match\": 1,\n \"should\": [\n {\"wildcard\": {\"group\": \"gmane.culture.*\"}},\n {\"wildcard\": {\"group\": \"gmane.politics.*\"}},\n {\"wildcard\": {\"group\": \"gmane.science.*\"}},\n {\"wildcard\": {\"group\": \"gmane.education.*\"}},\n {\"wildcard\": {\"group\": \"gmane.music.*\"}},\n {\"wildcard\": {\"group\": \"gmane.games.*\"}},\n {\"wildcard\": {\"group\": \"gmane.recreation.*\"}}\n ]\n }\n }\n }\n }\n }\n\n logger.info('Retrieving initial batch')\n es = util.get_es_client()\n results = util.es_retry(es.search, index=index, scroll='10m', size=kwargs['scroll_size'], body=query)\n\n skip = kwargs['skip']\n if skip > 0:\n logger.info('Skipping ahead {} messages'.format(skip))\n\n sampled_groups = {}\n num_samples = 0\n num_skipped = 0\n\n try:\n with tqdm(desc='Calculating progress', unit=' messages') as progress_bar:\n while num_samples < kwargs['total_mails'] and len(results['hits']['hits']) > 0:\n for hit in results['hits']['hits']:\n if skip > 0 and num_skipped < skip:\n progress_bar.set_description('Skipping messages')\n progress_bar.total = skip\n num_skipped += 1\n progress_bar.update()\n continue\n elif (skip == 0 or num_skipped >= skip) and num_samples == 0:\n progress_bar.set_description('Sampling messages')\n progress_bar.total = kwargs['total_mails']\n progress_bar.n = 0\n progress_bar.last_print_n = 0\n progress_bar.update(0)\n\n src = hit['_source']\n text_plain = src['text_plain']\n\n prev_samples = sampled_groups.get(src['group'], 0)\n if kwargs['group_limit'] and prev_samples > kwargs['group_limit']:\n continue\n sampled_groups[src['group']] = prev_samples + 1\n\n num_samples += 1\n progress_bar.update()\n\n if output_jsonl:\n json.dump({'text': text_plain,\n 'meta': {k: src[k] for k in src.keys() if k not in ['text_plain', 'text_html']},\n 'labels': []}, output_jsonl)\n output_jsonl.write('\\n')\n\n if output_text:\n output_text.write(util.normalize_message_text(text_plain))\n output_text.write('\\n')\n\n if num_samples >= kwargs['total_mails']:\n break\n\n results = util.es_retry(es.scroll, scroll_id=results['_scroll_id'], scroll='10m')\n finally:\n es.clear_scroll(scroll_id=results['_scroll_id'])\n\n if output_jsonl:\n output_jsonl.close()\n if output_text:\n output_text.close()", "def to_file(self, file_path, smirnoff_data):\n pass", "def find_and_download_files(context):\n\n\n input_path = 'input/'\n if os.path.isdir(input_path):\n log.debug('Path already exists: ' + input_path)\n else:\n log.debug('Creating: ' + input_path)\n os.mkdir(input_path)\n\n fw = context.client\n\n if 'classification_measurement' in context.config:\n class_meas = context.config['classification_measurement'].split()\n else:\n class_meas = ['T1']\n\n # session and acquisition include/exclude lists can come from:\n # project info metadata,\n # subject info metadata, and\n # config options\n # The last one wins (how about getting it from an input file also, eh?)\n ses_exclude_list = None\n ses_include_list = None\n acq_exclude_list = None\n acq_include_list = None\n\n fs = 
'freesurfer_longitudinal_'\n where = 'Found in project info'\n # check for exclude/include lists of regexs for sessions in project info\n sel = context.gear_dict['project'].info.get(fs + 'session_excludelist')\n if sel:\n ses_exclude_list = sel.split()\n log.info(where+' '+fs+'session_excludelist: \"'+sel+'\"')\n sil = context.gear_dict['project'].info.get(fs + 'session_includelist')\n if sil:\n ses_include_list = sil.split()\n log.info(where+' '+fs+'session_includelist: \"'+sil+'\"')\n # check for exclude/include lists of regexs for acquisitions in project info\n ael = context.gear_dict['project'].info.get(fs + 'acquisition_excludelist')\n if ael:\n acq_exclude_list = ael.split()\n log.info(where+' '+fs+'acquisition_excludelist: \"'+ael+'\"')\n ail = context.gear_dict['project'].info.get(fs + 'acquisition_includelist')\n if ail:\n acq_include_list = ail.split()\n log.info(where+' '+fs+'acquisition_includelist: \"'+ail+'\"')\n\n where = 'Found in subject info'\n # check for exclude/include lists of regexs for sessions in subject info\n sel = context.gear_dict['subject'].info.get(fs + 'session_excludelist')\n if sel:\n ses_exclude_list = sel.split()\n log.info(where+' '+fs+'session_excludelist: \"'+sel+'\"')\n sil = context.gear_dict['subject'].info.get(fs + 'session_includelist')\n if sil:\n ses_include_list = sil.split()\n log.info(where+' '+fs+'session_includelist: \"'+sil+'\"')\n # check for exclude/include lists of regexs for acquisitions in subject info\n ael = context.gear_dict['subject'].info.get(fs + 'acquisition_excludelist')\n if ael:\n acq_exclude_list = ael.split()\n log.info(where+' '+fs+'acquisition_excludelist: \"'+ael+'\"')\n ail = context.gear_dict['subject'].info.get(fs + 'acquisition_includelist')\n if ail:\n acq_include_list = ail.split()\n log.info(where+' '+fs+'acquisition_includelist: \"'+ail+'\"')\n\n where = 'Found in config'\n # set up exclude/include lists of reegexs for sessions in config\n if 'session_excludelist' in context.config:\n ses_exclude_list = context.config['session_excludelist'].split()\n log.info(where+' session_excludelist: \"'+str(ses_exclude_list)+'\"')\n if 'session_includelist' in context.config:\n ses_include_list = context.config['session_includelist'].split()\n log.info(where+' session_includelist: \"'+str(ses_include_list)+'\"')\n\n # set up exclude/include lists of reegexs for acquisitions in config\n if 'acquisition_excludelist' in context.config:\n acq_exclude_list = context.config['acquisition_excludelist'].split()\n log.info(where+' acquisition_excludelist: \"'+str(acq_exclude_list)+'\"')\n if 'acquisition_includelist' in context.config:\n acq_include_list = context.config['acquisition_includelist'].split()\n log.info(where+' acquisition_includelist: \"'+str(acq_include_list)+'\"')\n\n # go through all sessions, acquisitions to find files\n for session in context.gear_dict['subject'].sessions():\n\n lemme_out = False\n if ses_exclude_list:\n for regex in ses_exclude_list:\n if re.search(regex, session.label): # if excluded, skip\n log.info('Session \"' + session.label + '\" matches ' + \\\n 'exclusion regex, skipping it')\n lemme_out = True\n continue\n if lemme_out:\n continue\n\n if ses_include_list:\n match = False\n for regex in ses_include_list:\n if not re.search(regex, session.label):\n match = True\n if match:\n continue # if not included (matches any regex), skip\n else:\n log.info('Session \"' + session.label + '\" matches ' \\\n 'an inclusion regex, keeping it')\n\n for acquisition in 
fw.get_session_acquisitions(session.id):\n\n lemme_out = False\n if acq_exclude_list:\n for regex in acq_exclude_list:\n if re.search(regex, acquisition.label): # if excluded, skip\n log.info('Acquisition \"' + acquisition.label + \\\n '\" matches exclusion regex, skipping it')\n lemme_out = True\n continue\n if lemme_out:\n continue\n\n if acq_include_list:\n match = False\n for regex in acq_include_list:\n if not re.search(regex, acquisition.label):\n match = True\n if match:\n continue # if not included (matches any regex), skip\n else:\n log.info('Acquisition \"' + acquisition.label + '\" ' + \\\n 'matches an inclusion regex, keeping it')\n\n for afile in acquisition.files:\n\n # Scan must be nifti\n if afile.type == 'nifti':\n\n found_one = False\n for cm in class_meas:\n if 'Measurement' in afile.classification:\n if cm in afile.classification['Measurement']:\n found_one = True\n log.info('Found ' + cm + ' file')\n\n if found_one:\n download_it(fw, acquisition, afile.name, input_path)\n context.gear_dict['visits'].append(\n make_file_name_safe(session.label, '_'))\n else:\n log.info('Ignoring ' + afile.name)", "def test_call_write_to_file(self):\r\n app = ReferenceRepSetPicker(params={'Algorithm': 'first',\r\n 'ChoiceF': first_id})\r\n app(self.tmp_seq_filepath,\r\n self.tmp_otu_filepath,\r\n self.ref_seq_filepath,\r\n result_path=self.result_filepath)\r\n with open(self.result_filepath) as f:\r\n actual = SequenceCollection.from_fasta_records(parse_fasta(f), DNA)\r\n expected = SequenceCollection.from_fasta_records(\r\n parse_fasta(rep_seqs_reference_result_file_exp.split('\\n')), DNA)\r\n # we don't care about order in the results\r\n self.assertEqual(set(actual), set(expected))", "def write_response(response):\n \n text_r = response.text\n \n \t# Individua le righe del file\n lines = text_r.strip().split('\\n')\n \n \"\"\" Crea df dove ogni linea corrisponde ad una riga del df\n In questa operazione non si ha ancora la divisione per le singole colonne\n questo dipende dalla response della richiesta\"\"\"\n return pd.DataFrame(lines[:], columns=[lines[0]])", "def fileCheckOriginal():\n\n print('[+] Populating File Hasing for later check')\n for url in check_files:\n try:\n data = query(url)\n file_name = url.split(\"/\")[-1]\n _,tmp_file = tempfile.mkstemp(prefix=\"exitmap_%s_\" % file_name)\n\n with open(tmp_file, \"wb\") as fd:\n fd.write(data)\n print('[+] Saving File \\\"%s\\\".' % tmp_file)\n check_files_patch_results.append( File_Check_Results(url, file_name, tmp_file, \"NO\", sha512_file(tmp_file)) )\n print('[+] First Time we see the file..')\n print(' |_________> exitnode : None' )\n print(' |_________> :url: %s' % str(url) )\n print(' |_________> :filePath: %s' % str(tmp_file))\n print(' |_________> :file Hash: %s' % str(sha512_file(tmp_file)))\n except Exception as err:\n print('[-] Error ! 
%s' % err)\n traceback.print_exc()\n pass\n return time.time()", "def _setup_output_file(self):\n\n columns = [\"Hero file\",\n \"Test type\",\n \"Name of tested entry\",\n \"Misc dice sum input\",\n \"Value of tested entry\",\n \"Modifier\",\n \"Values of related attributes\",\n \"Rolls\",\n \"Result\",\n \"Description\",\n \"Timestamp\",\n \"Type of dice input\"]\n\n # if file does not exist, add first row of column names\n if not os.path.isfile(self._result_csv):\n with open(self._result_csv, \"w\", encoding=\"utf-8\") as csv_file:\n file_writer = csv.writer(csv_file, delimiter=',',\n quotechar='|',\n quoting=csv.QUOTE_MINIMAL)\n file_writer.writerow(columns)\n return True\n return False", "def write_result(fragment_list, results, output_file):\n if not output_file:\n output_file = results + os.sep + \"prodigal_filtered.txt\"\n try:\n with open(output_file, \"wt\") as output:\n for fragment in fragment_list:\n output.write(\"{1}{0}{2}{0}\".format(\n os.linesep, fragment[0], fill(fragment[1])))\n except IOError:\n sys.exit(\"Error cannot open {0}\".format(output_file))", "def gen_file():\n content = clean(read_file())\n content += PREFIX\n instances = ec2.instances.filter(Filters=[{\"Name\": \"instance-state-name\", \"Values\": [\"running\"]}])\n for instance in instances:\n private_dns_name = instance.private_dns_name\n private_hostname = instance.private_dns_name.split('.')[0]\n if instance.public_ip_address:\n content += \"{} {} {}\\n\".format(instance.public_ip_address.ljust(15), private_dns_name, private_hostname)\n content += SUFFIX + \"\\n\"\n return content", "def recordResponse(filename, response, header,\n\t\t\t\t separator = ',',\n\t\t\t\t ender = \"\\n\"):\n\tif os.path.exists(filename):\n\t\twriteCode = 'a'\n\t\twith open(filename, writeCode) as f:\n\t\t\trecord = \"\"\n\t\t\tfor value in header:\n\t\t\t\trecord += str(response[value]) + separator\n\t\t\trecord = record[:-len(separator)]\n\t\t\trecord += ender\n\t\t\tf.write(record)\n\telse:\n\t\twriteCode = 'w'\n\t\twith open(filename, writeCode) as f:\n\t\t\trecord = \"\"\n\t\t\tfor variable in header:\n\t\t\t\trecord += variable + separator\n\t\t\trecord = record[:-len(separator)]\n\t\t\trecord += ender\n\t\t\tf.write(record)\n\t\t\trecord = \"\"\n\t\t\tfor value in header:\n\t\t\t\trecord += str(response[value]) + separator\n\t\t\trecord = record[:-len(separator)]\n\t\t\trecord += ender\n\t\t\tf.write(record)", "def create_submission(pred_sub, name_of_the_file='submission'):\n\n df_sub = pd.DataFrame(pred_sub, columns=['Prediction'])\n df_sub.index.name = 'Id'\n df_sub.index = np.arange(1, 10001)\n df_sub[df_sub['Prediction'] == 0] = -1\n df_sub.to_csv(name_of_the_file + '.csv',index_label='Id')\n\n print('submission file created as \"'+ name_of_the_file+'.csv\"')", "def handle_request(self, given_request: Request):\n with open(request.output, mode=\"w\", encoding='utf-8') as file:\n file.write(request.result)\n return True", "def __init__(self, api=None):\n self.file = open(OUTPUT_FILE, \"w\")", "def mk_filename(pattern, queryresponserow, resp, url):\n name = None\n if resp:\n cdheader = resp.headers.get(\"Content-Disposition\", None)\n if cdheader:\n _, params = parse_header(cdheader)\n name = params.get('filename', \"\")\n # Work around https://github.com/sunpy/sunpy/issues/3372\n if name.count('\"') >= 2:\n name = name.split('\"')[1]\n\n # This is a hack to to prevent IRIS data from being labelled as XML files\n if name is None and \"VOEvent_IRIS\" not in queryresponserow['fileid']:\n # Advice from the VSO is to 
fallback to providerid + fileid for a filename\n # As it's possible multiple providers give the same fileid.\n # However, I haven't implemented this yet as it would be a breaking\n # change to the filenames we expect.\n fileid = queryresponserow['fileid']\n\n # Some providers make fileid a path\n # Some also don't specify a file extension, but not a lot we can do\n # about that.\n name = fileid.split(\"/\")[-1]\n\n # If somehow we have got this far with an empty string, fallback to url segment\n if not name:\n name = url.split('/')[-1]\n\n # Remove any not-filename appropriate characters\n name = slugify(name)\n\n # If absolutely everything else fails make a filename based on download time\n if not name:\n name = f\"vso_file_{datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')}\"\n\n fname = pattern.format(file=name,\n **queryresponserow.response_block_map)\n\n return fname", "def create_mock_files(self,\n project_id=\"DEV-test\",\n count=3,\n prefix=\"mock_data_file\",\n file_format=\"dcm\",\n outdir=\".\",\n msg = \"This is a mock data file for testing purposes. Delete me!\",\n write_tsv = True\n ):\n prog,proj = project_id.split(\"-\")\n authz = [\"/programs/{}/projects/{}\".format(prog,proj)]\n acl = [prog,proj]\n\n mfiles = {'file_name':[],'md5sum':[],\"file_size\":[],\"object_id\":[],\"storage_urls\":[],\"acl\":[],\"authz\":[]}\n for i in range(count):\n file_name = \"{}_{}.{}\".format(prefix,i+1,file_format)\n object_id = str(uuid.uuid4())\n mfiles['file_name'].append(file_name)\n mfiles['object_id'].append(object_id)\n mfiles['authz'].append(authz)\n mfiles['acl'].append(acl)\n\n\n output = \"{}/{}\".format(outdir,file_name)\n os.system(\"touch {}\".format(output))\n file_msg =\"{} File {} of {}. {} with object_id {}.\".format(msg,i+1,count,file_name,object_id)\n cmd = 'echo \"{}\" > {}'.format(file_msg,file_name)\n os.system(cmd)\n\n with open(output, 'rb') as file_to_check:\n file_contents = file_to_check.read()\n #cmd = \"!md5 mock_data_file_{}.{}\".format(i+1,file_format))\n md5 = hashlib.md5(file_contents).hexdigest() #check in shell: !md5 mock_data_file_3.dcm\n\n mfiles['md5sum'].append(md5)\n mfiles['file_size'].append(os.stat(output).st_size)\n urls=\"s3://this-is-a-fake-url-for:{}\".format(file_name)\n mfiles['storage_urls'].append([urls])\n\n return mfiles", "def test_write_source(self):\n req = Request()\n for name in sample_data.keys():\n orig_fn = self._filepath(name)\n temp_fn = self._filepath(name + '-write-source')\n\n # Read the message\n resp = req.get(fromfile=orig_fn)\n\n # Write to a temporary JSON file\n resp.write_source(temp_fn)\n\n # Read the two files and compare JSON (ignores ordering)\n with open(orig_fn) as orig, open(temp_fn) as temp:\n assert json.load(orig) == json.load(temp)\n\n # Delete the temporary file\n os.remove(temp_fn)", "def main():\n \n lookupslocation = 'C:\\\\Users\\\\gwilliams\\\\Desktop\\\\Python Experiments\\\\work projects\\\\FaresIndexSourceData\\\\regulated_fares_data\\\\'\n destination = 'C:\\\\Users\\\\gwilliams\\\\Desktop\\\\Python Experiments\\\\work projects\\\\FaresIndexSourceData\\\\regulated_fares_data\\\\comparison output\\\\'\n lookupfileslist, count = getdata(lookupslocation)\n\n print(f\"there are {count} files found.\")\n\n newlookup = lookupfileslist[0]\n oldlookup = lookupfileslist[1]\n\n #join new to old // old to new\n new_uniquevalues = pd.merge(left=newlookup,right=oldlookup,how='left',\n left_on=['orig','dest','route','ticket'],right_on=['orig','dest','route','ticket'])\n\n old_uniquevalues = 
pd.merge(left=newlookup,right=oldlookup,how='right',\n left_on=['orig','dest','route','ticket'],right_on=['orig','dest','route','ticket'])\n\n print(\"These are values unique to new lookup\") \n new_uniquevalues = new_uniquevalues[new_uniquevalues.ticketa.isnull()==True]\n exportfile(new_uniquevalues,destination,'unique_new_values',1)\n\n print(\"These are values unique to old lookup\")\n old_uniquevalues = old_uniquevalues[old_uniquevalues.new_flag.isnull()==True]\n exportfile(old_uniquevalues,destination,'unique_old_values',1)", "def create_entity_claim_input_file_doc_ret():\n claim_doc = open(r\"C:\\study\\technion\\MSc\\Thesis\\Y!\\rawClaim_SW.txt\").read().strip()\n \"remove the stop words from the claims\"\n SW_doc = r\"C:\\study\\technion\\MSc\\Thesis\\Y!\\stopWords.xml\"\n stopWords_list = []\n claims_no_SW_dict = {}\n with open(SW_doc, 'r') as f:\n line = f.readline()\n while line !=\"\":\n if \"<word>\" in line:\n stopWords_list.append(line.split(\"<word>\")[1].split(\"</word>\")[0])\n line = f.readline()\n \n for i,line in enumerate(claim_doc.split(\"\\n\")):\n clmLMdocLM_doc_ret_query_file = open(\"LMdocLM_doc_ret_query_file_clm_\"+str(i+1),\"wb\")\n clmLMdocLM_doc_ret_query_file.write(\"<parameters>\\n\")\n curr_claim_words = line.split(\"|\")[1].lower().split()\n curr_entity_words = line.split(\"|\")[0].lower().split()\n noSW_claim = \"\"\n noSW_entity = \"\"\n for word in curr_claim_words:\n if word not in stopWords_list: \n noSW_claim += word+\" \"\n for word in curr_entity_words:\n if word not in stopWords_list: \n noSW_entity += word+\" \"\n# clmLMdocLM_doc_ret_query_file.write(\"<query><number>\"+str(i+1)+\"</number><text>\"+noSW_entity+\"|\"+noSW_claim+\"</text></query>\\n\")\n# clmLMdocLM_doc_ret_query_file.write(\"</parameters>\")\n# clmLMdocLM_doc_ret_query_file.close()\n claims_no_SW_dict[str(i+1)] = (noSW_entity,noSW_claim)\n save_pickle(\"claims_no_SW_dict\", claims_no_SW_dict)", "def openie_prepare_files(document_file, no_entity_filter=False, consider_sections=False):\n temp_dir = tempfile.mkdtemp()\n temp_in_dir = os.path.join(temp_dir, \"input\")\n filelist_fn = os.path.join(temp_dir, \"filelist.txt\")\n out_fn = os.path.join(temp_dir, \"output.txt\")\n os.mkdir(temp_in_dir)\n input_files = []\n\n amount_skipped_files = 0\n doc_count = count_documents(document_file)\n logging.info('counting files to process....')\n if no_entity_filter:\n for document_content in read_pubtator_documents(document_file):\n doc = TaggedDocument(from_str=document_content)\n if not doc or not doc.title or not doc.abstract:\n amount_skipped_files += 1\n else:\n doc_count += 1\n # TODO: Not beautiful but join sections via a '.' to ensure sentence splitting in CoreNLP\n content = '. 
'.join([te for te, _ in doc.iterate_over_text_elements(sections=consider_sections)])\n input_file = os.path.join(temp_in_dir, \"{}.txt\".format(doc.id))\n input_files.append(input_file)\n with open(input_file, \"w\") as f:\n f.write(content)\n else:\n logging.info('Init spacy nlp...')\n spacy_nlp = English() # just the language with no model\n spacy_nlp.add_pipe(\"sentencizer\")\n\n doc2sentences, doc2tags = filter_document_sentences_without_tags(doc_count, document_file, spacy_nlp,\n consider_sections=consider_sections)\n doc_count = len(doc2tags)\n for doc_id, sentences in doc2sentences.items():\n if sentences:\n input_file = os.path.join(temp_in_dir, \"{}.txt\".format(doc_id))\n input_files.append(input_file)\n with open(input_file, 'wt') as f:\n f.write(' '.join(sentences))\n\n logging.info('{} files need to be processed. {} files skipped.'.format(doc_count, amount_skipped_files))\n with open(filelist_fn, \"w\") as f:\n f.write(\"\\n\".join(input_files))\n return filelist_fn, out_fn, doc_count", "def dump_to_file(final_results):\n\t#Add prefix result\n\tif final_results[\"Results\"][\"Test passed\"] == True:\n\t\ttime_now = time.time()\n\t\touput_filepath = checklists_filepath.replace(\".json\", \"\", 1) + \"_\" + datetime.datetime.fromtimestamp(time_now).strftime('%Y-%m-%d_%Hh%Mm%Ss') + \"_PASSED.json\"\n\telse:\n\t\ttime_now = time.time()\n\t\touput_filepath = checklists_filepath.replace(\".json\", \"\", 1) + \"_\" + datetime.datetime.fromtimestamp(time_now).strftime('%Y-%m-%d_%Hh%Mm%Ss') + \"_FAILED.json\"\n\twith open(ouput_filepath, 'w') as fp:\n\t\tjson.dump(final_results, fp)\n\treturn ouput_filepath", "def file_output(patient):\n import json\n outfile = open(\"{}-{}.json\".format(patient[\"First\"], patient[\"Last\"]), \"w\")\n patient_dictionary = {}\n patient_dictionary[\"First Name\"] = patient[\"First\"]\n patient_dictionary[\"Last Name\"] = patient[\"Last\"]\n patient_dictionary[\"Age\"] = patient[\"Age\"]\n patient_dictionary[\"Gender\"] = patient[\"Gender\"]\n patient_dictionary[\"Diagnosis\"] = patient[\"TSH Result\"]\n patient_dictionary[\"TSH\"] = patient[\"TSH Data\"]\n json.dump(patient_dictionary, outfile)\n outfile.close()\n return", "def write_to_file(self):\n name = datetime.today().date()\n with open(f'{name}.csv', 'w', newline='') as file_create:\n fieldnames = ['date', 'value_in_pln']\n writer = csv.DictWriter(file_create, fieldnames=fieldnames)\n writer.writeheader()\n while datetime.today() < self.track_to:\n value_of_currency = PriceTracker.track_price()\n with open(f'{file_create.name}', 'a', newline='') as file_append:\n fieldnames = ['date', 'value_in_pln']\n writer = csv.DictWriter(file_append, fieldnames=fieldnames)\n writer.writerow({'date': datetime.today().strftime(\"%H:%M:%S\"), 'value_in_pln': value_of_currency})\n\n self.check_min_value(tracked_price=value_of_currency)\n sleep(1)\n\n return self.generate_report(file_create.name)", "def test_compress_file_response(self):\n with open(__file__, \"rb\") as file1:\n\n def get_response(req):\n file_resp = FileResponse(file1)\n file_resp[\"Content-Type\"] = \"text/html; charset=UTF-8\"\n return file_resp\n\n r = GZipMiddleware(get_response)(self.req)\n with open(__file__, \"rb\") as file2:\n self.assertEqual(self.decompress(b\"\".join(r)), file2.read())\n self.assertEqual(r.get(\"Content-Encoding\"), \"gzip\")\n self.assertIsNot(r.file_to_stream, file1)", "def generate_output_file(data, extension, headers):\n output_data = _replace_boolean(data)\n output_name = _generate_output_name(extension)\n with 
open(output_name, 'a', newline='') as file:\n _file_writer(file, extension, output_data, headers)", "def filter_output(self, request, output):\n return output", "def get_files_io():\n if GC.conf['general']['training']:\n files_zip = {\n 'raw': os.path.join(COOKED_DATA, 'train.txt'),\n 'new': os.path.join(COOKED_DATA, 'train_new.txt'),\n 'norm': os.path.join(COOKED_DATA, 'train_norm.txt'),\n 'manu': os.path.join(RAW_DATA, 'others', 'temp_updt_manu.txt'),\n 'labels': os.path.join(TRAIN_DATA, 'train_norm.txt_labels.pkl'),\n 'segll': os.path.join(TRAIN_DATA, 'train_norm.txt_seginf_loglab.pkl'),\n 'segdl': os.path.join(TRAIN_DATA, 'train_norm.txt_seginf_deeplog.pkl'),\n 'struct': os.path.join(TRAIN_DATA, 'train_norm.txt_structured.csv'),\n 'output': TRAIN_DATA\n }\n else:\n files_zip = {\n 'raw': os.path.join(COOKED_DATA, 'test.txt'),\n 'new': os.path.join(COOKED_DATA, 'test_new.txt'),\n 'norm': os.path.join(COOKED_DATA, 'test_norm.txt'),\n 'labels': os.path.join(TEST_DATA, 'test_norm.txt_labels.pkl'),\n 'segll': os.path.join(TEST_DATA, 'test_norm.txt_seginf_loglab.pkl'),\n 'segdl': os.path.join(TEST_DATA, 'test_norm.txt_seginf_deeplog.pkl'),\n 'map_norm_raw': os.path.join(TEST_DATA, 'map_norm_raw.pkl'),\n 'map_norm_rcv': os.path.join(TEST_DATA, 'map_norm_rcv.pkl'),\n 'norm_rcv': os.path.join(TEST_DATA, 'test_norm_rcv.txt'),\n 'struct': os.path.join(TEST_DATA, 'test_norm.txt_structured.csv'),\n 'struct_rcv': os.path.join(TEST_DATA, 'test_norm_rcv.txt_structured.csv'),\n 'top': os.path.join(TEST_DATA, 'analysis_summary_top.txt'),\n 'sum': os.path.join(TEST_DATA, 'analysis_summary.csv'),\n 'rst_llab': os.path.join(TEST_DATA, 'results_loglab.csv'),\n 'rst_dlog': os.path.join(TEST_DATA, 'results_deeplog.txt'),\n 'rst_llzr': os.path.join(TEST_DATA, 'results_loglizer.csv'),\n 'dbg': os.path.join(TEST_DATA, 'debug.csv'),\n 'output': TEST_DATA\n }\n return files_zip", "def generate_report(self, output_path):\n with open(output_path, 'w', newline='', encoding=\"utf-8\") as csv_fd:\n writer = csv.writer(csv_fd, quoting=csv.QUOTE_NONNUMERIC, doublequote=False, escapechar=\"\\\\\")\n writer.writerow([\"category\", \"level\", \"description\", \"method\", \"parameter\", \"url\", \"body\"])\n writer.writerows(self._vulns)\n writer.writerows(self._anomalies)\n writer.writerows(self._additionals)", "def create_submission(name, results):\n if name[-4:] != \".csv\":\n file_path = name + \".csv\"\n else:\n file_path = name\n \n file = open(file_path, \"w\")\n \n id = 0\n file.write(\"Id,Prediction\\n\") # Header\n \n # Write each result one by one\n for result in results:\n id += 1\n line = str(id) + \",\" + str(result) + \"\\n\"\n file.write(line)\n \n file.close()\n print(\"File \" + file_path + \" succesfully created with \" + str(id) + \" entries\")", "def create_samfile(self):", "def search(self, request):\n file = self.request.data['file']\n with open(file) as f:\n content = f.readlines()\n content = [x.strip() for x in content]\n # creating a new file output.txt for storing results.\n output = open('Output.txt', 'w+')\n # write all matching results while iterating over queries item\n for query in content:\n output.write(\"Matches for: %s\\n\" % query)\n # filtering out all the exact matches from phone-book in sorted manner by first name.\n matches = self.queryset.filter(last_name__icontains=query).order_by('first_name')\n if not matches:\n output.write(\"No results found\")\n else:\n for count, result in enumerate(matches):\n output.write(\"Result {0}: {1}, {2}, {3}, {4}\\n\".format(\n count, 
result.last_name, result.first_name, result.state, result.phone_number\n ))\n # closing file after final iteration of all queries in a file.\n output.close()\n file_handle = output.open()\n response = FileResponse(file_handle, content_type='text/plain')\n response['Content-Length'] = file_handle.size\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % file_handle.name\n return Response(response)", "def vantechy(request):\n return FileResponse(open('/files/presentation.pdf', 'rb'))", "def newfile(self) :\n\n\t\tfrom tempfile import mkstemp\n\t\timport os\n\t\tglobal configurer\n\n\t\tfd,name = mkstemp(suffix='.blend')\n\t\tos.close(fd)\n\t\tself.name = name\n\t\tfd = open(name,'wb', configurer.get('ServerBufferSize'))\n\t\tself.fd = fd\n\t\tprint name\n\t\treturn 1", "def create(self):\n\n if len(self.filenames) != len(self.download_links):\n print(\"Must have the same amount off file names than download links\", file=sys.stderr)\n return None\n\n resources = []\n\n #Creating the resource dict\n for i in range(len(self.filenames)):\n resources.append(\n {\n \"id\": self.ids[i],\n \"description\":\"\",\n \"filename\":self.filenames[i],\n \"download_link\":self.download_links[i]\n }\n )\n\n\n #The JSON\n data = {\n \"dataset\":{\n \"project\":self.project,\n \"version\":self.version,\n \"description\":self.description,\n \"project_link\":self.project_link,\n \"data_path\": self.data_path,\n \"metadata\": self.metadata,\n \"files_type\":self.file_type,\n \"protocole\":self.protocole,\n \"resources\":resources,\n \"data_representation\":self.data_representation\n }\n }\n with open(self.dataset_path, \"w\") as json_file:\n json_file.write(json.dumps(data))", "def save_file():\n generic = pull_list()\n result = list()\n i = 0\n while True:\n try:\n if generic[i].startswith('CVE'):\n cve_pattern = \"^CVE-\\d+-\\d+|^CVE-\\d+-[X]+\"\n header = re.findall(cve_pattern, generic[i])[0]\n i += 1\n notes = list()\n while not generic[i].startswith('CVE'):\n commit_pattern = \"http[s]?:\\/\\/.+commit\\/[\\S]+\"\n if re.search(commit_pattern, generic[i]):\n link = re.findall(commit_pattern, generic[i])\n notes.append(link[0])\n i += 1\n if notes != list():\n result.append(Data(header, notes))\n except IndexError:\n print('Finished')\n break\n return result", "def create_dicts(self):\n \n # remove this string from filename to make output file names more manageable\n pre_output1 = self.file1.replace(\"_Guys121919_CGH_1100_Jul11\", '')\n pre_output2 = self.file2.replace(\"_Guys121919_CGH_1100_Jul11\", '')\n \n # Build the output file name.\n # if prefix is present add it\n if self.out_file_prefix is not None:\n # concatenate prefix, filenames and dyes into output filename file1_file1_dye_file2_file2_dye.txt\n self.outputfilename = self.out_file_prefix+pre_output1.replace(\".txt\", '') + \"_\" + self.file1_dye + \"_\" + pre_output2.replace(\".txt\", '') + \"_\" + self.file2_dye + \".txt\"\n # if no prefix don't add it!\n else:\n # concatenate filenames and dyes into output filename file1_file1_dye_file2_file2_dye.txt\n self.outputfilename = pre_output1.replace(\".txt\", '') + \"_\" + self.file1_dye + \"_\" + pre_output2.replace(\".txt\", '') + \"_\" + self.file2_dye + \".txt\"\n\n # add temp to end of file name to create a temporary output filename\n self.tempoutputfilename = self.outputfilename.replace(\".txt\", '') + \"temp.txt\"\n\n # open temp output file\n self.tempoutputfile = open(self.outputfolder + self.tempoutputfilename, 'w')\n\n \n # open FE files\n file1_open = open(self.chosenfolder 
+ self.file1, 'r')\n file2_open = open(self.chosenfolder + self.file2, 'r')\n\n # open file1 and create a dict of the features.\n for linenumber, line in enumerate(file1_open):\n if linenumber >= 10:\n splitline = line.split('\\t')\n self.file1_dict[int(splitline[1])] = line\n # get n of rows in file1 (take the linenumber of the last line)\n self.file1_len = linenumber\n\n # repeat for features in second file but first writing the feparam and stats to temp file - when pairing with control this ensures the \"header\" comes from the test (file2) not control (file1), NB NEITHER ARE ACCURATE!!!!\n for linenumber, line in enumerate(file2_open):\n if linenumber < 10:\n self.tempoutputfile.write(line)\n # then add all features to a dictionary, with the unique feature number as a key\n if linenumber >= 10:\n splitline = line.split('\\t')\n self.file2_dict[int(splitline[1])] = line\n # get n of rows in file2\n self.file2_len = linenumber\n\n # close files\n file1_open.close()\n file2_open.close()", "def download_report(self, response):\n \n if self.is_visited(response.url) == True:\n return None\n \n def get_filename_from_url(url):\n #http://www.gtja.com/f//lotus/201510/20151023%20Company%20Report%2001816%20HK_addStamper_addEncrypt.pdf\n import re\n pattern = re.compile(\"http://www.gtja.com/f//lotus/(\\d+)/(.*)\")\n result = pattern.match(url)\n if result is None:\n return str(datetime.date.today()), hashlib.md5(url).hexdigest() + \".pdf\"\n else:\n #return str(datetime.date.today()), hashlib.md5(url).hexdigest() + \".pdf\"\n return result.group(1), unquote(result.group(2))\n \n date, name = get_filename_from_url(response.url) #TODO Create date directory.\n\n file_path = settings[\"FILES_STORE_PATH\"] + date + \"/\"\n if os.path.exists(file_path) != True:\n os.mkdir(file_path)\n\n filename = file_path + name\n with open(filename.decode(\"utf-8\"), \"wb\") as f: #TODO what is the diffenrence between \"w+\" and \"wb\"\n f.write(response.body)\n \n item = ReportFileItem()\n item[\"url\"] = unquote(response.url)\n item[\"date\"] = date\n item[\"path\"] = \"/\" + date + \"/\" + name #Relative path\n item[\"link\"] = response.meta[\"link_url\"]\n item[\"create_date\"] = datetime.datetime.now()\n \n self.visit(response.url)\n \n return item", "def get_file(field, solver):\n if not field in self.res_files:\n fh = open(join(self.logs_dir, field+\".dat\"), 'w')\n fh.write(\"# Field: %s; Solver: %s\\n\"%(field, solver))\n fh.write(\"Time SubIteration InitialResidual FinalResidual NoIterations\\n\")\n self.res_files[field] = fh\n return self.res_files[field]", "def test_create_unique_files(self):\n fitting_report.create(results=self.results,\n support_pages_dir=self.dir.name,\n options=self.options)\n\n file_names = sorted([r.fitting_report_link\n for r in self.results])\n\n unique_names = sorted(list(set(file_names)))\n\n self.assertListEqual(unique_names, file_names)", "def file(self):\n\n corrdict = (self.cat_corr)['correction']\n specdict = (self.cat_corr)['spec'] \n \n fft_dir = direc('fft', self.cat_corr)\n \n if self.type == 'data': \n galdata = Data(self.type, self.cat_corr, **self.kwargs) # data class \n elif self.type == 'random': \n galdata = Random(self.type, self.cat_corr, **self.kwargs) # data class \n\n self.data_file = galdata.file_name # galaxy data file\n\n # FFT label \n fft_str = 'FFT_'\n if specdict['ell'] != 0: \n fft_str += 'Q_'\n \n fft_corr_str = ''\n if (corrdict['name'].lower() in ('floriansn', 'hectorsn')) & (self.type != 'random'):\n fft_corr_str = ''.join(['.', 
corrdict['name'].lower()])\n\n # FFTs from data file \n fft_file = ''.join([\n fft_dir, \n fft_str, (self.data_file).rsplit('/')[-1], \n fft_corr_str,\n '.grid', str(specdict['Ngrid']), \n '.P0', str(specdict['P0']), \n '.box', str(specdict['Lbox'])\n ])\n\n return fft_file", "def export_to_file(self):\r\n return True", "def _create_temp_batch_file(self):\n return tempfile.NamedTemporaryFile(delete=False)", "def write_valids():\n har_path = os.path.join(PLT_SRC, 'data/replay/*')\n valid_path = '../data/filtered_stats/valids.txt'\n\n har_files = [f for f in glob(har_path)] # Include pc files?\n urls = \\\n [urlsafe_b64decode(f.split('/')[-1].split('.')[0]) for f in har_files]\n with open(valid_path, 'w') as f:\n for url, url_har_path in zip(urls, har_files):\n f.write('{0} {1}\\n'.format(url, url_har_path))" ]
[ "0.602621", "0.59944147", "0.5985722", "0.5863088", "0.5780173", "0.5737445", "0.5735932", "0.5701409", "0.56273806", "0.5584363", "0.5571645", "0.55378807", "0.551075", "0.54842454", "0.5458675", "0.5398169", "0.5377229", "0.5367841", "0.5365113", "0.5354977", "0.5352039", "0.5347492", "0.5342986", "0.53339154", "0.53269506", "0.53170735", "0.531115", "0.53064704", "0.53000015", "0.5293311", "0.52881765", "0.52877724", "0.52781814", "0.52708495", "0.52689797", "0.5260619", "0.52424496", "0.5239502", "0.5236226", "0.5227106", "0.522234", "0.5211963", "0.52007216", "0.5197088", "0.51968336", "0.51804096", "0.5174672", "0.516588", "0.51563215", "0.51542336", "0.51516855", "0.51475114", "0.51474804", "0.51472217", "0.5132503", "0.5120604", "0.5099711", "0.50989133", "0.5085105", "0.5083779", "0.5066644", "0.5062978", "0.506205", "0.50574857", "0.5054979", "0.5054942", "0.505485", "0.50544184", "0.5044262", "0.5042673", "0.5040982", "0.50386477", "0.5032877", "0.50209945", "0.5017109", "0.5013707", "0.50133306", "0.5012255", "0.5009621", "0.5005225", "0.50033367", "0.4982428", "0.4979621", "0.49784356", "0.49776685", "0.4976166", "0.4975413", "0.49694481", "0.49672678", "0.49655667", "0.49606648", "0.49521267", "0.49504566", "0.49441513", "0.49374756", "0.49284193", "0.4927103", "0.4925266", "0.49248978", "0.49242234" ]
0.631874
0
Print differences between the reference and the response in the console
def print_diff(ref_file, resp_file): # open reference with open(ref_file) as reference_text: reference = reference_text.readlines() # open response with open(resp_file) as response_text: response = response_text.readlines() # Print failed test name print_color("\n\n" + str(file_name) + " failed :" + "\n\n", Colors.PINK) symbol2color = {"+": Colors.GREEN, "-": Colors.RED} for line in difflib.unified_diff(reference, response): print_color(line, symbol2color.get(line[0], Colors.DEFAULT))
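Note: the document above is a nested helper lifted out of a larger test class, so file_name, print_color, Colors and the difflib import all resolve from its enclosing scope. A minimal standalone sketch of the same idea, using difflib and raw ANSI escape codes in place of those helpers (all names below are illustrative, not part of the dataset record):

import difflib

# ANSI escapes standing in for the Colors helper used in the document
GREEN, RED, RESET = "\033[32m", "\033[31m", "\033[0m"

def print_colored_diff(ref_path, resp_path, name="test"):
    # read reference and response as lists of lines
    with open(ref_path) as f:
        reference = f.readlines()
    with open(resp_path) as f:
        response = f.readlines()
    print("\n\n" + name + " failed :\n")
    # added lines print green, removed lines red, context lines stay uncolored
    for line in difflib.unified_diff(reference, response):
        color = {"+": GREEN, "-": RED}.get(line[0], "")
        print(color + line.rstrip("\n") + RESET)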
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compare_with_ref(\n self, response, response_checker=default_checker.default_journey_checker\n ):\n\n def ref_resp2files(output_file, output_json):\n \"\"\"\n Create a file for the filtered response and for the filtered reference\n \"\"\"\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)\n\n def print_diff(ref_file, resp_file):\n \"\"\"\n Print differences between reference and response in console\n \"\"\"\n # open reference\n with open(ref_file) as reference_text:\n reference = reference_text.readlines()\n # open response\n with open(resp_file) as response_text:\n response = response_text.readlines()\n\n # Print failed test name\n print_color(\"\\n\\n\" + str(file_name) + \" failed :\" + \"\\n\\n\", Colors.PINK)\n\n symbol2color = {\"+\": Colors.GREEN, \"-\": Colors.RED}\n for line in difflib.unified_diff(reference, response):\n print_color(line, symbol2color.get(line[0], Colors.DEFAULT))\n\n # Filtering the answer. (We compare to a reference also filtered with the same filter)\n filtered_response = response_checker.filter(response)\n\n # Get the reference\n\n # Create the file name\n filename = self.get_file_name()\n filepath = os.path.join(config[\"REFERENCE_FILE_PATH\"], filename)\n\n assert os.path.isfile(filepath), \"{} is not a file\".format(filepath)\n\n with open(filepath, \"r\") as f:\n raw_reference = f.read()\n\n # Transform the string into a dictionary\n dict_ref = json.loads(raw_reference)\n\n # Get only the full_response part from the ref\n ref_full_response = dict_ref[\"full_response\"]\n\n # Filtering the reference\n filtered_reference = response_checker.filter(ref_full_response)\n\n # Compare response and reference\n try:\n response_checker.compare(filtered_response, filtered_reference)\n except AssertionError as e:\n # print the assertion error message\n logging.error(\"Assertion Error: %s\" % str(e))\n # find name of test\n file_name = filename.split(\"/\")[-1]\n file_name = file_name[:-5]\n\n # create a folder\n dir_path = config[\"RESPONSE_FILE_PATH\"]\n if not os.path.exists(dir_path):\n os.makedirs(dir_path)\n\n # create path to ref and resp\n full_file_name_ref = dir_path + \"/reference_\" + file_name + \".txt\"\n full_file_name_resp = dir_path + \"/response_\" + file_name + \".txt\"\n\n json_filtered_reference = json.dumps(filtered_reference, indent=4)\n json_filtered_response = json.dumps(filtered_response, indent=4)\n\n # Save resp and ref as txt files in folder named outputs\n ref_resp2files(full_file_name_ref, json_filtered_reference)\n ref_resp2files(full_file_name_resp, json_filtered_response)\n\n # Print difference in console\n print_diff(full_file_name_ref, full_file_name_resp)\n\n raise", "def print_response(response):\n print(f\"Response for {url}\")\n if response.status_code == 200:\n # Green text\n print(f\"\\033[1;32;40m {response.status_code} {response.reason}\\033[1;37;40m\")\n else:\n # Red text\n print(f\"\\033[1;31;40m {response.status_code} {response.reason}\\033[1;37;40m\")\n # print(response.json())\n print(f\" {response.elapsed.total_seconds()} seconds elapsed.\")", "def print_diff(ip, common, diff1, diff2):\n logging.info('IP: %s', ip)\n if common:\n common = [' {0}'.format(elem) for elem in common]\n logging.info('\\n'.join(common))\n if diff1:\n diff = ['+ {0}'.format(elem) for elem in diff1]\n logging.info('\\n'.join(diff))\n if diff2:\n diff = ['- {0}'.format(elem) for elem in diff2]\n logging.info('\\n'.join(diff))", "def print_response(response):\n print(response)\n print(\"-\"*30)", "def 
showref_output(self, *arguments, **kwargs):\n return self.get_output('show-ref', *arguments, **kwargs)", "def print_request_response(request_response: json):\n print(\"Printing response:\")\n print(json.dumps(request_response, indent=4))", "def test_get_request_output(self):\n pass", "def view_full_response(line):\n reqs = yield load_reqlist(line)\n for req in reqs:\n if req.response:\n if len(reqs) > 1:\n print '-'*15 + (' %s ' % req.reqid) + '-'*15\n view_full_message(req.response)\n else:\n print \"Request %s does not have a response\" % req.reqid", "def run_diagnostics(self):\n request = {\n 'jsonrpc': '2.0',\n 'id': 0,\n 'method': 'ping'\n }\n result = CurlTestBase.send_request('&diag=1', request)\n response = '<html><body><pre>'\n response += cgi.escape(result.content)\n response += '</pre></body></html>'\n self.response.out.write(response)", "def debug_html(label, response):\n\n print(\"\\n\\n\\n\", \"*********\", label, \"\\n\")\n print(response.data.decode('utf8'))\n print(\"\\n\\n\")", "def dump_request_and_response(response: Response) -> str:\n return _dump_request(response.request) + _dump_response(response)", "def get_raw_diff(self, review):\r\n return self.http_request('/r/%s/diff/raw/' % review, {})", "def view_response_bytes(line):\n reqs = yield load_reqlist(line)\n for req in reqs:\n if req.response:\n if len(reqs) > 1:\n print '-'*15 + (' %s ' % req.reqid) + '-'*15\n print req.response.full_message\n else:\n print \"Request %s does not have a response\" % req.reqid", "def printable_reponse(self):\n resp = self.response\n msg = \"-- Reponse : {} -- \\r\\n\".format(resp.status_code)\n msg += \"Headers: {} \\r\\n\".format(str(resp.headers))\n msg += \"Body: {} \\r\\n\\r\\n\".format(str(resp.content))\n return msg", "def print_response(response):\n\n lines = response.split(\"\\n\")\n for line in lines:\n print line.strip()", "def __repr__(self):\n return pprint.saferepr(self.redirects)", "def annotate_diff(desc, stdout_e, stdout_a, stdout_e_strp, stdout_a_strp):\n id_str= \"%s_\" % desc\n result[id_str + \"stdout_expected\"] = stdout_e\n result[id_str + \"stdout_actual\"] = stdout_a\n result[id_str + \"stdout_expected_stripped\"]= stdout_e_strp\n result[id_str + \"stdout_actual_stripped\"] = stdout_a_strp\n result[id_str + \"stripped_diff\"] = '\\n'.join( difflib.ndiff( stdout_e_strp.splitlines(),\n stdout_a_strp.splitlines() ))\n result.fail(\"Expected standard output from %s does not match actual output.\" % desc)", "def print_diff(diff, out):\n for interface_name, interface in diff.iteritems():\n change_color_by_tag(interface)\n out.change_color('YELLOW')\n print '[[{Interface}]]'.format(Interface=interface_name)\n for member_name, member in interface.iteritems():\n if member_name == 'ExtAttributes':\n out.reset_color()\n print 'ExtAttributes'\n print_extattribute(member)\n elif member_name == 'Consts':\n out.reset_color()\n print ' Consts'\n print_const(member)\n elif member_name == 'Attributes':\n out.reset_color()\n print ' Attributes'\n print_attribute(member)\n elif member_name == 'Operations':\n out.reset_color()\n print ' Operations'\n print_operation(member)", "def PrintDiffs(message, lhs, rhs):\n dif = set(lhs).difference(rhs)\n if dif:\n print message, ', '.join(dif)", "def print_query_response(response):\n if response.text is not None:\n print(json.loads(response.text))\n else:\n logger.warning('Response not valid.')", "def get_git_diff_stdout() -> str:\n proc = subprocess.run(\n [\"git\", \"diff\", \"origin/main\", \"HEAD\"],\n capture_output=True,\n 
check=True,\n text=True,\n )\n return proc.stdout", "def print_results(request, response, procedure_name) -> None:\n procedure_names_dict = {\n 'SquareRoot': calculator_pb2_grpc.CalculatorServicer.SquareRoot.__name__,\n 'Square': calculator_pb2_grpc.CalculatorServicer.Square.__name__,\n }\n print_string = f\"Request: {procedure_names_dict[procedure_name]} for {request.value}.\\nResponse: {response.value}.\\n\"\n print(print_string)", "def test_difference(self, client):\n\n expected = {\n 'a': [0,2,4,6,8],\n 'b': [4,6,8,10,12,14,16],\n 'result': [0, 2]\n }\n\n res = client.post('/api/v1/difference', json={'a': expected['a'], 'b': expected['b'] })\n assert res.status_code == 200\n assert res.json['data'] == expected['result']\n assert res.json['status'] == 2000", "def _print_status(self):", "def show_refs(config, args):\n for item in lib.input_json_lines():\n yield config.repo.ref(item)", "def view_response_headers(line):\n reqs = yield load_reqlist(line)\n for req in reqs:\n if req.response:\n if len(reqs) > 1:\n print '-'*15 + (' %s ' % req.reqid) + '-'*15\n view_full_message(req.response, True)\n else:\n print \"Request %s does not have a response\" % req.reqid", "def disp_resp(self, resp, content=False):\n for field in dir(resp):\n if not content and field=='content': continue\n if field.startswith('_'): continue\n try:\n print '%s = %s\\n\\n' % (field,resp.__getattribute__(field))\n except (SyntaxError,KeyError):\n pass", "def compare():\n body: t.Any = request.json\n check_error({'input': {'old': {}, 'new': {}}}, body)\n response_new = rpc_search({'input': body['input']['new']})\n response_old = rpc_search({'input': body['input']['old']})\n\n modules_new = response_new['yang-catalog:modules']['module']\n modules_old = response_old['yang-catalog:modules']['module']\n\n if len(modules_new) == 0 or len(modules_old) == 0:\n abort(404, description='No hits found either in old or new input')\n\n new_mods = []\n for mod_new in modules_new:\n new_rev = mod_new['revision']\n new_name = mod_new['name']\n found = False\n new_rev_found = False\n for mod_old in modules_old:\n old_rev = mod_old['revision']\n old_name = mod_old['name']\n if new_name == old_name and new_rev == old_rev:\n found = True\n break\n if new_name == old_name and new_rev != old_rev:\n new_rev_found = True\n if not found:\n mod_new['reason-to-show'] = 'New module'\n new_mods.append(mod_new)\n if new_rev_found:\n mod_new['reason-to-show'] = 'Different revision'\n new_mods.append(mod_new)\n if len(new_mods) == 0:\n abort(404, description='No new modules or modules with different revisions found')\n output = {'output': new_mods}\n return output", "def pytest_assertrepr_compare(op: str, left: Any, right: Any) -> List[str]: # noqa: U100\n output = [\"Compare Result:\"]\n\n for line in list(dictdiffer.diff(left, right)):\n output.extend(pp.pformat(line).split(\"\\n\"))\n\n return output", "def print_response(response):\n #fyi this is not my code, i grabbed it from github\n #forgot to copy the url though\n for report in response.get('reports', []):\n columnHeader = report.get('columnHeader', {})\n dimensionHeaders = columnHeader.get('dimensions', [])\n metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])\n\n for row in report.get('data', {}).get('rows', []):\n dimensions = row.get('dimensions', [])\n dateRangeValues = row.get('metrics', [])\n\n for header, dimension in zip(dimensionHeaders, dimensions):\n print header + ': ' + dimension\n\n for i, values in enumerate(dateRangeValues):\n print 'Date 
range: ' + str(i)\n for metricHeader, value in zip(metricHeaders, values.get('values')):\n print metricHeader.get('name') + ': ' + value", "def format_response_for_display(self, response, case):\n out_bits = []\n parsed = self.parse_response(response, case)\n\n request = parsed['request']\n out_bits.append(request['request_line'])\n for header, value in request['headers'].items():\n out_bits.append('%s: %s' % (header, value))\n if request['body']:\n out_bits.extend(('', request['body']))\n\n out_bits.extend([''] * 2)\n\n response = parsed['response']\n out_bits.append(response['response_line'])\n for header, value in response['headers'].items():\n out_bits.append('%s: %s' % (header, value))\n if response['body']:\n out_bits.extend(('', response['body']))\n\n return '\\n'.join(out_bits)", "def output(self, response: str):\n\n # Try to output through the prefered medium, but revert to\n # backup if need to and log any errors found, for example:\n # logging.error(\"Problem!\")\n\n IO.stdout(response)", "def pretty_print_GET(req):\n print('{}\\n{}\\n{}\\n\\n{}'.format(\n '-----------START-----------',\n req.method + ' ' + req.url,\n '\\n'.join('{}: {}'.format(k, v) for k, v in req.headers.items()),\n req.body,\n ))", "def do_show(self, arg):\n obj = self.verify(arg, 1)\n if obj:\n print(obj)", "def test_update_refruns_info(self):\n run_number = 321123\n mixer.blend(\n RunReconstruction,\n reconstruction=RunReconstruction.EXPRESS,\n run=mixer.blend(OmsRun, run_number=run_number),\n is_reference=True,\n )\n req = RequestFactory().get(reverse(\"addrefrun:update_refruns_info\"))\n req.user = mixer.blend(User, user_privilege=User.SHIFTLEADER)\n resp = views.update_refruns_info(req)\n\n # Response is a JSON with a \"success\" key\n data = json.loads(resp.content.decode(\"utf-8\"))\n assert data[\"success\"] is True", "def pretty_print_response(line):\n args = shlex.split(line)\n if len(args) < 2:\n raise PappyException(\"Usage: pretty_print_request <format> <reqid(s)>\")\n reqids = args[1]\n\n reqs = yield load_reqlist(reqids)\n for req in reqs:\n if req.response:\n pretty_print_body(args[0], req.response.body)\n else:\n print 'No response associated with request %s' % req.reqid", "def print_standout(info):\n sys.stdout.write(\"Info: %s\" % info)\n sys.stdout.write(\"\\n\")\n sys.stdout.flush()", "def result_printer(self):\n for i in self.output:\n for item, value in i.items():\n if not isinstance(value, list) and \"http://\" not in value:\n print(f\"{item} : {value}\")\n print(20 * '-')", "def print_out():\n pass", "def print_response(responses):\n connection_error, empty_results = False, False\n\n # the \"global\" keyword tells python that these variables are defined\n # *outside* our print_response() function\n global query_type\n global verbose\n # you should mostly avoid global variables, but they are sometimes handy\n\n # First, check if we have gotten any errors when connecting to the api\n # enumerate() returns each item in a list along with the item's index\n for index, response in enumerate(responses):\n # an http status code is a number sent from the web server\n # everyone knows the dreaded \"404\" (not found)\n # there is also 200 (ok), 503 (service unavailable), 418 (i'm a teapot -- not joking!)\n # and dozens of others\n if response.status_code != requests.codes.OK:\n connection_error = True\n del responses[index]\n # we also check if the response is empty\n # (that means the api found no words matching our query)\n elif response.json() == []:\n empty_results = True\n del 
responses[index]\n\n # this is because Windows doesn't understand ANSI color codes >:(\n # e.g. \\033[0;36m means \"turn the text after me blue\" -- but windows is like \"??\"\n # so the colorama library translates the ANSI codes\n colorama_init()\n\n if responses == [] and connection_error == True:\n print(\"\\033[0;36mUnable to reach API.\\033[0m Check your internet connection or try again with more feeling.\")\n sys.exit(1)\n elif responses == [] and empty_results == True:\n # if the user has the BSD 'fortune' program installed, use it\n try:\n fortune = call(['fortune','-s'])\n except FileNotFoundError:\n # otherwise, get a fortune from the web\n fortune = fortune_cookie()\n if fortune:\n print(\"\\033[0;36mNo results found!\\033[0m Have a fortune cookie:\")\n print(fortune)\n else:\n print(\"\\033[0;36mNo results found!\\033[0m Try a paper dictionary instead?\")\n sys.exit(1)\n\n # quick note about JSON before we dive in further\n # json is a method of representing abitrarily complex objects\n # it comes from javascript (JavaScript Object Notation)\n # like most js stuff it is excellently useful and a touch unholy\n # together with xml, yaml, and cvs, it is the commonest way of\n # making text data machine-readable\n # to help you understand, here are some examples of json objects\n #\n # [ {'type': 'noun', 'definition': 'available money; cash.', 'example': None},\n # {'type': 'adjective', 'definition': 'willing or eager to do something.',\n # 'example': 'she is ready to die for her political convictions'} ]\n # a list containing two dictionaries\n # each dictionary contains keys of 'type', 'definition', and 'example'\n #\n # [ {'word': 'ready', 'score': 2147483647, 'tags': ['query'],\n # 'defs': ['n\\tpoised for action', 'v\\tprepare for eating by applying heat'] } ]\n # a list containing one dictionary with keys 'word','score','tags', and 'defs'\n # notice that the value of 'tags' and 'defs' are both lists!\n #\n # [ {'word': 'devil', 'score': 2147483647,\n # 'tags': ['query', 'pron:D EH1 V AH0 L ', 'ipa_pron:dˈɛvʌɫ'] } ]\n # a list containing one dictionary with keys 'word', 'score', and 'tags'\n #\n # [ {'word': 'coleslaw', 'score': 26424, 'tags': ['n']},\n # {'word': 'dressing', 'score': 26424, 'tags': ['n']},\n # {'word': 'greens', 'score': 26424, 'tags': ['n'] } ]\n # you can read this one by yourself :)\n\n if query_type == \"DEF\":\n for response in responses:\n # print out helpful info if the user asked for it\n if verbose > 1: print(response.url) # What we asked the remote server\n if verbose > 2: print(response.text) # The raw return JSON\n # check if this is the datamuse API or the owlbot API\n if re.search(r'datamuse',response.url):\n api = \"datamuse\"\n # the json() function turns the raw response (bytes of data)\n # into python lists, dictionaries, etc (like demonstrated above)\n # we take the first item in the list [0] because a dictionary query\n # only has one entry (the word and its definition)\n payload = response.json()[0]\n word = payload[\"word\"]\n # since 'defs' is a list, let's join it together into a string for printing\n definition = '\\n'.join(payload['defs'])\n lines = []\n for entry in payload['defs']:\n # get the word type and its definition out of the string\n # yes, you can have two (or more!) 
return values from a function in python\n # groups() returns a tuple of all the capture groups in the regex (see below)\n # notice that _def not def (b/c def is a keyword)\n type,_def = re.match(r'([^\\\\]*)\\t(.*)',entry).groups()\n # put the type and def back into a string :)\n # ljust(11) is left justify by 11 spaces (neat formatted columns!)\n line = f\"{type.ljust(11)} {_def}\"\n # put that line into a list\n lines.append(line)\n # go back up and get another ^\n # now join all the lines together with a new line character (\\n) between them\n definition = '\\n'.join(lines)\n # regex explained: ([^\\\\]*)\\t(.*)\n # () capturing group -- what we find in here, we keep, lol\n # [] character set -- match any of the characters in here\n # [^ ] negation -- do not match any of the characters in here\n # \\\\ *one* literal backslash -- b/c \\ is special in regex \\\\ means \\\n # * the previous thing, zero or more times\n # \\t literal tab character\n # . any character at all ever -- even ones you weren't thinking about when you typed it :D\n # all together: anything which is not a \\, followed by a \\t, followed by anything\n # capture the first bit (type), forget the \\t, caputre the second bit (_def)\n else:\n api = \"owlbot\"\n payload = response.json()\n word = re.search(r'dictionary/(.*)$',response.url).groups()[0]\n # regex explained: $ means \"end of the line\"\n # it's not a character like \\n or \\r\n # it is an anchor (^ means \"start of the line\")\n lines = []\n for entry in payload:\n line = f\"{entry['type'].ljust(11)} {entry['definition']}\"\n # ' ' * 12 means insert 12 spaces\n if entry['example']: line += f\"\\n{' ' * 12}Example:{entry['example']}\"\n lines.append(line)\n definition = '\\n'.join(lines)\n # lots of work, but now we print it! \\o/\n print(f\"\\033[0;36m{api}\\033[0m says word \\033[0;32m{word}\\033[0m means\")\n print(definition)\n if query_type == \"PRO\":\n # print out helpful info if the user asked for it\n if verbose > 1: print(\"The answer came from: \",responses[0].url)\n if verbose > 2: print(\"The raw JSON response was: \",responses[0].text)\n # no for loop and only one response (responses[0])\n # (b/c we use only one API for everything except dictionary lookups)\n payload = responses[0].json()[0]\n word = payload[\"word\"]\n for tag in payload['tags']:\n if re.match(r'pron:',tag):\n pron = re.match(r'pron:(.*)',tag).groups()[0]\n elif re.match(r'ipa_pron:',tag):\n ipa = re.match(r'ipa_pron:(.*)',tag).groups()[0]\n pronunciation = f\"\\033[0;32m{pron}\\033[0m (\\033[0;32m{ipa}\\033[0m)\"\n print(f\"\\033[0;36mdatamuse\\033[0m says word \\033[0;32m{word}\\033[0m is pronounced like {pronunciation}\")\n else:\n # print out helpful info if the user asked for it\n if verbose > 1: print(\"The answer came from: \",responses[0].url)\n if verbose > 2: print(\"The raw JSON response was: \",responses[0].text)\n payload = responses[0].json()\n # this will be fun to explain but. . .\n # 1. go through each entry. if it has tags (a list), turn the list into a string\n for entry in payload:\n entry['tags'] = ', '.join(entry['tags']) if 'tags' in entry else ''\n # 2. create a function which takes one argument (entry -- a dictionary)\n # and returns a formatted string with justification and coloring\n fentry = lambda entry: (f\"\\033[0;32m{entry['word'].rjust(13)}\\033[0m \"\n f\"\\033[0;36m{entry['tags'].rjust(13)}\\033[0m \")\n # 3. 
for each entry in the payload list, run fentry(entry)*\n # (all the entries are now formatted as strings)\n entries = list(map(fentry, payload))\n # 4. starting at 0, go up to len(entries)-1 in steps of 3 (0,3,6,9. . .)\n # for each step *i*, take a slice of entries from i to i+3\n # join them together\n # this creates a single string containing three list entries\n # store all the strings in a list in the variable lines\n lines = (''.join(entries[i:i+3]) for i in range(0,len(entries),3))\n print(\"\\033[0;36mdatamuse thinks these words may help!\\033[0m\".rjust(94))\n # 5. join the lines together with \\n in between each\n print('\\n'.join(lines))\n\n # * extra note here about map()\n # since you are interested in data stuff :3\n # there's two very common data operations\n # one is \"for every datum, do something to it\"\n # another is \"keep some data, get rid of others\"\n # the first is usually called map\n # the second is called filter\n # python has functions for both of them (helpfully called map() and fliter(), tada!)\n # both take two arguments: a function and a list (or tuple or dictionary)\n # eg. filter(my_function,my_list)\n # with map, the function should take one argument, transform it, and return it\n # eg. def my_function(x):\n # return x + 3\n # (or my_function = lambda x: x + 3)\n # that function adds three but you can do any kind of (very complex) transforms\n # with filter, the function should take one argument, return true if it should be kept,\n # or false if not\n # eg. def my_function(x):\n # if x > 34.99: return True\n # else: return False\n # (or my_function = lambda x: True if x > 34.99 else False )\n #\n # the tricky bit is that neither map() or filter() return your data (huh?)\n # they return iterators\n # what's an iterator, sam?\n # an iterator is like a soda vending machine\n # it has all the cans of pop inside,\n # but you stick your quarters in and get them out one by one\n # for example:\n # >>> lst = [1,2,3,4,5]\n # >>> map(lambda x: x + 3, lst)\n # <map object at 0x7f1c78673b38> <-- this is the iterator\n # . . . and here's the loop that \"iterates\" over it:\n # >>> for item_plus_three in map(lambda x: x + 3, lst):\n # ... print(item_plus_three)\n # ... \n # 4\n # 5\n # 6\n # 7\n # 8", "def PrintResponse(batch_job_helper, response_xml):\n response = batch_job_helper.ParseResponse(response_xml)\n\n if 'rval' in response['mutateResponse']:\n for data in response['mutateResponse']['rval']:\n if 'errorList' in data:\n print 'Operation %s - FAILURE:' % data['index']\n print '\\terrorType=%s' % data['errorList']['errors']['ApiError.Type']\n print '\\ttrigger=%s' % data['errorList']['errors']['trigger']\n print '\\terrorString=%s' % data['errorList']['errors']['errorString']\n print '\\tfieldPath=%s' % data['errorList']['errors']['fieldPath']\n print '\\treason=%s' % data['errorList']['errors']['reason']\n if 'result' in data:\n print 'Operation %s - SUCCESS.' 
% data['index']", "def pretty_print_query_response(response):\n if response.text is not None:\n pprint.pprint(json.loads(response.text))\n else:\n logger.warning('Response not valid.')", "def print_comparison(name, dates, times, orig_data, comp_data):\n\n # Output comparison of data\n print(' ORIGINAL COMPUTED')\n print(f' DATE TIME {name.upper():>9} {name.upper():>9} DIFFERENCE')\n print('------- ------ --------- --------- ----------')\n zip_data = zip(dates, times, orig_data, comp_data)\n for date, time, orig, comp in zip_data:\n diff = orig - comp\n print(f'{date} {time:>6} {orig:9.6f} {comp:9.6f} {diff:10.6f}')", "def print_unidiff(self):\n\n color_stdout(\"\\nTest failed! Result content mismatch:\\n\", schema='error')\n with open(self.result, \"r\") as result:\n with open(self.reject, \"r\") as reject:\n result_time = time.ctime(os.stat(self.result).st_mtime)\n reject_time = time.ctime(os.stat(self.reject).st_mtime)\n diff = difflib.unified_diff(result.readlines(),\n reject.readlines(),\n self.result,\n self.reject,\n result_time,\n reject_time)\n\n color_stdout.writeout_unidiff(diff)", "def debug():\r\n global CurrentState\r\n global CurrentInput\r\n global RESPONSEOPTIONS\r\n print(\"___________________________\")\r\n for state in RESPONSEOPTIONS:\r\n score = calcTotalScore(state, CurrentInput, CurrentState)\r\n print(state.id + \": \" + str(score) + \" ,\", end=\"\")\r\n print(\"\\n___________________________\")", "def test_str_method(self):\n r = Review()\n try:\n stdout, sys.stdout = sys.stdout, StringIO()\n print(r)\n output = sys.stdout.getvalue().strip()\n expect = '[{}] ({}) {}'.format(type(r).__name__, r.id, r.__dict__)\n assert output == expect\n finally:\n sys.stdout = stdout", "def print_output(self):\n print(\"Reference score: \" + str(self.PotTax_reference.sum().TFI))\n print(\"Intervention score: \" + str(self.PotTax_intervention.sum().TFI))\n return", "def print_response(prompt, response, sep=' '):\n print(bold(prompt), end=sep)\n print(response)", "def log_bad_request_details(r):\n logger.info(\"Response status code: \" + str(r.status_code))\n logger.info(\"Response Details: \" + json.dumps(r.json()))\n logger.info(\"History: \" + str(r.history))\n logger.info(\"Cookies: \" + str(requests.utils.dict_from_cookiejar(r.cookies)))\n logger.info(\"URL: \" + str(r.url))\n logger.info(\"Links: \" + str(r.links))", "def verbose(self, block: Block):\n print('\\n\\n==============================')\n print('Hash:\\t\\t', block.hash.hexdigest())\n print('Previous Hash:\\t', block.previous_hash.hexdigest())\n print('Nounce:\\t\\t', block.nonce)\n print('Data:\\t\\t', block.data)\n print('\\n\\n==============================')", "def verify_response(self, system_name, expected_api_response,\n expected_response_type, comparison_mode,\n request_id=None, generate_output_diff_file=\"Yes\"):\n arguments = {'system_name': system_name,\n 'expected_api_response': expected_api_response,\n 'expected_response_type': expected_response_type,\n 'comparison_mode': comparison_mode,\n 'request_id': request_id,\n 'generate_output_diff_file': generate_output_diff_file}\n wdesc = \"Verify API response with the expected API response\"\n pNote(wdesc)\n output_file = self.logsdir+\"/difference_output.log\"\n output_file = Utils.file_Utils.addTimeDate(output_file)\n generate_output_diff_file = Utils.rest_Utils.\\\n resolve_value_of_verify(generate_output_diff_file)\n\n try:\n arguments[\"expected_api_response\"] = Utils.rest_Utils.\\\n check_ext_get_abspath(arguments[\"expected_api_response\"],\n 
self.tc_path)\n\n credentials = Utils.data_Utils.\\\n get_user_specified_tag_values_in_tc(self.datafile, **arguments)\n\n credentials[\"expected_api_response\"] = Utils.rest_Utils.\\\n check_ext_get_abspath(credentials[\"expected_api_response\"],\n os.path.dirname(self.datafile))\n\n if request_id:\n response = Utils.data_Utils.get_object_from_datarepository(\n \"{0}_{1}_api_response_object\".format(system_name,\n credentials['request_id']))\n else:\n response = Utils.data_Utils.get_object_from_datarepository(\n \"{0}_api_response_object\".format(system_name))\n except Exception as exception:\n pNote(exception, \"error\")\n return False\n if any([x in credentials[\"comparison_mode\"] for x in [\"xpath=\", \"jsonpath=\", \"regex=\"]]) \\\n or credentials[\"comparison_mode\"] == \"\":\n status = self.rest_object.cmp_content_response(self.datafile, system_name, response,\n credentials['expected_api_response'],\n credentials['expected_response_type'],\n credentials['comparison_mode'])\n else:\n status = self.rest_object.cmp_response(response,\n credentials['expected_api_response'],\n credentials['expected_response_type'],\n output_file,\n credentials['generate_output_diff_file'])\n return status", "def test_show(self):\n _help = \"[Usage: show <class name> <id>] or \"\\\n \"[Usage: <class name>.show(<id>)]\\n\"\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"help show\")\n self.assertEqual(f.getvalue(), _help)", "async def difference(self, ctx, *, query):\n if not query:\n query = \"X Y\"\n urlquery = '+'.join(query.split())\n emb = hf.green_embed(f\"A lot of 'what is the difference between X and Y' kinds of questions can be answered \"\n f\"by typing something like the following into google (click the links to see the \"\n f\"search results):\\n\\n\"\n f\"['Japanese {query} difference']\"\n f\"(https://www.google.com/search?q=japanese+{urlquery}+difference)\\n\"\n f\"['{query} difference']\"\n f\"(https://www.google.com/search?q={urlquery}+difference)\\n\"\n f\"['{query} 違い']\"\n f\"(https://www.google.com/search?q={urlquery}+違い)\\n\"\n )\n await hf.safe_send(ctx, embed=emb)", "def compare_output(file1, file2):\n output = subprocess.getoutput(f\"diff -u -b {file1} {file2} | sed -n '12d;/^[-+]/p'\")\n\n if not output.strip():\n name = file1.rsplit('/', 1)[-1]\n print('Equivalent:', name)\n else:\n print(output)", "def test_get_details(self):\n\t\tactual_details = self.watcher.analyze(layers=[self.second_layer])\n\t\texpected_details = self.watcher.get_details()\n\t\t\n\t\tself.assertEqual(len(actual_details), len(expected_details), \"actual and expected details differ\")", "def printable_request(self):\n req = self.response.request\n msg = \"-- Request : {} | {} -- \\r\\n\".format(req.method, req.url)\n msg += \"Headers: {} \\r\\n\".format(str(req.headers))\n msg += \"Body: {} \\r\\n\\r\\n\".format(str(req.body))\n return msg", "def test_review(self):\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"create Review\")\n id = f.getvalue().strip()\n self.assertTrue(type(f), str)\n self.assertEqual(len(id), 36)\n\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"update Review \" + str(id) + \" name Manga\")\n self.assertTrue(type(f), str)\n self.assertEqual(f.getvalue().strip(), \"\")\n\n with patch('sys.stdout', new=StringIO()) as f:\n HBNBCommand().onecmd(\"show Review \" + str(id))\n self.assertTrue(\"name\" in f.getvalue().strip())\n self.assertTrue(\"Manga\" in f.getvalue().strip())", "def diffstat(self):\r\n url = 
'{0}/diffstat'.format(self.get_url())\r\n request = http.Request('GET', url)\r\n\r\n return request, parsers.parse_json", "def printLastReqStats(self):\n print(\"Tokens used by the request: \" + str(self.getLastHeader(\"req-tokens\")))\n print(\"Performed action: \" + str(self.getLastHeader(\"req-action\")))\n print(\"Was archive used for the query: \" + (self.getLastHeader(\"req-archive\") == \"1\" and \"Yes\" or \"No\"))", "def dump_response(line):\n # dump the data of a response\n args = shlex.split(line)\n reqs = yield load_reqlist(args[0])\n for req in reqs:\n if req.response:\n rsp = req.response\n if len(args) >= 2:\n fname = args[1]\n else:\n fname = req.path.split('/')[-1]\n\n with open(fname, 'w') as f:\n f.write(rsp.body)\n print 'Response data written to %s' % fname\n else:\n print 'Request %s does not have a response' % req.reqid", "def cmd_get_diff(base, target):\n return ['git', 'diff', base, target]", "def print_request(r):\n def fmt_token(t):\n return t['shape'] + t['after']\n\n print('Subject: ' + ''.join(map(fmt_token, filter(\n lambda x: x['where'] == 'subject', r['tokens']))))\n print(''.join(map(fmt_token, filter(\n lambda x: x['where'] == 'body', r['tokens']))))", "def print_comparison(name, dates, times, original_data, computed_data):\n \n # Output comparison of data\n print(' ORIGINAL COMPUTED')\n print(f' DATE TIME {name.upper():>9} {name.upper():>9} DIFFERENCE')\n print('------- ------ --------- --------- ----------')\n zip_data = zip(dates, times, original_data, computed_data)\n for date, time, orig, comp in zip_data:\n diff = orig - comp\n print(f'{date} {time:>6} {orig:9.6f} {comp:9.6f} {diff:10.6f}')", "def print_collisions(self):", "def _handle_info_response(self, resp, info, prev_info):\r\n if info.line_num != prev_info.line_num:\r\n return\r\n\r\n if resp['calltip']:\r\n info.editor.show_calltip('Arguments', resp['calltip'],\r\n signature=True,\r\n at_position=prev_info.position)\r\n\r\n if resp['name']:\r\n self.send_to_inspector.emit(\r\n resp['name'], resp['argspec'],\r\n resp['note'], resp['docstring'],\r\n not prev_info.auto)", "def do_diff_report():\n diff_report = render_diff_report()\n nori.core.email_loggers['report'].info(\n diff_report + '\\n\\n\\n' + ('#' * 76)\n )\n # use the output logger for the report files (for now)\n nori.core.output_logger.info('\\n\\n' + diff_report + '\\n\\n')", "def DumpDiff(blocks, line1, line2):\n for offset1, offset2, size in blocks:\n print offset1, offset2, size\n print offset1, size, \": \", line1[offset1:offset1+size]\n print offset2, size, \": \", line2[offset2:offset2+size]", "def print_help():\n print(\"Archive generated report to a web server. 
e.g.\")\n print(\"rm -rf /cs-shared/contrail_code_coverage/test_coverage\")\n print(\"cp -a build/coverage/controller/test_coverage \" +\n \"/cs-shared/contrail_code_coverage/\")\n print(\"http://10.84.5.100/cs-shared/contrail_code_coverage/test_coverage\")", "def get_traceroute_output(self):\n url = self.source['url']\n if 'post_data' in self.source:\n context = self.source['post_data']\n else:\n context = None\n status_code, content = self.urlopen(url, context=context)\n content = content.strip()\n regex = r'<pre.*?>(?P<traceroute>.*?)</pre>'\n pattern = re.compile(regex, re.DOTALL | re.IGNORECASE)\n try:\n traceroute = re.findall(pattern, content)[0].strip()\n except IndexError:\n # Manually append closing </pre> for partially downloaded page\n content = \"{0}</pre>\".format(content)\n traceroute = re.findall(pattern, content)[0].strip()\n return (status_code, traceroute)", "def response(self, data, response_type = \"terminal\"):\n if (response_type == \"terminal\"):\n print(data, end=\"\\n\")", "def output_debug_info(self):", "def print_requests(requests):\n\n if not _debug: return\n keys = get_sorted_keys(requests)\n\n print \"\\nIn Memory Structure:\"\n print \"{\"\n for key in keys:\n\tprint \" %s:[\" % (key)\n for request in requests[key]:\n\t\tprint \" (%s, %s),\" % (key, request.url)\n\tprint \" ]\"\n print \"}\\n\"", "def GetChangesSample():\n client = CreateClient()\n changes = client.GetChanges()\n for change in changes.entry:\n print change.title.text, change.changestamp.value", "def hit_details(hit_id, sandbox, recruiter):\n prolific_check(recruiter, sandbox)\n rec = by_name(recruiter, skip_config_validation=True)\n details = rec.hit_details(hit_id, sandbox)\n print(json.dumps(details, indent=4, default=str))", "def test_update_extra(self):\n output = StringIO()\n sys.stdout = output\n r1 = Rectangle(10, 10, 10, 10)\n r1.update(89, 2, 3, 4, 5, 6, 7)\n print(r1)\n sys.stdout = sys.__stdout__\n assert output.getvalue() == \"[Rectangle] (89) 4/5 - 2/3\\n\"", "def _unidiff_output(expected, actual):\n\n expected=expected.splitlines(1)\n actual=actual.splitlines(1)\n\n diff=difflib.unified_diff(expected, actual)\n\n return ''.join(diff)", "def explain_status(response):\n verbose = STATUS_LIST[response['code']]\n response['verbose'] = verbose\n return response", "def showref_rc(self, *arguments, **kwargs):\n return self.get_returncode('show-ref', *arguments, **kwargs)", "def w_print_diff(self, message, d1, d2, expectedResult=None):\n print(\"Message: '%s'\" % message)\n print(\"Message length: %d\" % len(message))\n if expectedResult:\n print(\"%-48s (expected)\" % self._format(expectedResult))\n print(\"%-48s (Std. lib. 
MD5)\" % self._format_hex(d1))\n print(\"%-48s (Pure Python MD5)\" % self._format_hex(d2))\n print()", "def test_get_results_verbose(self):\n\t\tpass", "def detailed_refcounts(self, rc, prev):\r\n print (\" sum detail refcount=%-8d\"\r\n \" sys refcount=%-8d\"\r\n \" change=%-6d\"\r\n % (self.n, rc, rc - prev))\r\n self.output()", "def format_response_for_docs(self, response, case):\n parsed = self.parse_response(response, case)\n formatted = {\n 'request': {'body': parsed['request']['body']},\n 'response': {'body': parsed['response']['body']}\n }\n\n request = parsed['request']\n headers = request['request_line']\n if request['headers']:\n headers += '\\n' + '\\n'.join(\n '%s: %s' % pair for pair in request['headers'].items())\n formatted['request']['headers'] = headers\n\n response = parsed['response']\n headers = response['response_line']\n if response['headers']:\n headers += '\\n' + '\\n'.join(\n '%s: %s' % pair for pair in response['headers'].items())\n formatted['response']['headers'] = headers\n\n for phase in ('request', 'response'):\n for part in ('headers', 'body'):\n if (formatted[phase][part] and\n not formatted[phase][part].endswith('\\n')):\n formatted[phase][part] += '\\n'\n\n return formatted", "def PrintReport(self):\n print('=== Summary of Baidu Real-time Bidding test ===')\n print('Requests sent: %d' % self._requests_sent)\n print('Responses with a 200/OK HTTP response code: %d' % self._responses_ok)\n print('Responses with a non-200 HTTP response code: %d' % len(self._error))\n print('Good responses (no problems found): %d' % len(self._good))\n print('Invalid (unparseable) with a 200/OK HTTP response code: %d' % len( self._invalid))\n print('Parseable responses with problems: %d' % len(self._problematic))\n if self._responses_successful_without_bids == self._requests_sent:\n print('ERROR: None of the responses had bids!')", "def test_get_label_with_diffs(self) -> None:\n review_request = self.create_review_request(create_repository=True)\n self.create_diffset(review_request)\n\n self.request.user = review_request.submitter\n\n self.assertEqual(\n self.action.get_label(context=Context({\n 'review_request': review_request,\n 'request': self.request,\n })),\n 'Update Diff')", "def testSimpleEchoMethodReturnsVersion(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'pass',\n 'params': [39, 'steps']})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n result = yield resource.deferred_render_POST(request)\n response = loads(result)\n self.assertEqual('2.0', response['jsonrpc'])", "def print_results(self):\n pass", "def testShowHistory(self):\n self.maxDiff = None\n de_object = de.DockerExplorerTool()\n de_object._explorer = self.explorer_object\n # We pick one of the container IDs.\n container_id = container.GetAllContainersIDs(self.docker_directory_path)[0]\n with unittest.mock.patch('sys.stdout', new=StringIO()) as fake_output:\n de_object.docker_directory = self.docker_directory_path\n de_object.ShowHistory(container_id)\n expected_string = \"\"\"{\n \"sha256:8ac48589692a53a9b8c2d1ceaa6b402665aa7fe667ba51ccc03002300856d8c7\": {\n \"created_at\": \"2018-04-05T10:41:28.876407+00:00\", \n \"container_cmd\": \"/bin/sh -c #(nop) CMD [\\\\\"sh\\\\\"]\", \n \"size\": 0\n }\n}\n\n\"\"\"\n\n self.assertEqual(expected_string, fake_output.getvalue())", "def print_test_comparison(test_name, expected, result):\n line = \"\\n\"\n line += 
\"-\" * 60 + \"\\n\"\n line += \"{}\\n\".format(test_name)\n line += \"-\" * 60 + \"\\n\"\n line += \"-\" * 26 + \"EXPECTED\" + \"-\" * 26 + \"\\n\"\n line += \"{}\\n\".format(expected)\n line += \"-\" * 28 + \"END\" + \"-\" * 29 + \"\\n\"\n line += \"-\" * 27 + \"RESULT\" + \"-\" * 27 + \"\\n\"\n line += \"{}\\n\".format(result)\n line += \"-\" * 28 + \"END\" + \"-\" * 29 + \"\\n\"\n line += \"\\n\"\n return line", "def list_refs(self):\n print('----\\nREFs\\n----')\n self._print_dict(self.refs)", "def print_response(response):\n for report in response.get('reports', []):\n columnHeader = report.get('columnHeader', {})\n dimensionHeaders = columnHeader.get('dimensions', [])\n metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])\n rows = report.get('data', {}).get('rows', [])\n\n for row in rows:\n dimensions = row.get('dimensions', [])\n dateRangeValues = row.get('metrics', [])\n\n for header, dimension in zip(dimensionHeaders, dimensions):\n print(header + ': ' + dimension)\n\n for i, values in enumerate(dateRangeValues):\n print('Date range (' + str(i) + ')')\n for metricHeader, value in zip(metricHeaders, values.get('values')):\n\t print(metricHeader.get('name') + ': ' + value)", "def Diff(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def print_result(h, deriv, rel_error):\n\tprint(f\"For h = {h}, f'(x) = {round(deriv, 8)}\\t relative error = {round(rel_error, 8)}\")", "def display(self, request):\n l = []\n try:\n result = self.stream(l.append, request)\n if result is not None:\n return result\n return l\n except:\n return [webutil.formatFailure(failure.Failure())]", "def print_response(response):\n for report in response.get('reports', []):\n rows = report.get('data', {}).get('rows', [])\n for row in rows:\n print(row)", "def show_diff(text, n_text):\n seqm = difflib.SequenceMatcher(None, text, n_text)\n output= []\n for opcode, a0, a1, b0, b1 in seqm.get_opcodes():\n if opcode == 'equal':\n output.append(seqm.a[a0:a1])\n elif opcode == 'insert':\n output.append(colored(seqm.b[b0:b1],'red'))\n elif opcode == 'delete':\n output.append(colored(seqm.a[a0:a1],'blue'))\n elif opcode == 'replace':\n output.append(colored(seqm.b[b0:b1],'green'))\n return ''.join(output)", "async def test_can_ipython_help_double(request_format): # type: ignore[no-untyped-def]\n given = \" bruh??\\nprint('test')\\n#test?\"\n expected = ' bruh??\\nprint(\"test\")\\n# test?'\n\n response: HTTPResponse = await request_format(\n formatter=\"black\",\n code=[given],\n options={},\n )\n json_result = _check_http_code_and_schema(\n response=response,\n expected_code=200,\n expected_schema=EXPECTED_FROMAT_SCHEMA,\n )\n assert json_result[\"code\"][0][\"code\"] == expected", "def update_outputs_recon(self, new):\n self.stages[\"Diffusion\"].define_inspect_outputs()", "def _retrieve_html_diff(self, oldrevid, newrevid):\n req = requests.get(self.endpoint,\n {\n 'action': 'compare',\n 'fromrev': oldrevid,\n 'torev': newrevid,\n 'uselang': 'en',\n 'format': 'json',\n })\n req.raise_for_status()\n return req.json().get('compare', {}).get('*')", "def print_resp(self, resp: dict):\n if \"details\" in resp:\n if isinstance(resp[\"details\"], str):\n self.write_string(resp[\"details\"])\n if isinstance(resp[\"details\"], Table):\n self.write_table(resp[\"details\"])\n\n if \"data\" in resp:\n for item in resp[\"data\"]:\n if not isinstance(item, dict):\n 
continue\n item_type = item.get(\"type\")\n if item_type == \"string\":\n self.write_string(item[\"data\"])\n elif item_type == \"table\":\n table = Table(None)\n table.set_rows(item[\"rows\"])\n self.write_table(table)\n elif item_type == \"error\":\n self.write_error(item[\"data\"])\n elif item_type == \"dict\":\n self.write_dict(item[\"data\"])\n\n if \"details\" not in resp and \"data\" not in resp:\n self.write_string(\"Response is not correct.\")", "def show_diff(seqm):\n output= []\n for opcode, a0, a1, b0, b1 in seqm.get_opcodes():\n if opcode == 'equal':\n output.append(seqm.a[a0:a1])\n elif opcode == 'insert':\n output.append(\"{+\" + seqm.b[b0:b1] + \"+}\")\n elif opcode == 'delete':\n output.append(\"{-\" + seqm.a[a0:a1] + \"-}\")\n elif opcode == 'replace':\n output.append(\"<del>\" + seqm.a[a0:a1] + \"</del><ins>\" + seqm.b[b0:b1] + \"</ins>\")\n else:\n raise RuntimeError(\"unexpected opcode\")\n return ''.join(output)" ]
[ "0.6958349", "0.6202376", "0.6095348", "0.5904432", "0.5885077", "0.5830992", "0.5810929", "0.57152325", "0.5700688", "0.5698527", "0.56759423", "0.56628805", "0.56229484", "0.55711806", "0.55609244", "0.55433357", "0.5525367", "0.549411", "0.5481868", "0.5465276", "0.5456778", "0.5436791", "0.5427032", "0.5395528", "0.537482", "0.53715324", "0.5352058", "0.52979964", "0.529103", "0.5286781", "0.5263692", "0.5253873", "0.5238516", "0.52338076", "0.52326655", "0.5222756", "0.5216639", "0.52100646", "0.5207699", "0.520402", "0.5195409", "0.5183336", "0.51778513", "0.5169681", "0.51690775", "0.515832", "0.51549006", "0.51453733", "0.51450944", "0.5129695", "0.5121599", "0.5103884", "0.50966275", "0.50954294", "0.50865245", "0.5085674", "0.50803286", "0.5078721", "0.5065026", "0.5063244", "0.5061596", "0.50606257", "0.5055133", "0.5054318", "0.5051128", "0.5044896", "0.504332", "0.50277853", "0.50194496", "0.5017704", "0.5013008", "0.5012806", "0.5008499", "0.500785", "0.50011814", "0.4985189", "0.49839443", "0.49830765", "0.49782133", "0.49716958", "0.49682155", "0.49644604", "0.4955759", "0.49553964", "0.495345", "0.49453044", "0.493989", "0.49396065", "0.49317467", "0.4931334", "0.49308383", "0.49304804", "0.49258137", "0.49248886", "0.49244624", "0.4914471", "0.4914284", "0.4913465", "0.49082297", "0.49041563" ]
0.73701763
0
Generate a right-click menu for the items
def context_menu(self, treeview, position): all_item = get_current_item(self,treeview,single=False) if len(all_item) == 1: item = all_item[0] list_operations = ['Print attrs','-','Plot Hist', 'Plot 2D'] action,actions = get_actions(treeview,position,list_operations) if action == actions['Print attrs']: send_dict_to_console(self,item,treeview) #print_attributes(self,item,treeview) if action == actions['Plot Hist']: plot_histogram(self,item,treeview) if action == actions['Plot 2D']: plot2d(self,item,treeview) elif len(all_item) == 2: item0,item1 = all_item list_operations = ['Plot Scatter','Plot Line'] action,actions = get_actions(treeview,position,list_operations) if action == actions['Plot Scatter']: plot1D(self,item0,item1,treeview,plot='scatter') if action == actions['Plot Line']: plot1D(self,item0,item1,treeview,plot='line')
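Note: this document depends on application-specific helpers (get_current_item, get_actions, send_dict_to_console, plot_histogram, plot2d, plot1D), so it is not runnable on its own. A minimal self-contained sketch of the same right-click pattern, assuming PyQt5 and a position delivered by the tree view's customContextMenuRequested signal (names below are illustrative, not the original API):

from PyQt5 import QtWidgets

def show_context_menu(treeview, position):
    # one menu entry per supported operation
    menu = QtWidgets.QMenu(treeview)
    actions = {name: menu.addAction(name)
               for name in ("Print attrs", "Plot Hist", "Plot 2D")}
    # exec_ pops the menu up at the cursor and returns the chosen QAction (or None)
    chosen = menu.exec_(treeview.viewport().mapToGlobal(position))
    if chosen is actions["Print attrs"]:
        print("print attrs selected")   # stand-in for send_dict_to_console(...)
    elif chosen is actions["Plot Hist"]:
        print("plot hist selected")     # stand-in for plot_histogram(...)
    elif chosen is actions["Plot 2D"]:
        print("plot 2D selected")       # stand-in for plot2d(...)

Hooking it up takes two lines on the view: treeview.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) and treeview.customContextMenuRequested.connect(lambda pos: show_context_menu(treeview, pos)).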
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_menus( self ):", "def create_menu():", "def addContextMenuItems(*args):", "def rightClickMenu(controlparent,control,qp,menuarray,rightclickfunction,added_arg):\r\n if menuarray == []:\r\n return 0;\r\n Rmnu = QtGui.QMenu(controlparent)\r\n for i, itm in enumerate(menuarray):\r\n newmenuitem = QtGui.QAction(itm, controlparent)\r\n controlparent.connect(newmenuitem, QtCore.SIGNAL(\"triggered()\"), lambda passarg=(itm,i,added_arg): rightclickfunction(passarg))\r\n Rmnu.addAction(newmenuitem)\r\n\r\n qp.setY(qp.y() + 0)\r\n qp.setX(qp.x() + 0)\r\n Rmnu.exec_(control.mapToGlobal(qp))\r\n del(Rmnu)", "def _createUtilityMenuItems(ned, node):\n pass", "def show_right_mouse_menu(self, pos):\n\n globalpos = self.viewport().mapToGlobal(pos)\n\n if not self.selectionModel().hasSelection():\n print(\"No selection\")\n # Build menu without selection, feed an empty list\n menu = self.build_item_menu([])\n menu.exec_(globalpos)\n return\n\n active = self.currentIndex() # index under mouse\n active = active.sibling(active.row(), 0) # get first column\n\n # move index under mouse\n indices = self.get_indices()\n if active in indices:\n indices.remove(active)\n\n indices.append(active)\n\n # Extend to the sub-items\n all_indices = self.extend_to_children(indices)\n nodes = [dict(i.data(InventoryModel.NodeRole)) for i in all_indices\n if i.parent().isValid()]\n\n if self._hierarchy_view:\n # Ensure no group node\n nodes = [n for n in nodes if not n.get(\"isGroupNode\")]\n\n menu = self.build_item_menu(nodes)\n menu.exec_(globalpos)", "def MakeCustomMenu(content): #py:MakeCustomMenu\n RUR._MakeCustomMenu_(content)", "def about_right_click(event):\n popup_menu = Menu(tearoff=0)\n popup_menu.add_command(label='Copy')\n\n popup_menu.post(event.x_root, event.y_root)", "def rightshow(self):\n self._showitemlist(ITEMLIST_RIGHT)", "def showRightClickMenu(self,pos):\n\t\tprint('bStackWidget.showRightClickMenu()')\n\t\tmenu = QtWidgets.QMenu()\n\t\t#self.menu = QtWidgets.QMenu()\n\n\t\tnumChannels = self.mySimpleStack.numChannels # number of channels in stack\n\t\tmaxNumChannels = self.mySimpleStack.maxNumChannels\n\t\t#actions = ['Channel 1', 'Channel 2', 'Channel 3', 'RGB', 'Channel 1 Mask', 'Channel 2 Mask', 'Channel 3 Mask']\n\t\tprint(' showRightClickMenu() numChannels:', numChannels, 'maxNumChannels:', maxNumChannels)\n\t\tactionsList = []\n\t\tisEnabledList = []\n\t\tisCheckedList = []\n\t\t# abb oct 2020, maybe put these back in\n\t\t'''\n\t\tfor i in range(numChannels):\n\t\t\tchanNumber = i + 1\n\t\t\tactionsList.append(f'Channel {chanNumber}')\n\t\t\tisEnabled = self.mySimpleStack.hasChannelLoaded(chanNumber)\n\t\t\tisEnabledList.append(isEnabled)\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == chanNumber\n\t\t\tisCheckedList.append(isChecked)\n\t\t'''\n\t\tfor i in range(numChannels):\n\t\t\tchanNumber = i + 1\n\t\t\tactionsList.append(f'Channel {chanNumber} Mask')\n\t\t\tactualChanNumber = maxNumChannels + i + 1\n\t\t\tisEnabled = self.mySimpleStack.hasChannelLoaded(actualChanNumber)\n\t\t\tisEnabledList.append(isEnabled)\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == actualChanNumber\n\t\t\tisCheckedList.append(isChecked)\n\t\t'''\n\t\tfor i in range(numChannels):\n\t\t\tchanNumber = i + 1\n\t\t\tactionsList.append(f'Channel {chanNumber} Skel')\n\t\t\tactualChanNumber = 2 * maxNumChannels + i + 1\n\t\t\tisEnabled = self.mySimpleStack.hasChannelLoaded(actualChanNumber)\n\t\t\tisEnabledList.append(isEnabled)\n\t\t\tisChecked = 
self.getStackView().displayStateDict['displayThisStack'] == actualChanNumber\n\t\t\tisCheckedList.append(isChecked)\n\t\t'''\n\n\t\t# abb oct 2020, maybe put this back in ???\n\t\t'''\n\t\tif numChannels>1:\n\t\t\tactionsList.append('RGB')\n\t\t\tisEnabledList.append(True)\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == 'rgb' # lower case !!!\n\t\t\tisCheckedList.append(isChecked)\n\t\t'''\n\n\t\tfor i, actionStr in enumerate(actionsList):\n\t\t\t# make an action\n\t\t\tcurrentAction = QtWidgets.QAction(actionStr, self, checkable=True)\n\t\t\t# decide if it is checked\n\t\t\tisEnabled = isEnabledList[i]\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == i+1\n\t\t\tisChecked = isCheckedList[i]\n\n\t\t\tcurrentAction.setEnabled(isEnabled)\n\t\t\tcurrentAction.setChecked(isChecked)\n\t\t\t# add to menu\n\t\t\tmenuAction = menu.addAction(currentAction)\n\n\t\t#\n\t\t# do again for edt\n\t\tedtIdx = 3 # (raw==0, mask==1, skel==2, edt==3)\n\t\tactionsList = []\n\t\tisEnabledList = []\n\t\tisCheckedList = []\n\t\tfor i in range(numChannels):\n\t\t\tchanNumber = i + 1\n\t\t\tactionsList.append(f'Channel {chanNumber} EDT')\n\t\t\tactualChanNumber = (maxNumChannels * edtIdx) + i + 1\n\t\t\tisEnabled = self.mySimpleStack.hasChannelLoaded(actualChanNumber)\n\t\t\tprint(' edt actualChanNumber:', actualChanNumber, 'isEnabled:', isEnabled)\n\t\t\tisEnabledList.append(isEnabled)\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == actualChanNumber\n\t\t\tisCheckedList.append(isChecked)\n\t\tfor i, actionStr in enumerate(actionsList):\n\t\t\t# make an action\n\t\t\tcurrentAction = QtWidgets.QAction(actionStr, self, checkable=True)\n\t\t\t# decide if it is checked\n\t\t\tisEnabled = isEnabledList[i]\n\t\t\tisChecked = self.getStackView().displayStateDict['displayThisStack'] == i+1\n\t\t\tisChecked = isCheckedList[i]\n\n\t\t\tcurrentAction.setEnabled(isEnabled)\n\t\t\tcurrentAction.setChecked(isChecked)\n\t\t\t# add to menu\n\t\t\tmenuAction = menu.addAction(currentAction)\n\n\t\t#\n\t\tmenu.addSeparator()\n\n\t\t#\n\t\t# view\n\t\t# abb oct 2020, maybe put these back in ???\n\t\t#actions = ['Image', 'Sliding Z', 'Nodes', 'Edges']\n\t\tactions = ['Image']\n\t\tfor actionStr in actions:\n\t\t\t# make an action\n\t\t\tcurrentAction = QtWidgets.QAction(actionStr, self, checkable=True)\n\t\t\t# decide if it is checked\n\t\t\tisChecked = False\n\t\t\tif actionStr == 'Image':\n\t\t\t\tisChecked = self.getStackView().displayStateDict['showImage']\n\t\t\telif actionStr == 'Sliding Z':\n\t\t\t\tisChecked = self.getStackView().displayStateDict['displaySlidingZ']\n\t\t\telif actionStr == 'Nodes':\n\t\t\t\tisChecked = self.getStackView().displayStateDict['showNodes']\n\t\t\telif actionStr == 'Edges':\n\t\t\t\tisChecked = self.getStackView().displayStateDict['showEdges']\n\t\t\tcurrentAction.setChecked(isChecked)\n\t\t\tcurrentAction.triggered.connect(self.actionHandler)\n\t\t\t# add to menu\n\t\t\t#menuAction = self.menu.addAction(currentAction)\n\t\t\tmenuAction = menu.addAction(currentAction)\n\n\t\tmenu.addSeparator()\n\n\t\t#\n\t\t# panels\n\n\t\t'''\n\t\tannotationsAction = QtWidgets.QAction('Left Toolbar', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showLeftToolbar'])\n\t\t#annotationsAction.setShortcuts('[')\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# nodes\n\t\tannotationsAction = QtWidgets.QAction('Node List', self, 
checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showNodeList'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# edges\n\t\tannotationsAction = QtWidgets.QAction('Edge List', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showEdgeList'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# search\n\t\tannotationsAction = QtWidgets.QAction('Search List', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showSearch'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# annotations\n\t\tannotationsAction = QtWidgets.QAction('Annotation List', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showAnnotations'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# contrast\n\t\tcontrastAction = QtWidgets.QAction('Contrast Panel', self, checkable=True)\n\t\tcontrastAction.setChecked(self.options['Panels']['showContrast'])\n\t\ttmpMenuAction = menu.addAction(contrastAction)\n\t\t'''\n\n\t\t'''\n\t\t# status toolbar\n\t\tannotationsAction = QtWidgets.QAction('Status Panel', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showStatus'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t'''\n\t\t# line profile toolbar\n\t\tannotationsAction = QtWidgets.QAction('Line Profile Panel', self, checkable=True)\n\t\tannotationsAction.setChecked(self.options['Panels']['showLineProfile'])\n\t\ttmpMenuAction = menu.addAction(annotationsAction)\n\t\t'''\n\n\t\t# napari\n\t\tmenu.addSeparator()\n\t\tnapariAction = QtWidgets.QAction('Napari', self, checkable=False)\n\t\ttmpMenuAction = menu.addAction(napariAction)\n\n\t\tmenu.addSeparator()\n\t\t# make square\n\t\tmakeSquareAction = QtWidgets.QAction('Square', self, checkable=True)\n\t\tmakeSquareAction.setChecked(False)\n\t\ttmpMenuAction = menu.addAction(makeSquareAction)\n\n\t\tmenu.addSeparator()\n\n\t\t# save image\n\t\tsaveImageAction = QtWidgets.QAction('Save Image', self, checkable=False)\n\t\ttmpMenuAction = menu.addAction(saveImageAction)\n\n\t\t# save movie\n\t\tsaveMovieAction = QtWidgets.QAction('Save Movie', self, checkable=False)\n\t\ttmpMenuAction = menu.addAction(saveMovieAction)\n\n\t\t# options\n\t\t'''\n\t\tmenu.addSeparator()\n\t\toptionsAction = QtWidgets.QAction('Options', self, checkable=False)\n\t\ttmpMenuAction = menu.addAction(optionsAction)\n\t\t'''\n\n\t\t# refresh tracing\n\t\tmenu.addSeparator()\n\t\trefeshAction = QtWidgets.QAction('Refresh', self, checkable=False)\n\t\ttmpMenuAction = menu.addAction(refeshAction)\n\n\t\t#\n\t\t# edits\n\t\tself.addEditMenu(menu)\n\n\t\t#\n\t\t# get the action selection from user\n\n\t\tprint('=== bStackWidget.showRightClickMenu()')\n\t\t# was this\n\t\tuserAction = menu.exec_(self.mapToGlobal(pos))\n\t\t# now this\n\t\t'''\n\t\tself.menu.move(self.mapToGlobal(pos))\n\t\tself.menu.show()\n\t\t'''\n\n\t\t#userAction = None\n\t\tif userAction is None:\n\t\t\t# abort when no menu selected\n\t\t\treturn\n\t\tuserActionStr = userAction.text()\n\t\tprint(' showRightClickMenu() userActionStr:', userActionStr)\n\t\tsignalName = 'bSignal ' + userActionStr\n\t\tuserSelectedMenu = True\n\n\t\tdoStackRefresh = False\n\n\t\t# image\n\t\tmaxNumChannels = self.mySimpleStack.maxNumChannels\n\t\tif userActionStr == 'Channel 1':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 1\n\t\t\t#doStackRefresh = 
True\n\t\t\tself.optionsChange('Panels', 'displayThisStack', value=1, doEmit=True)\n\t\t\t#self.getStackView().displayStateChange('displayThisStack', value=1)\n\t\telif userActionStr == 'Channel 2':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 2\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=2)\n\t\telif userActionStr == 'Channel 3':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 3\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=3)\n\n\t\telif userActionStr == 'Channel 1 Mask':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 4\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=4)\n\t\telif userActionStr == 'Channel 2 Mask':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 4+1\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=4+1)\n\t\telif userActionStr == 'Channel 3 Mask':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 4+2\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=4+2)\n\n\t\telif userActionStr == 'Channel 1 Skel':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 7\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=7)\n\t\telif userActionStr == 'Channel 2 Skel':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 7+1\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=7+1)\n\t\telif userActionStr == 'Channel 3 Skel':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 7+2\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=7+2)\n\n\t\t# EDT\n\t\telif userActionStr == 'Channel 1 EDT':\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=10)\n\t\telif userActionStr == 'Channel 2 EDT':\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=10+1)\n\t\telif userActionStr == 'Channel 3 EDT':\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value=10+2)\n\n\n\t\telif userActionStr == 'RGB':\n\t\t\t#self.getStackView().displayStateDict['displayThisStack'] = 'rgb'\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displayThisStack', value='rgb')\n\n\t\t#\n\t\t# view of tracing\n\t\telif userActionStr == 'Image':\n\t\t\tself.getStackView().displayStateChange('showImage', toggle=True)\n\t\t\tdoStackRefresh = True\n\t\t\t#self.displayStateDict['showImage'] = not self.displayStateDict['showImage']\n\t\telif userActionStr == 'Sliding Z':\n\t\t\t#self.getStackView().displayStateDict['displaySlidingZ'] = not self.getStackView().displayStateDict['displaySlidingZ']\n\t\t\t#doStackRefresh = True\n\t\t\tself.getStackView().displayStateChange('displaySlidingZ', toggle=True)\n\t\telif userActionStr == 'Nodes':\n\t\t\t#optionsChange('Panels', 'showLeftToolbar', toggle=True, doEmit=True)\n\t\t\tself.getStackView().displayStateDict['showNodes'] = not self.getStackView().displayStateDict['showNodes']\n\t\t\tdoStackRefresh = True\n\t\telif userActionStr == 'Edges':\n\t\t\tself.getStackView().displayStateDict['showEdges'] = not self.getStackView().displayStateDict['showEdges']\n\t\t\tdoStackRefresh = True\n\n\t\t#\n\t\t# toolbars\n\t\telif userActionStr == 'Left 
Toolbar':\n\t\t\tself.optionsChange('Panels', 'showLeftToolbar', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showLeftToolbar'] = not self.options['Panels']['showLeftToolbar']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Contrast Panel':\n\t\t\tself.optionsChange('Panels', 'showContrast', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showContrast'] = not self.options['Panels']['showContrast']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Node List':\n\t\t\tself.optionsChange('Panels', 'showNodeList', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showNodeList'] = not self.options['Panels']['showNodeList']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Edge List':\n\t\t\tself.optionsChange('Panels', 'showEdgeList', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showEdgeList'] = not self.options['Panels']['showEdgeList']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Search List':\n\t\t\tself.optionsChange('Panels', 'showSearch', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showSearch'] = not self.options['Panels']['showSearch']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Annotation List':\n\t\t\tself.optionsChange('Panels', 'showAnnotations', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showSearch'] = not self.options['Panels']['showSearch']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Status Panel':\n\t\t\tself.optionsChange('Panels', 'showStatus', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showStatus'] = not self.options['Panels']['showStatus']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Line Profile Panel':\n\t\t\tself.optionsChange('Panels', 'showLineProfile', toggle=True, doEmit=True)\n\t\t\t#self.options['Panels']['showLineProfile'] = not self.options['Panels']['showLineProfile']\n\t\t\t#self.mainWindow.updateDisplayedWidgets()\n\t\telif userActionStr == 'Caiman':\n\t\t\tself.optionsChange('Panels', 'showCaiman', toggle=True, doEmit=True)\n\n\t\t# other\n\t\telif userActionStr == 'Options':\n\t\t\toptionsDialog = bimpy.interface.bOptionsDialog(self, self)\n\t\telif userActionStr == 'Napari':\n\t\t\tself.openNapari()\n\t\telif userActionStr == 'Square':\n\t\t\tself.myStackView2.toggleMakeSquare()\n\t\t\t#self.resizeEvent(QtGui.QResizeEvent(self.size(), QtCore.QSize()))\n\t\t\t#self.repaint()\n\t\telif userActionStr == 'Save Image':\n\t\t\tself.saveImage()\n\t\telif userActionStr == 'Save Movie':\n\t\t\tself.saveMovie()\n\t\telif userActionStr == 'Refresh':\n\t\t\tself.getStackView()._preComputeAllMasks()\n\n\t\telse:\n\t\t\tprint(' showRightClickMenu() -->> no action taken for userActionStr:', userActionStr)\n\t\t\tuserSelectedMenu = False\n\n\t\t# emit a signal\n\t\t# todo: this is emitting when self.getStackView().displayStateDict is not changing, e.g. 
for user action 'Contrast' and 'Annotations'\n\t\t'''\n\t\tif userSelectedMenu:\n\t\t\tself.setSlice() # update\n\t\t\tself.displayStateChangeSignal.emit(signalName, self.getStackView().displayStateDict)\n\t\t'''\n\n\t\tif doStackRefresh:\n\t\t\tself.getStackView().setSlice()\n\n\t\t#return False\n\t\t#print('right click menu return')\n\t\treturn", "def onRightClick(self, event): \n\t\tpt = event.GetPosition()\n\t\titem, flags = self.tree.HitTest(pt)\n\t\tif not item:\n\t\t\tLogging.info(\"No item to select\", kw = \"ui\")\n\t\t\treturn\n\t\tself.tree.SelectItem(item)\n\t\tself.selectedItem = item\n\t\tself.PopupMenu(self.menu, event.GetPosition())", "def get_menu_items():\n\n pass", "def addMenu():\n mb.addAction(actionAccessories)\n actionAccessories.setVisible(True)", "def get_menus():\n\n pass", "def create_menu():\n MenuData = [\n (\"&Draw Variables\",drawable.ask),\n (\"&Show Variables\",printall),\n (\"&Print Variables\",printval),\n (\"&Edit Variable\",edit),\n (\"&Rename Variable\",rename),\n (\"&Forget Variables\",forget),\n (\"---\",None),\n (\"&Create Plane\",\n [(\"Coordinates\", \n [(\"Point and normal\", createPlaneCoordsPointNormal),\n (\"Three points\", createPlaneCoords3Points),\n ]), \n (\"Visually\", \n [(\"Three points\", createPlaneVisual3Points),\n ]),\n ]),\n (\"&Select Plane\",planes.ask),\n (\"&Draw Selection\",planes.draw),\n (\"&Forget Selection\",planes.forget),\n (\"---\",None),\n (\"&Pick Actors\",pick_actors),\n (\"&Pick Elements\",pick_elements),\n (\"&Pick Points\",pick_points),\n (\"&Pick Edges\",pick_edges),\n (\"---\",None),\n ('&Selection',\n [('&Create Report',report_selection),\n ('&Set Property',setprop_selection),\n ('&Grow',grow_selection),\n ('&Partition',partition_selection),\n ('&Get Partition',get_partition),\n ('&Export',export_selection),\n ]),\n (\"---\",None),\n ('&Query',\n [('&Actors',query_actors),\n ('&Elements',query_elements),\n ('&Points',query_points),\n ('&Edges',query_edges),\n ('&Distances',query_distances),\n ]),\n (\"---\",None),\n (\"&Close\",close_menu),\n ]\n return widgets.Menu('Tools',items=MenuData,parent=GD.gui.menu,before='help')", "def on_actions_list(self, e):\n self.PopupMenu(self.popup_menu())", "def _addUtilityMenuItems(ned, node):\n pass", "def _createFileMenuItems(ned, node):\n pass", "def _createDisplayMenu(ned, menu):\n pass", "def render(self):\n menu = etree.Element('openbox_pipe_menu')\n \n walk(self.menuItems, menu)\n \n print etree.tostring(menu)", "def _createPsdFileTexMenuItems(ned, node):\n pass", "def menuItem(*args):\n\toptionsWindow()", "def right_click(self, event):\n\n super().right_click(event)\n self.popup_menu.add_command(label=\"Edit..\", command=self.edit)\n\n self.popup_menu.tk_popup(event.x_root, event.y_root, 0)", "def right_click(self, event):\n\n super().right_click(event)\n self.popup_menu.add_command(label=\"Edit..\", command=self.edit)\n\n self.popup_menu.tk_popup(event.x_root, event.y_root, 0)", "def _addCustomNodeItemMenus(ned, node):\n pass", "def widget_ctx_menu(self):\n def toggle_step():\n self.showStepExponent = not self.showStepExponent\n\n def toggle_write():\n self.writeOnPress = not self.writeOnPress\n\n menu = self.lineEdit().createStandardContextMenu()\n menu.addSeparator()\n ac = menu.addAction('Toggle Show Step Size')\n ac.triggered.connect(toggle_step)\n\n ac_write = menu.addAction('Toggle Write On Press')\n ac_write.triggered.connect(toggle_write)\n\n return menu", "def makeMenu(self):\n\t\tself.fileMenu = 
self.menuBar().addMenu(self.tr(\"&Arquivo\"))\n\t\tself.fileMenu.addAction(self.newAct)\n\t\tself.fileMenu.addAction(self.openAct)\n\t\tself.fileMenu.addAction(self.saveAct)\n\t\tself.fileMenu.addAction(self.exportAct)\n\t\tself.fileMenu.addSeparator() \n\t\tself.fileMenu.addAction(self.exitAct)\n\n\t\tself.editMenu = self.menuBar().addMenu(self.tr(\"&Editar\"))\n\t\t\n\t\tself.helpMenu = self.menuBar().addMenu(self.tr(\"&Ajuda\"))\n\t\tself.helpMenu.addAction(self.aboutAct)", "def _createLightMenuItems(node):\n pass", "def OutputMenuItems():\r\n print('''\r\n Menu of Options\r\n 1) Show current data\r\n 2) Add a new item.\r\n 3) Save Data to File\r\n 4) Exit Program\r\n ''')\r\n print() # Add an extra line for looks\r", "def _right_click(self, event):\n if self.disabled is False:\n self.menu.tk_popup(event.x_root, event.y_root)", "def create_menu(list_recipes):\n\n title = 'PyVegan - List of Recipes'\n menu = CursesMenu(title, 'Select one and press enter')\n msg = 'This search isn\\'t a valid one'\n\n for recipe in list_recipes:\n recipe_title = clean_title(recipe['post_title'])\n\n if 'post_link' in recipe:\n item = FunctionItem(\n recipe_title,\n url_open,\n args=[recipe['post_link']]\n )\n else:\n item = FunctionItem(recipe_title, lambda x: print(x), args=[msg])\n menu.append_item(item)\n\n return menu", "def build_item_menu(self, items):\n\n menu = QtWidgets.QMenu(self)\n\n # update to latest version\n def _on_update_to_latest(items):\n for item in items:\n api.update(item, -1)\n self.data_changed.emit()\n\n update_icon = qta.icon(\"fa.angle-double-up\", color=DEFAULT_COLOR)\n updatetolatest_action = QtWidgets.QAction(update_icon,\n \"Update to latest\",\n menu)\n updatetolatest_action.triggered.connect(\n lambda: _on_update_to_latest(items))\n\n # set version\n set_version_icon = qta.icon(\"fa.hashtag\", color=DEFAULT_COLOR)\n set_version_action = QtWidgets.QAction(set_version_icon,\n \"Set version\",\n menu)\n set_version_action.triggered.connect(\n lambda: self.show_version_dialog(items))\n\n # switch asset\n switch_asset_icon = qta.icon(\"fa.sitemap\", color=DEFAULT_COLOR)\n switch_asset_action = QtWidgets.QAction(switch_asset_icon,\n \"Switch Asset\",\n menu)\n switch_asset_action.triggered.connect(\n lambda: self.show_switch_dialog(items))\n\n # remove\n remove_icon = qta.icon(\"fa.remove\", color=DEFAULT_COLOR)\n remove_action = QtWidgets.QAction(remove_icon, \"Remove items\", menu)\n remove_action.triggered.connect(\n lambda: self.show_remove_warning_dialog(items))\n\n # go back to flat view\n if self._hierarchy_view:\n back_to_flat_icon = qta.icon(\"fa.list\", color=DEFAULT_COLOR)\n back_to_flat_action = QtWidgets.QAction(back_to_flat_icon,\n \"Back to Full-View\",\n menu)\n back_to_flat_action.triggered.connect(self.leave_hierarchy)\n\n # send items to hierarchy view\n enter_hierarchy_icon = qta.icon(\"fa.indent\", color=\"#d8d8d8\")\n enter_hierarchy_action = QtWidgets.QAction(enter_hierarchy_icon,\n \"Cherry-Pick (Hierarchy)\",\n menu)\n enter_hierarchy_action.triggered.connect(\n lambda: self.enter_hierarchy(items))\n\n # expand all items\n expandall_action = QtWidgets.QAction(menu, text=\"Expand all items\")\n expandall_action.triggered.connect(self.expandAll)\n\n # collapse all items\n collapse_action = QtWidgets.QAction(menu, text=\"Collapse all items\")\n collapse_action.triggered.connect(self.collapseAll)\n\n # add the actions\n has_selection = len(items)\n\n if has_selection:\n menu.addAction(updatetolatest_action)\n menu.addAction(set_version_action)\n 
menu.addAction(switch_asset_action)\n\n menu.addSeparator()\n menu.addAction(remove_action)\n\n menu.addSeparator()\n\n # These two actions should be able to work without selection\n menu.addAction(expandall_action)\n menu.addAction(collapse_action)\n\n custom_actions = self.get_custom_actions(containers=items)\n if custom_actions:\n submenu = QtWidgets.QMenu(\"Actions\", self)\n for action in custom_actions:\n\n color = action.color or DEFAULT_COLOR\n icon = qta.icon(\"fa.%s\" % action.icon, color=color)\n action_item = QtWidgets.QAction(icon, action.label, submenu)\n action_item.triggered.connect(\n partial(self.process_custom_action, action, items))\n\n submenu.addAction(action_item)\n\n menu.addMenu(submenu)\n\n if has_selection:\n menu.addAction(enter_hierarchy_action)\n\n if self._hierarchy_view:\n menu.addAction(back_to_flat_action)\n\n return menu", "def keyboard_popup(self):\n c = self.c\n menu = QtWidgets.QMenu(c.frame.top)\n\n cmds = {}\n\n need_submenu = 'Move', 'Copy', 'Clone', 'Bookmark', 'Link'\n current_kind = None\n current_submenu = None\n todo: Any\n for name, first_last, long, short in quickMove.flavors:\n if first_last:\n todo = 'first child', 'last child', 'next sibling', 'prev sibling'\n else:\n todo = ['']\n for which in todo:\n if which:\n which = \" \" + which.title()\n k = \"Set as \" + long + \" \" + short + which + ' target'\n cmds[k] = {'first': which, 'type': name}\n kind = long.split()[0]\n if kind in need_submenu:\n if current_kind != kind:\n current_submenu = menu.addMenu(kind)\n current_kind = kind\n else:\n current_submenu = menu\n current_submenu.addAction(k)\n\n pos = c.frame.top.window().frameGeometry().center()\n action = menu.exec_(pos)\n if action is None:\n return\n k = str(action.text())\n g.es(k)\n self.keyboard_target = quickMoveButton(\n self, c.p.v, cmds[k]['first'], type_=cmds[k]['type'])", "def create_menu(self, parent):\n menu = QtGui.QMenu(parent=parent)\n return menu.menuAction()", "def _createTextureMenuItems(ned, node):\n pass", "def _param_right_click(self,event,param_name):\n self._update_dynamic_menu_entry(param_name)\n self.popup_menu.tk_popup(event.x_root,event.y_root)", "def _createTextureUtilityMenuItems(ned, node):\n pass", "def menus(self):\r\n return []", "def _create_context_menu(self):\n self.menu = Gtk.Menu()\n delete_menu = Gtk.MenuItem(\"Delete Task\")\n self.menu.append(delete_menu)", "def build_menuable_items(self):\n cols = []\n for bundle in app.bundles:\n bundle_metadata = bundle['Meta']['bundle-metadata']\n try:\n conjure_data = bundle['Meta']['extra-info/conjure-up']\n name = conjure_data.get('friendly-name',\n bundle['Meta']['id']['Name'])\n except KeyError:\n name = bundle['Meta']['id']['Name']\n self.fname_id_map[name] = bundle\n cols.append(\n Columns(\n [\n (\"weight\", 0.2, Color.body(\n menu_btn(label=name,\n on_press=self.done),\n focus_map=\"menu_button focus\")),\n (\"weight\", 0.3, Text(\n bundle_metadata.get('Description',\n 'Needs a description'),\n align=\"left\"))\n ],\n dividechars=1\n )\n )\n cols.append(Padding.line_break(\"\"))\n return Pile(cols)", "def menu(*args, allowOptionBoxes: bool=True, defineTemplate: AnyStr=\"\", deleteAllItems:\n bool=True, docTag: Union[AnyStr, bool]=\"\", enable: bool=True, exists: bool=True,\n familyImage: Union[AnyStr, bool]=\"\", helpMenu: bool=True, itemArray: bool=True, label:\n Union[AnyStr, bool]=\"\", ltVersion: Union[AnyStr, bool]=\"\", mnemonic: Union[AnyStr,\n bool]=\"\", numberOfItems: bool=True, parent: AnyStr=\"\", postMenuCommand: Script=None,\n 
postMenuCommandOnce: bool=True, scrollable: bool=True, tearOff: bool=True,\n useTemplate: AnyStr=\"\", version: Union[AnyStr, bool]=\"\", visible: bool=True, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def right_click_event(self, icon, button, time):\n menu = gtk.Menu()\n\n about = gtk.MenuItem(\"About\")\n quit = gtk.MenuItem(\"Quit\")\n\n about.connect(\"activate\", self.show_about_dialog)\n quit.connect(\"activate\", gtk.main_quit)\n\n menu.append(about)\n menu.append(quit)\n\n menu.show_all()\n\n menu.popup(None, None, gtk.status_icon_position_menu, button, time, self.statusicon)", "def _addMenu(self):\n self.action = QAction(QIcon(), 'WakaTime', self)\n self.action.triggered.connect(self._promptForApiKey)\n fileMenu = e5App().getObject('UserInterface').getMenu('file')\n fileMenu.addAction(self.action)", "def _context_menu_make(self, pos):\n format = self._control.cursorForPosition(pos).charFormat()\n name = format.stringProperty(QtGui.QTextFormat.ImageName)\n if name:\n menu = QtGui.QMenu()\n\n menu.addAction('Copy Image', lambda: self._copy_image(name))\n menu.addAction('Save Image As...', lambda: self._save_image(name))\n menu.addSeparator()\n\n svg = self._name_to_svg_map.get(name, None)\n if svg is not None:\n menu.addSeparator()\n menu.addAction('Copy SVG', lambda: svg_to_clipboard(svg))\n menu.addAction('Save SVG As...',\n lambda: save_svg(svg, self._control))\n else:\n menu = super(RichJupyterWidget, self)._context_menu_make(pos)\n return menu", "def accessoriesMenu():\n pref = QtGui.QAction(mw)\n pref.setText(\"Command panel\")\n pref.setObjectName(\"CommandPanel\")\n pref.triggered.connect(onPreferences)\n try:\n import AccessoriesMenu\n AccessoriesMenu.addItem(\"CommandPanel\")\n except ImportError:\n a = mw.findChild(QtGui.QAction, \"AccessoriesMenu\")\n if a:\n a.menu().addAction(pref)\n else:\n mb = mw.menuBar()\n action = QtGui.QAction(mw)\n action.setObjectName(\"AccessoriesMenu\")\n action.setIconText(\"Accessories\")\n menu = QtGui.QMenu()\n action.setMenu(menu)\n menu.addAction(pref)\n\n def addMenu():\n \"\"\"Add accessories menu to the menu bar.\"\"\"\n toolsMenu = mb.findChild(QtGui.QMenu, \"&Tools\")\n if toolsMenu:\n toolsMenu.addAction(action)\n\n addMenu()\n mw.workbenchActivated.connect(addMenu)", "def create_menu(self):\n self.create_actions()\n self._menu = QtGui.QMenu('QA measurements')\n for action in self._menu_actions:\n self._menu.addAction(action)\n menuBar = util.get_main_win().menuBar()\n for action in menuBar.actions():\n if action.menu().title() == \"QA measurements\":\n menuBar.removeAction(action)\n menuBar.addMenu(self._menu)", "def set_right_context_menu(self, menu):\r\n self.right_context_menu = menu", "def get_menus(self):\n \n return [\n ('File', [\n ('New task...\\tCtrl+N', 'Add a new task', self.OnAddTask, wx.ID_NEW),\n ('Edit task...\\tCtrl+E', 'Edit the selected task', None, wx.ID_OPEN),\n ('Remove task...\\tDel', 'Remove the selected task', None, wx.ID_CLOSE),\n (None, ),\n ('&Quit\\tCtrl+Q', 'Close down this program', self.OnExit, wx.ID_EXIT)\n ]),\n ('Help', [\n ('About %s...\\tCtrl+H' % APP_TITLE, 'Learn a little about this program', self.OnAbout, wx.ID_ABOUT),\n ]),\n ]", "def __on_treeview_button_press_event(self, widget, event):\n # Display the menu when right clicked\n if event.button == 3:\n x = int(event.x)\n y = int(event.y)\n\n # Get the path of the item clicked we'll need this to retrieve the\n # corresponding PathDescriptor\n path = self.__tree_view.get_path_at_pos(x, y)\n\n if path is 
not None:\n # Get the PathDescriptor of the item clicked for use when\n # opening up the menu\n path_iter = self.__tree_view.get_model().get_iter(path[0])\n desc = self.__tree_view.get_model().get_value(path_iter, 0)\n\n # Display the menu based on the PathDescriptor\n menu = TreeCellMenu(self.__project_explorer)\n menu.display(desc, event)", "def popupMenu(*args, allowOptionBoxes: bool=True, altModifier: bool=True, button: Union[int,\n bool]=0, ctrlModifier: bool=True, defineTemplate: AnyStr=\"\", deleteAllItems:\n bool=True, exists: bool=True, itemArray: bool=True, markingMenu: bool=True,\n numberOfItems: bool=True, parent: AnyStr=\"\", postMenuCommand: Script=None,\n postMenuCommandOnce: bool=True, shiftModifier: bool=True, useTemplate: AnyStr=\"\",\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def createMenus(self):\n\n self.fileMenu = QMenu(\"&File\", self)\n self.fileMenu.addAction(self.openAct)\n self.fileMenu.addAction(self.addAct)\n self.fileMenu.addSeparator()\n # self.fileMenu.addAction(self.showSessionAct)\n self.fileMenu.addAction(self.exitAct)\n\n self.helpMenu = QMenu(\"&Help\", self)\n self.helpMenu.addAction(self.aboutAct)\n self.helpMenu.addAction(self.aboutQtAct)\n\n self.viewMenu = QMenu(\"&View\", self)\n\n self.sortMenu = QMenu(\"Sort by\", self.viewMenu, enabled=False)\n self.groupMenu = QMenu(\"Group by\", self.viewMenu, enabled=False)\n\n self.showGroupMenu = QMenu(\"Load Group\", self.fileMenu, enabled=False)\n self.addGroupDataMenu = QMenu('Add Group', self.fileMenu, enabled=False)\n self.fileMenu.addMenu(self.showGroupMenu)\n self.fileMenu.addMenu(self.addGroupDataMenu)\n self.fileMenu.addAction(self.seeAllGroupAct)\n self.viewMenu.addMenu(self.groupMenu)\n self.viewMenu.addMenu(self.sortMenu)\n\n # Add filters to \"Sort by\"\n self.create_sort_menu()\n self.sortMenu.addAction(self.ageSortAct)\n self.sortMenu.addAction(self.sexSortAct)\n self.sortMenu.addAction(self.genotypeSortAct)\n self.sortMenu.addAction(self.speciesSortAct)\n self.sortMenu.addAction(self.subjectIDSortAct)\n self.sortMenu.addAction(self.weightSortAct)\n self.sortMenu.addAction(self.birthSortAct)\n self.sortMenu.addSeparator()\n\n self.sortMenu.addAction(self.fluorescenceSortAct)\n self.sortMenu.addAction(self.imagesegSortAct)\n self.sortMenu.addAction(self.rasterSortAct)\n\n # Add filters to \"Group by\"\n self.create_group_menu()\n self.groupMenu.addAction(self.ageGroupAct)\n self.groupMenu.addAction(self.sexGroupAct)\n self.groupMenu.addAction(self.genotypeGroupAct)\n self.groupMenu.addAction(self.speciesGroupAct)\n self.groupMenu.addAction(self.subjectIDGroupAct)\n self.groupMenu.addAction(self.weightGroupAct)\n self.groupMenu.addAction(self.birthGroupAct)\n\n self.groupMenu.addSeparator()\n\n self.groupMenu.addAction(self.fluorescenceGroupAct)\n self.groupMenu.addAction(self.imagesegGroupAct)\n self.groupMenu.addAction(self.rasterGroupAct)\n\n self.menuBar().addMenu(self.fileMenu)\n self.menuBar().addMenu(self.viewMenu)\n self.menuBar().addMenu(self.helpMenu)", "def get_items(self):\n options = \"\"\n for item in self.menu:\n options += f\"{item.name}/\"\n return options", "def request_context_menu(self, pos):\n pass", "def create_menus( self ):\n\n self.saveAct = QAction( \"&Save\", self, shortcut=\"Ctrl+S\",\n triggered=self.save_database )\n self.exitAct = QAction( \"E&xit\", self, shortcut=\"Ctrl+Q\",\n triggered=self.close )\n\n self.aboutAct = QAction( \"&About\", self, triggered=self.about )\n\n self.aboutQtAct = QAction( \"About &Qt\", self,\n 
triggered=QApplication.instance().aboutQt )\n\n self.fileMenu = QMenu( \"&File\", self )\n self.fileMenu.addAction( self.saveAct )\n self.fileMenu.addAction( self.exitAct )\n\n self.helpMenu = QMenu( \"&Help\", self )\n self.helpMenu.addAction( self.aboutAct )\n self.helpMenu.addAction( self.aboutQtAct )\n\n self.menuBar().addMenu( self.fileMenu )\n self.menuBar().addMenu( self.helpMenu )", "def CreatePopupMenu(self):\n menu = wx.Menu()\n menu.Append(self.TBMENU_RESTORE, \"Restore wxPython Demo\")\n menu.Append(self.TBMENU_CLOSE, \"Close wxPython Demo\")\n menu.AppendSeparator()\n menu.Append(self.TBMENU_CHANGE, \"Change the TB Icon\")\n menu.Append(self.TBMENU_REMOVE, \"Remove the TB Icon\")\n return menu", "def popUpMenu(self, menuRequestingtObject, PopupPoint, menuListString, funcToInvoke, additionalArguments='', iconList = []):\r\n if menuListString == []:\r\n return 0;\r\n Rmnu = QtWidgets.QMenu(self.CallingUI)\r\n for i, itm in enumerate(menuListString):\r\n\r\n newmenuitem = QtWidgets.QAction(itm, self.CallingUI)\r\n #newmenuitem\r\n\r\n if len(itm)>1 and itm[0]=='|':\r\n itm = itm[1:len(itm)]\r\n newmenuitem.setEnabled(False)\r\n newmenuitem.setText(itm)\r\n #var = QtCore.QVariant()\r\n\r\n\r\n\r\n if itm != '':\r\n if len(iconList)>1 and len(iconList)>i:\r\n if iconList[i]!=None:\r\n icon = QtGui.QIcon()\r\n icon.addPixmap(QtGui.QPixmap(iconList[i]), QtGui.QIcon.Normal, QtGui.QIcon.On)\r\n newmenuitem.setIcon(icon)\r\n\r\n #self.CallingUI.connect(newmenuitem, QtCore.SIGNAL(\"triggered()\"), lambda passarg=(itm,i,additionalArguments,newmenuitem): funcToInvoke(passarg))\r\n newmenuitem.triggered.connect(lambda passarg=([itm,i,additionalArguments,newmenuitem]): funcToInvoke(passarg))\r\n newmenuitem.setData(PopupPoint)\r\n\r\n if itm=='':\r\n Rmnu.addSeparator()\r\n else:\r\n Rmnu.addAction(newmenuitem)\r\n\r\n\r\n PopupPoint.setY(PopupPoint.y())\r\n PopupPoint.setX(PopupPoint.x())\r\n Rmnu.exec_(menuRequestingtObject.mapToGlobal(PopupPoint))\r\n del(Rmnu)", "def _createMainContextMenu(self) -> None:\n # separators for improved readability\n separator1 = QAction(self)\n separator1.setSeparator(True)\n separator2 = QAction(self)\n separator2.setSeparator(True)\n\n self._mainFileView.setContextMenuPolicy(Qt.ActionsContextMenu)\n for action in self._fileActions:\n if action == self._exitAction: # don't include Exit button in the context menu\n continue\n self._mainFileView.addAction(action)\n self._mainFileView.addAction(separator1)\n for action in self._editActions:\n self._mainFileView.addAction(action)\n self._mainFileView.addAction(separator2)\n for action in self._viewActions:\n self._mainFileView.addAction(action)", "def menu_wxglade_tutorial(self, event=None):\n self.link(\"http://spe.stani.be/manual/wxGlade/tutorial.html\")", "def create_menu(self):\n about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)\n about.connect_object('activate', self.about, 'about')\n about.show()\n\n# prefs = gtk.ImageMenuItem(gtk.STOCK_PREFERENCES)\n# prefs.connect_object('activate', self.prefs, 'prefs')\n# prefs.show()\n\n quit = gtk.ImageMenuItem(gtk.STOCK_QUIT)\n quit.connect_object('activate', self.exit, 'quit')\n quit.show()\n\n menu = gtk.Menu()\n menu.append(about)\n# menu.append(prefs)\n menu.append(quit)\n return menu", "def request_context_menu(self, pos):\n super(ItemListView, self).request_context_menu(pos)\n self.get_selected()\n self.manage_actions()\n self.display_context_menu(pos)", "def render(self,session,context):\n #TODO: at some point this should actually get implemented, but to do so\n # 
would need to convert self.menu_items from list to a dict, where the\n # keys are the items that should be selected in the menu. \n # to do so would need to update MenuItem as well.\n menu = [] if self.title_str == '' else [\" %s\" % self.title_str]\n for idx , item in enumerate(self.menu_items):\n if item.hide_index: menu_text = str(item)\n else:\n menu_text = \"%s. %s\"%(item.custom_index, item)\n menu.append( menu_text )\n return \"\\n\".join(menu)", "def get_menu ( self, object, row ):\n return self.menu", "def _right_click(self, event, widget):\n self._currently_selected_widget = widget\n\n # need an actual mechanism for populating the menu, rather than this!!\n ### copied from edit_PO_in_currently...\n param_name = None\n for name,representation in self.representations.items():\n if self._currently_selected_widget is representation['widget']:\n param_name=name\n break\n # CEBALERT: should have used get_parameter_value(param_name)?\n PO_to_edit = self._string2object(param_name,self._tkvars[param_name].get())\n ###\n\n if hasattr(PO_to_edit,'params'):\n self.menu.tk_popup(event.x_root, event.y_root)", "def menu_items():\n def show():\n form.show();\n form.activateWindow()\n form.raise_()\n\n lst = []\n lst.append((\"Import Programmableweb\", show))\n \n return tuple(lst)", "def _create_menu(self):\n\n self.quit_item.connect('activate', gtk.main_quit, gtk)\n\n self.menu.append(self.quit_item)\n self.status_icon.connect('popup-menu', show_menu, self.menu)", "def _createShaderMenuItems(ned, node):\n pass", "def onContextMenu(self, event):\r\n # Slicer plot popup menu\r\n #slicerpop = wx.Menu()\r\n #slicerpop.Append(314, \"&Save 1D model points (%s)\" % self.file_data.name,\r\n # 'Save randomly oriented data currently displayed')\r\n\r\n #slicerpop.Append(316, '&Load 1D data file')\r\n\r\n #slicerpop.Append(315, '&Toggle Linear/Log intensity scale Y-axis')\r\n PlotPanel.onContextMenu(self,event)\r\n #pos = event.GetPosition()\r\n #pos = self.ScreenToClient(pos)\r\n #self.PopupMenu(slicerpop, pos)\r", "def right_click(self, *args):\n return _ida_hexrays.Hexrays_Hooks_right_click(self, *args)", "def add_to_menu ( self, menu_item ):\r\n pass", "def get_all_menu():", "def main_menu(self):\n menu_string = \"Main menu\\n\"\n menu_string += \"\\t1. Modify a list\\n\"\n menu_string += \"\\t2. Grade submenu\\n\"\n menu_string += \"\\t3. Search for something\\n\"\n menu_string += \"\\t4. Get a statistic\\n\"\n menu_string += \"\\t5. Undo/Redo\\n\"\n menu_string += \"\\t0. 
Exit\\n\"\n stop = False\n\n while not stop:\n command_list = \\\n {'0': self.__no_command,\n '1': self.__modify_submenu,\n '2': self.__grade_submenu,\n '3': self.__search_submenu,\n '4': self.__statistics_submenu,\n '5': self.__undo_submenu\n }\n command = self.__ui_read_command(menu_string)\n\n if command in command_list.keys():\n if command == '0':\n return\n else:\n command_list[command]()\n\n else:\n print(\"Invalid command!\")", "def misc_menu(self):\n # info needed to separate edit and view widgets in self.widget_classes\n name_test_current = [\n (\"Editor\", lambda x: x.lep_type == 'EDITOR', self.edit_widget.__class__),\n (\"Viewer\", lambda x: x.lep_type != 'EDITOR', self.view_widget.__class__),\n ]\n\n menu = QtWidgets.QMenu()\n for name, is_one, current in name_test_current:\n # list Editor widgets, then Viewer widgets\n for widget_class in [i for i in self.widget_classes if is_one(i)]:\n\n def cb(checked, widget_class=widget_class):\n self.set_widget(widget_class=widget_class)\n\n act = QAction(f\"{name}: {widget_class.lep_name}\", self)\n act.setCheckable(True)\n act.setChecked(widget_class == current)\n act.triggered.connect(cb)\n menu.addAction(act)\n\n button = self.control_menu_button\n point = button.position().toPoint() if isQt6 else button.pos() # Qt6 documentation is wrong.\n global_point = button.mapToGlobal(point)\n menu.exec_(global_point)", "def contextMenuEvent(self, e):\n\n count = len(self.selectedObjects())\n menu = QtWidgets.QMenu()\n\n if count:\n self.__menuActions.shows().addAction(menu, \"properties\")\n if count == 1:\n menu.addSeparator()\n self.__menuActions.shows().addAction(menu, \"createSubscription\")\n\n menu.exec_(QtCore.QPoint(e.globalX(), e.globalY()))", "def menuItem(*args, allowOptionBoxes: bool=True, annotation: Union[AnyStr, bool]=\"\", boldFont:\n bool=False, checkBox: bool=True, collection: Union[AnyStr, bool]=\"\", command:\n Union[Script, bool]=None, data: Union[int, bool]=0, defineTemplate: AnyStr=\"\",\n divider: bool=True, dividerLabel: Union[AnyStr, bool]=\"\", docTag: Union[AnyStr,\n bool]=\"\", dragDoubleClickCommand: Union[Script, bool]=None, dragMenuCommand:\n Union[Script, bool]=None, echoCommand: bool=True, enable: bool=True,\n enableCommandRepeat: bool=True, exists: bool=True, familyImage: Union[AnyStr,\n bool]=\"\", image: Union[AnyStr, bool]=\"\", imageOverlayLabel: Union[AnyStr, bool]=\"\",\n insertAfter: AnyStr=\"\", isCheckBox: bool=True, isOptionBox: bool=True,\n isRadioButton: bool=True, italicized: bool=False, label: Union[AnyStr, bool]=\"\",\n longDivider: bool=True, ltVersion: Union[AnyStr, bool]=\"\", optionBox: bool=True,\n optionBoxIcon: Union[AnyStr, bool]=\"\", parent: AnyStr=\"\", postMenuCommand:\n Union[Script, bool]=None, postMenuCommandOnce: bool=True, radialPosition:\n Union[AnyStr, bool]=\"\", radioButton: bool=True, runTimeCommand: AnyStr=\"\",\n sourceType: Union[AnyStr, bool]=\"\", subMenu: bool=True, tearOff: bool=True,\n useTemplate: AnyStr=\"\", version: Union[AnyStr, bool]=\"\", visible: bool=True,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def restaurantMenuItem(restaurant_id, menu_id):\n\n return \"List menu item \" + str(menu_id) + \" for \" + str(restaurant_id)", "def createNodeItemMarkingMenu(ned, node):\n pass", "def do_menu(parser, token):\n bits = token.split_contents()\n return RenderMenuNode()", "def addMenu(self):\n menu = self.interface.getPulldownMenu(0)\n actionBefore = menu.actions()[8]\n menu.insertAction(actionBefore, self.action)", "def 
enable_right_click_menu(self, enable=True):\r\n self.enable_right_click = enable", "def create_menu_set(name):\n\t\t\tmenu = getattr(self, name + \"_menu\")\n\t\t\ttv = getattr(self, \"tv_\" + name)\n\t\t\tcid_index = getattr(self, \"cid_\" + name)\n\n\t\t\t# bind menu helper\n\t\t\tdef bind_menu(label):\n\t\t\t\tdef bind_menu_inner(func):\n\t\t\t\t\tmenu.add_command(label=label, command=func)\n\t\t\t\t\treturn func\n\t\t\t\treturn bind_menu_inner\n\n\t\t\t# add commands...\n\t\t\t@bind_menu(\"刪除\")\n\t\t\tdef tvdelete():\n\t\t\t\tif messagebox.askyesno(\"Comic Crawler\", \"確定刪除?\"):\n\t\t\t\t\tselected = tv.selection()\n\t\t\t\t\tself.remove(name, *[cid_index[cid] for cid in selected])\n\n\t\t\t@bind_menu(\"移至頂部\")\n\t\t\tdef tvlift():\n\t\t\t\tselected = tv.selection()\n\t\t\t\tself.downloader.mission_manager.lift(name, *[cid_index[cid] for cid in selected])\n\n\t\t\t@bind_menu(\"移至底部\")\n\t\t\tdef tvdrop():\n\t\t\t\tselected = tv.selection()\n\t\t\t\tself.downloader.mission_manager.drop(name, *[cid_index[cid] for cid in selected])\n\n\t\t\t@bind_menu(\"改名\")\n\t\t\tdef tvchangetitle():\n\t\t\t\tselected = tv.selection()\n\t\t\t\tmission = cid_index[selected[0]]\n\t\t\t\tselect_title(self.root, mission)\n\n\t\t\t@bind_menu(\"重新選擇集數\")\n\t\t\tdef tvReselectEP():\n\t\t\t\ts = tv.selection()\n\t\t\t\tmissions = [ cid_index[i] for i in s ]\n\t\t\t\tfor mission in missions:\n\t\t\t\t\treselect_episodes(self.root, mission)\n\n\t\t\t@bind_menu(\"開啟資料夾\")\n\t\t\tdef tvOpen():\n\t\t\t\ts = tv.selection()\n\t\t\t\tmissions = [ cid_index[i] for i in s ]\n\t\t\t\tsavepath = setting[\"savepath\"]\n\t\t\t\tfor mission in missions:\n\t\t\t\t\tfolder = os.path.join(savepath, safefilepath(mission.title))\n\t\t\t\t\tos.startfile(os.path.expanduser(folder))\n\n\t\t\t@bind_menu(\"開啟網頁\")\n\t\t\tdef tvOpenBrowser():\n\t\t\t\ts = tv.selection()\n\t\t\t\tmissions = [ cid_index[i] for i in s ]\n\t\t\t\tfor mission in missions:\n\t\t\t\t\twebbrowser.open(mission.url)\n\n\t\t\tif name == \"view\":\n\t\t\t\t@bind_menu(\"加入圖書館\")\n\t\t\t\tdef tvAddToLib():\n\t\t\t\t\ts = tv.selection()\n\t\t\t\t\tmissions = [ cid_index[i] for i in s ]\n\t\t\t\t\ttitles = [ m.title for m in missions ]\n\t\t\t\t\tself.downloader.mission_manager.add(\"library\", *missions)\n\t\t\t\t\tsafeprint(\"已加入圖書館︰{}\".format(\", \".join(titles)))\n\n\t\t\t# menu call\n\t\t\tdef tvmenucall(event):\n\t\t\t\tmenu.post(event.x_root, event.y_root)\n\t\t\ttv.bind(\"<Button-3>\", tvmenucall)", "def menu(self):\n menu = list()\n \n \n menu.extend([\n {\n 'title': 'Bootstrap Demo',\n 'href': self.request.route_url('bootstrap_demo'),\n 'icon': \"fa fa-twitter-square\"\n },\n {\n 'title': 'Jade Demo',\n 'href': self.request.route_url('jade_demo'),\n 'icon': \"fa fa-indent\"\n },\n ])\n if self.user:\n menu.extend([\n {\n 'title': 'Entities',\n 'icon': \"fa fa-bar-chart\",\n 'dropdown': [\n {\n 'title': 'All entities',\n 'href': self.request.route_url(\n 'entities',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n ),\n 'icon': \"fa fa-bar-chart\"},\n {\n 'title': 'CPTs',\n 'href': self.request.route_url(\n 'cpts',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'columnsearch'\n }\n ),\n }\n ]\n }\n ]),\n if self.user.has_admin:\n menu.append(\n {\n 'title': \"User Management\",\n 'icon': \"fa fa-users\",\n 'dropdown': [\n {\n 'title': 'User Overview',\n 'href': self.request.route_url(\n 'users',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n 
),\n 'icon': 'fa fa-users',\n },\n {\n 'title': 'Add User',\n 'href': self.request.route_url('user_create'),\n 'icon': 'fa fa-user-plus',\n }\n ]\n }\n )\n\n return menu", "def makeActionMenu(self):\n\t\tself.newAct = QtGui.QAction(self.tr(\"&Novo\"),self)\n\t\tself.newAct.setShortcut(self.tr(\"Ctrl+N\"))\n\t\tself.newAct.setStatusTip(self.tr(\"Cria uma nova area de desenho em branco\"))\n\t\tself.connect(self.newAct,SIGNAL(\"triggered()\"),self.glwidget.newFile)\n\t\t\n\t\tself.openAct = QtGui.QAction(self.tr(\"&Abrir\"),self)\n\t\tself.openAct.setShortcut(self.tr(\"Ctrl+o\"))\n\t\tself.openAct.setStatusTip(self.tr(\"Abrir arquivo do elvis\"))\n\t\tself.connect(self.openAct,SIGNAL(\"triggered()\"),self.glwidget.openElvisfile)\t\t\n\n\t\tself.saveAct = QtGui.QAction(self.tr(\"&Salvar\"),self)\n\t\tself.saveAct.setShortcut(self.tr(\"Ctrl+S\"))\n\t\tself.saveAct.setStatusTip(self.tr(\"Salva a imagem do canvas\"))\n\t\tself.connect(self.saveAct,SIGNAL(\"triggered()\"),self.glwidget.saveElvisfile)\n\t\t\n\t\tself.exportAct = QtGui.QAction(self.tr(\"&Exportar SVG\"),self)\n\t\tself.exportAct.setShortcut(self.tr(\"Ctrl+E\"))\n\t\tself.exportAct.setStatusTip(self.tr(\"Exporta para formato SVG\"))\n\t\tself.connect(self.exportAct,SIGNAL(\"triggered()\"),self.glwidget.ExportSVG)\n\t\t\t\t\n\t\t\n\t\tself.exitAct = QtGui.QAction(self.tr(\"&Sair\"),self)\n\t\tself.exitAct.setStatusTip(self.tr(\"Sair do programa\"))\n\t\tself.connect(self.exitAct,SIGNAL(\"triggered()\"),self.close)\n\t\t\n\t\n\t\tself.aboutAct = QtGui.QAction(self.tr(\"&Sobre\"),self)\n\t\tself.aboutAct.setStatusTip(self.tr(\"Sobre o programa\"))\n\t\tself.connect(self.aboutAct,SIGNAL(\"triggered()\"),self.about)", "def get_menu_items(self) -> typing.List[typing.Tuple[str, typing.List[typing.Tuple[str, typing.Callable[[], None]]]]]: #this method is to be queried by the root frame when it is creating the menu bar at the top of the screen and needs options to put in it\n return []", "def showContextMenu(self, event):\r\n menu = wx.Menu()\r\n menu.Append(wx.ID_OPEN, \"Open...\\tCtrl+O\", \"Open an image...\", )\r\n menu.Append(wx.ID_SAVE, \"Save\\tCtrl+S\", \"Save the cropped image...\")\r\n menu.AppendSeparator()\r\n menu.Append(wx.ID_ABOUT, \"About\\tCtrl+I\", \"About this program...\")\r\n\r\n menu.Bind(wx.EVT_MENU, self.showOpenImageDialog, id=wx.ID_OPEN)\r\n menu.Bind(wx.EVT_MENU, self.saveImage, id=wx.ID_SAVE)\r\n menu.Bind(wx.EVT_MENU, self.showAboutDialog, id=wx.ID_ABOUT)\r\n\r\n self.PopupMenu(menu, event.GetPosition())\r\n menu.Destroy()", "def add_menu():\n\n def _(*args, **kwargs):\n args = (cast_str(i) if isinstance(i, six.text_type) else i for i in args)\n kwargs = tuple(\n {\n k: cast_str(v) if isinstance(v, six.text_type) else v\n for k, v in kwargs.items()\n }.items()\n )\n return (args, kwargs)\n\n def _auto_comp():\n try:\n comp.Comp().create_nodes()\n except comp.FootageError:\n nuke.message(cast_str(\"请先导入素材\"))\n\n all_menu = [\n {\n _(\"工具\"): [\n {\n _(\"按素材名组装\"): [\n _(\"对当前工程执行\", _auto_comp, icon=\"autocomp.png\"),\n _(\n \"批量执行\",\n lambda: comp.panels.BatchCompPanel().showModalDialog(),\n icon=\"autocomp.png\",\n ),\n _(\n \"设置\",\n lambda: comp.panels.CompConfigPanel().showModalDialog(),\n icon=\"autocomp.png\",\n ),\n ],\n },\n {\n _(\"转换为序列工程\"): [\n _(\"对当前工程执行\", edit.script_use_seq.execute),\n _(\n \"批量执行\",\n lambda: edit.script_use_seq.panels.BatchPanel().showModalDialog(),\n ),\n _(\n \"设置\",\n lambda: edit.script_use_seq.panels.ConfigPanel().showModalDialog(),\n ),\n ]\n },\n ]\n }\n ]\n\n # Add 
all menu.\n def _add_menu(menu, parent=nuke.menu(cast_str(\"Nuke\"))):\n # type: (..., nuke.Menu) -> None\n assert isinstance(menu, dict)\n\n for k, v in menu.items():\n m = parent.addMenu(*k[0], **dict(k[1]))\n for i in v:\n if i is None:\n _ = m.addSeparator()\n elif isinstance(i, dict):\n _add_menu(i, m)\n elif isinstance(i, tuple):\n _ = m.addCommand(*i[0], **dict(i[1]))\n\n for menu in all_menu:\n _add_menu(menu)", "def create_menus( self ):\n\n self.closeAct = QAction( \"&Close Window\", self, shortcut=\"Ctrl+W\",\n triggered=self.close )\n\n self.commitAct = QAction( \"&Commit Photo Record\", self, shortcut=\"Ctrl+S\",\n triggered=lambda: self.commit_record( update_photo_state=True ) )\n\n self.editAct = QAction( \"&Edit Image\", self, shortcut=\"Ctrl+E\",\n triggered=self.run_image_editor )\n self.viewAct = QAction( \"&View Image\", self, shortcut=\"Ctrl+V\",\n triggered=self.run_image_viewer )\n\n self.windowMenu = QMenu( \"&Window\", self )\n self.windowMenu.addAction( self.commitAct )\n self.windowMenu.addAction( self.editAct )\n self.windowMenu.addAction( self.viewAct )\n self.windowMenu.addAction( self.closeAct )\n\n self.menuBar().addMenu( self.windowMenu )", "def create_menu_par(self, name, trig_func, menu, shrt_cut):\n\n createdAction = QAction(name, self)\n createdAction.setShortcut(shrt_cut)\n createdAction.triggered.connect(trig_func)\n menu.addAction(createdAction)\n return createdAction", "def create_menu_bar(self):\n file = wx.Menu()\n file.Append(wx.ID_NEW, \"&New\\tCtrl-n\", \"Create a new braille document\")\n file.Append(wx.ID_OPEN, \"&Import\\tCtrl-o\", \"Import and convert a print document\")\n file.AppendSeparator()\n file.Append(wx.ID_SAVE, \"&Save\\tCtrl-s\", \"Save the current braille document\")\n file.Append(wx.ID_SAVEAS, \"Save &As\\tCtrl-Shift-s\", \"Save the current braille document under a new name\")\n file.AppendSeparator()\n file.Append(wx.ID_EXIT, \"E&xit\\t\\tCtrl-q\", \"Close this program\")\n\n edit = wx.Menu()\n edit.Append(wx.ID_UNDO, \"&Undo\\tCtrl-z\", \"Undoes the last operation\")\n edit.Append(wx.ID_REDO, \"&Redo\\tCtrl-y\", \"Redoes the last operation\")\n edit.AppendSeparator()\n edit.Append(wx.ID_CUT, \"&Cut\\tCtrl-x\", \"Move text to the clipboard\")\n edit.Append(wx.ID_COPY, \"Cop&y\\tCtrl-c\", \"Copies text to the clipboard\")\n edit.Append(wx.ID_PASTE, \"&Paste\\tCtrl-v\", \"Insert text from the clipboard\")\n edit.AppendSeparator()\n edit.Append(wx.ID_PREFERENCES, \"P&references\\tCtrl-,\", \"Adjust transcription settings\")\n\n menu = wx.MenuBar()\n menu.Append(file, \"&File\")\n menu.Append(edit, \"&Edit\")\n return menu", "def create_menu(self: object) -> None:\n menubar = Menu(self)\n menuFile = Menu(menubar, tearoff=0)\n menubar.add_cascade(label=\"Menu\", menu=menuFile)\n menuFile.add_command(label=\"Choose a file\", command=self.open_file,\n accelerator=\"Ctrl+o\")\n menuFile.add_command(label=\"About\", command=self.about)\n self.bind_all(\"<Control-o>\", lambda e: self.open_file())\n self.config(menu=menubar)", "def create_popup_menu(self):\n menu = wx.Menu()\n\n # open folder\n item = wx.MenuItem(menu, -1, 'Open Digital Panda folder')\n menu.Bind(wx.EVT_MENU, self.open_folder, id=item.GetId())\n menu.AppendItem(item)\n\n # settings\n item = wx.MenuItem(menu, -1, 'Settings...')\n menu.Bind(wx.EVT_MENU, self.show_settings, id=item.GetId())\n menu.AppendItem(item)\n\n # quit\n item = wx.MenuItem(menu, -1, 'Quit')\n menu.Bind(wx.EVT_MENU, self.on_exit, id=item.GetId())\n menu.AppendItem(item)\n\n # status\n item = 
wx.MenuItem(menu, -1, 'Status: %s' % self.status)\n menu.AppendItem(item)\n return menu", "def context_menu(self, treeview, position):\n\n all_item = get_current_item(self,treeview,single=False)\n\n if len(all_item) == 1:\n\n item = all_item[0]\n data = get_group_data(get_current_hdf5_group(self,item))\n\n if data is None:\n list_operations = ['Print attrs', 'PyMol']\n\n elif data.ndim == 1:\n list_operations = ['Print attrs','-','Plot Hist', 'Plot Line']\n\n elif data.ndim == 2:\n list_operations = ['Print attrs','-','Plot Hist', 'Plot Map']\n\n else:\n list_operations = ['Print attrs']\n\n action,actions = get_actions(treeview,position,list_operations)\n #action, actions = get_multilevel_actions(treeview,position,list_operations,list_sub)\n\n if action == actions['Print attrs']:\n send_dict_to_console(self,item,treeview)\n\n if 'Plot Hist' in actions:\n if action == actions['Plot Hist']:\n plot_histogram(self,item,treeview)\n\n if 'Plot Line' in actions:\n if action == actions['Plot Line']:\n plot_line(self,item,treeview)\n\n if 'Plot Map' in actions:\n if action == actions['Plot Map']:\n plot2d(self,item,treeview)\n\n if 'PyMol' in actions:\n if action == actions['PyMol']:\n\n grp = get_current_hdf5_group(self,item)\n data_dict = {'_grp':grp}\n treeview.emitDict.emit(data_dict)\n\n cmd = 'launchPyMol(_grp)'\n data_dict = {'exec_cmd':cmd}\n treeview.emitDict.emit(data_dict)", "def ShowDropDown(self, wnd, items):\r\n\r\n menuPopup = wx.Menu()\r\n items_added = 0\r\n\r\n for item in items:\r\n\r\n if item.GetKind() not in [ITEM_SEPARATOR, ITEM_SPACER, ITEM_CONTROL]:\r\n \r\n text = item.GetShortHelp()\r\n if text == \"\":\r\n text = item.GetLabel()\r\n if text == \"\":\r\n text = \" \"\r\n\r\n kind = item.GetKind()\r\n m = wx.MenuItem(menuPopup, item.GetId(), text, item.GetShortHelp(), kind)\r\n orientation = item.GetOrientation()\r\n item.SetOrientation(AUI_TBTOOL_HORIZONTAL)\r\n \r\n if kind not in [ITEM_CHECK, ITEM_RADIO]:\r\n m.SetBitmap(item.GetBitmap())\r\n\r\n item.SetOrientation(orientation) \r\n \r\n menuPopup.AppendItem(m)\r\n if kind in [ITEM_CHECK, ITEM_RADIO]: \r\n state = (item.state & AUI_BUTTON_STATE_CHECKED and [True] or [False])[0]\r\n m.Check(state)\r\n\r\n items_added += 1\r\n \r\n else:\r\n \r\n if items_added > 0 and item.GetKind() == ITEM_SEPARATOR:\r\n menuPopup.AppendSeparator()\r\n \r\n # find out where to put the popup menu of window items\r\n pt = wx.GetMousePosition()\r\n pt = wnd.ScreenToClient(pt)\r\n\r\n # find out the screen coordinate at the bottom of the tab ctrl\r\n cli_rect = wnd.GetClientRect()\r\n pt.y = cli_rect.y + cli_rect.height\r\n\r\n cc = ToolbarCommandCapture()\r\n wnd.PushEventHandler(cc)\r\n\r\n # Adjustments to get slightly better menu placement\r\n if wx.Platform == \"__WXMAC__\":\r\n pt.y += 5\r\n pt.x -= 5\r\n\r\n wnd.PopupMenu(menuPopup, pt)\r\n command = cc.GetCommandId()\r\n wnd.PopEventHandler(True)\r\n\r\n return command", "def addMenu():\n toolsMenu = mb.findChild(QtGui.QMenu, \"&Tools\")\n if toolsMenu:\n toolsMenu.addAction(action)", "def onContextMenu(self, event):\n # Slicer plot popup menu\n slicerpop = wx.Menu()\n slicerpop.Append(313,'&Save image', 'Save image as PNG')\n wx.EVT_MENU(self, 313, self.onSaveImage)\n\n pos = event.GetPosition()\n pos = self.ScreenToClient(pos)\n self.PopupMenu(slicerpop, pos)", "def rightClick(self):\n cmdId = self.executeCommand(Command.CLICK, {'button': 2})\n return cmdId", "def RightClick(self):\n self._PressRightButton()\n self._ReleaseAllButtons()", "def create_menu(self, root):\n menubar = 
Menu(root)\n root['menu'] = menubar\n\n menu_file = Menu(menubar)\n menu_run = Menu(menubar)\n menu_folders = Menu(menubar)\n menu_links = Menu(menubar)\n menu_help = Menu(menubar)\n menu_beta = Menu(menubar)\n menubar.add_cascade(menu=menu_file, label='File')\n menubar.add_cascade(menu=menu_run, label='Run')\n menubar.add_cascade(menu=menu_folders, label='Folders')\n menubar.add_cascade(menu=menu_links, label='Links')\n menubar.add_cascade(menu=menu_help, label='Help')\n menubar.add_cascade(menu=menu_beta, label='Experimental')\n\n menu_file.add_command(\n label='Re-load param set', command=self.load_params,\n accelerator='Ctrl+L')\n menu_file.add_command(\n label='Re-save param set', command=self.save_params,\n accelerator='Ctrl+S')\n menu_file.add_command(\n label='Output log', command=lambda: LogWindow(self.root))\n if sys.platform != 'darwin':\n menu_file.add_command(\n label='Exit', command=self.exit_program, accelerator='Alt+F4')\n root.bind_all('<Control-l>', lambda e: self.load_params())\n root.bind_all('<Control-s>', lambda e: self.save_params())\n\n menu_run.add_command(\n label='Dwarf Fortress', command=self.lnp.run_df,\n accelerator='Ctrl+R')\n menu_run.add_command(\n label='Init Editor', command=self.run_init, accelerator='Ctrl+I')\n root.bind_all('<Control-r>', lambda e: self.lnp.run_df())\n root.bind_all('<Control-i>', lambda e: self.run_init())\n\n menu_folders.add_command(\n label='Savegame Folder', command=self.lnp.open_savegames)\n menu_folders.add_command(\n label='Utilities Folder', command=self.lnp.open_utils)\n menu_folders.add_command(\n label='Graphics Folder', command=self.lnp.open_graphics)\n menu_folders.add_separator()\n menu_folders.add_command(\n label='Main Folder', command=self.lnp.open_main_folder)\n menu_folders.add_command(\n label='LNP Folder', command=self.lnp.open_lnp_folder)\n menu_folders.add_command(\n label='Dwarf Fortress Folder', command=self.lnp.open_df_folder)\n menu_folders.add_command(\n label='Init Folder', command=self.lnp.open_init_folder)\n\n menu_links.add_command(\n label=\"DF Homepage\", command=self.lnp.open_df_web)\n menu_links.add_command(label=\"DF Wiki\", command=self.lnp.open_wiki)\n menu_links.add_command(label=\"DF Forums\", command=self.lnp.open_forums)\n\n menu_help.add_command(\n label=\"Help\", command=self.show_help, accelerator='F1')\n menu_help.add_command(\n label=\"About\", command=self.show_about, accelerator='Alt+F1')\n root.bind_all('<F1>', lambda e: self.show_help())\n root.bind_all('<Alt-F1>', lambda e: self.show_about())\n root.createcommand('tkAboutDialog', self.show_about)\n\n menu_beta.add_command(\n label='Toggle graphics pack patching', command=self.toggle_patching)", "def create_menus(self) -> list[DropdownMenu]:\n # #### Menu <File> #####\n file_menu = DropdownMenu(text='File')\n l_commands = [\n MenuCommand(label='Options', command=self.options),\n MenuCommand(label='Restart', command=self.reset),\n MenuCommand(label='Quit', command=self.quit_)\n ]\n file_menu.add_command(l_commands)\n\n # #### Menu <Help> #####\n help_menu = DropdownMenu(text='Help')\n l_commands = [\n MenuCommand(label='How the game works', command=self.principle),\n MenuCommand(label='About', command=self.about)\n ]\n help_menu.add_command(l_commands)\n\n return [file_menu, help_menu]", "def popUpMenu(callingClassObject,menuRequestingtObject,PopupPoint,menuListString,funcToInvoke,additionalArguments='',iconList = []):\r\n if menuListString == []:\r\n return 0;\r\n Rmnu = QtGui.QMenu(callingClassObject)\r\n for i, itm in 
enumerate(menuListString):\r\n\r\n newmenuitem = QtGui.QAction(itm, callingClassObject)\r\n\r\n if len(itm)>1 and itm[0]=='|':\r\n itm = itm[1:len(itm)]\r\n newmenuitem.setEnabled(False)\r\n newmenuitem.setText(itm)\r\n\r\n if itm != '':\r\n if len(iconList)>1 and len(iconList)>i:\r\n if iconList[i]<>None:\r\n icon = QtGui.QIcon()\r\n icon.addPixmap(QtGui.QPixmap(iconList[i]), QtGui.QIcon.Normal, QtGui.QIcon.On)\r\n newmenuitem.setIcon(icon)\r\n\r\n callingClassObject.connect(newmenuitem, QtCore.SIGNAL(\"triggered()\"), lambda passarg=(itm,i,additionalArguments,newmenuitem): funcToInvoke(passarg))\r\n\r\n if itm=='':\r\n Rmnu.addSeparator()\r\n else:\r\n Rmnu.addAction(newmenuitem)\r\n\r\n\r\n PopupPoint.setY(PopupPoint.y() + 30)\r\n PopupPoint.setX(PopupPoint.x() + 5)\r\n Rmnu.exec_(menuRequestingtObject.mapToGlobal(PopupPoint))\r\n del(Rmnu)", "def MainMenu():\n\n # You have to open an object container to produce the icons you want to appear on this page.\n oc = ObjectContainer()\n main_list = [('New Videos', ''), ('Best Videos', '/best'), ('Pornstars', '/pornstars')]\n for pt, h in main_list:\n oc.add(DirectoryObject(\n key=Callback(ShowHTML, pTitle=pt, href=h),\n title=pt, thumb=Callback(GetThumb, url=BASE_URL)))\n oc.add(DirectoryObject(\n key=Callback(ShowHTML, pTitle=\"Pornstars\", href='/channels'),\n title=\"Channels\", thumb=Callback(GetThumb, url=BASE_URL)))\n\n oc.add(InputDirectoryObject(key=Callback(Search), title='Search Videos', prompt='Search Videos'))\n\n return oc" ]
[ "0.7842248", "0.7701151", "0.72885704", "0.7128372", "0.7125885", "0.70448405", "0.70315874", "0.6985604", "0.68939555", "0.68497425", "0.6840268", "0.67227817", "0.6722002", "0.67210567", "0.6720659", "0.67192984", "0.6679403", "0.6652344", "0.6647002", "0.6645112", "0.66284496", "0.66163856", "0.66022116", "0.66022116", "0.65871423", "0.658135", "0.65767246", "0.65727925", "0.6569493", "0.65073496", "0.64907753", "0.6483208", "0.64663815", "0.6465592", "0.6457297", "0.6453533", "0.64533305", "0.64447176", "0.6428758", "0.641083", "0.64069206", "0.6385556", "0.63778055", "0.6375635", "0.63661665", "0.6355035", "0.63548833", "0.6340591", "0.63393116", "0.6324256", "0.6316954", "0.6315311", "0.6304125", "0.62990224", "0.62982017", "0.62970054", "0.62916875", "0.62844115", "0.6281764", "0.62791526", "0.62770194", "0.6271575", "0.6269902", "0.6262569", "0.6260311", "0.6253551", "0.625294", "0.62524366", "0.62522507", "0.623828", "0.62239045", "0.62026036", "0.6201103", "0.62010795", "0.61939144", "0.61907595", "0.6188939", "0.6185433", "0.618319", "0.61814964", "0.61769986", "0.61748785", "0.6170129", "0.6157784", "0.6157114", "0.6153903", "0.614812", "0.6140936", "0.61306065", "0.61298174", "0.61251515", "0.6121816", "0.6118717", "0.6117609", "0.61172444", "0.6115718", "0.6112167", "0.61103517", "0.61097294", "0.6108634" ]
0.65835065
25
Do something to the input and put the result in the output_queue.
def process(self, input_element: Any) -> None: raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def output(self):\r\n self.logic ( )\r\n return self.output", "def run(self, input):\n print self.print_meep(input)", "def process(self, inputs):\n output = None\n return output", "def handle_output(self, workunit, label, s):\r\n pass", "def handle_output(self, workunit, label, s):\r\n pass", "def execute(self):\n \n self.outvar = self.invar + .01", "def store_output(output):\n\n pass", "def out(self, inputs):", "def writeInput(self):\n\n #self.collect.writeInput()", "def get_output(self):\n\n num, in_text = self.get_input()\n\n if not in_text:\n self.current_prompt.freeze(in_text=\"\", show_output=False)\n # Makes the prompt un-editable.\n self.current_prompt = self.iogrid.add_prompt(number=-1)\n else:\n output = self.execute_query(num, in_text)\n self.current_prompt.freeze(in_text=in_text, output=output)\n self.current_prompt = self.iogrid.add_prompt()", "def output(self):\r\n return self.result", "def process(self, output_data: Answer) -> bool:\n return True", "def processInputs(self):", "def exercise_4(inputs): # DO NOT CHANGE THIS LINE\n output = inputs\n\n return output # DO NOT CHANGE THIS LINE", "def input(self):", "def process_inputs(self, inputs):", "def writeOutput(self, output):", "def send_output(self, result, output):\n data = pickle.dumps((result, output))\n self.wfile.write('%d\\n' % len(data))\n self.wfile.write(data)\n self.wfile.flush()", "def output_function(**kwargs):\n\n\t\toutput_queue = kwargs['q']\n\t\twhile True:\n\t\t\titem = output_queue.get()\n\t\t\t# expects to get a string or None\n\t\t\tif item is None:\n\t\t\t\tbreak\n\t\t\toutfile.write(item)\n\t\t\t# outfile.write(\"output_function:: {item}\".format(item=item)+\"\\n\")\n\t\t\toutput_queue.task_done()", "async def async_process_input(self, inp: inputs.Input) -> None:\n raise NotImplementedError()", "def run(self):\n out_fd = self.output()\n out_dir = os.path.join(self.LOCAL_ROOT, self.SHARED_RELATIVE_PATH)\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n in_fd = self.input()\n\n with in_fd.open('r') as fd:\n result = fd.read()\n with out_fd.open('w') as o_fd:\n o_fd.write(result)", "def calculate_output(self):", "def output(self, output):\n self._output = output", "def process_output(self, data, output_prompt,\r\n input_lines, output, is_doctest, image_file):\r\n if is_doctest:\r\n submitted = data.strip()\r\n found = output\r\n if found is not None:\r\n found = found.strip()\r\n\r\n # XXX - fperez: in 0.11, 'output' never comes with the prompt\r\n # in it, just the actual output text. So I think all this code\r\n # can be nuked...\r\n\r\n # the above comment does not appear to be accurate... 
(minrk)\r\n\r\n ind = found.find(output_prompt)\r\n if ind<0:\r\n e='output prompt=\"%s\" does not match out line=%s' % \\\r\n (output_prompt, found)\r\n raise RuntimeError(e)\r\n found = found[len(output_prompt):].strip()\r\n\r\n if found!=submitted:\r\n e = ('doctest failure for input_lines=\"%s\" with '\r\n 'found_output=\"%s\" and submitted output=\"%s\"' %\r\n (input_lines, found, submitted) )\r\n raise RuntimeError(e)\r\n #print 'doctest PASSED for input_lines=\"%s\" with found_output=\"%s\" and submitted output=\"%s\"'%(input_lines, found, submitted)\r", "def input(self):\r\n pass", "def run_suite():\n print(\"*Input*\")\n input_str = get_input()\n stripped = strip(input_str)\n\n print(\"*Transform*\")\n operation, transformed = transform(stripped)\n\n print(\"*Output*\")\n output(operation, transformed)", "def exec_before_job( trans, inp_data, out_data, param_dict, tool=None):\n data_name = param_dict.get( 'name', 'Biomart query' )\n data_type = param_dict.get( 'type', 'text' )\n \n name, data = out_data.items()[0]\n data = datatypes.change_datatype(data, data_type)\n data.name = data_name\n out_data[name] = data", "def __ask_query(self):\n self.__output = list()\n return input(form('What do you want to search?\\n> '))", "def evaluate_output(self, output: int) -> Callable[[str], bool]:\n raise NotImplementedError", "def process_output(self, data):\n\n if self.interactive_result_stdout_writing:\n self.brief_logger.debug(data)\n if self.verbose_logger:\n self.verbose_logger.info(data)\n\n # f.write(data)\n # show results instantly in log file\n # f.flush()\n\n return data\n\n # TODO: #68: compile re for better performance\n # TODO: RENAME", "def output(self, output):\n\n self._output = output", "def output(self, output):\n\n self._output = output", "def output(self, output):\n\n self._output = output", "def process(self, do_print=True):\n\n target = self.result()\n # Run own action here\n\n self.reset()\n pass", "def get_output(self, X):\n pass", "def set_ouput(self, q_ouput):\n self.n_outputs = len(q_ouput) # It is also the quantity of required neurons \n self._output = q_ouput\n try: \n self._q_neuron.add_register(self._output)\n except exceptions.QiskitError:\n pass", "def _qprocess(self):\n while 1:\n t, args, kw = self.inq.get()\n ret = self.__call__(*args, **kw)\n self.outq.put((t, ret))", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def input_wrapper(msg):\n userinput = input(msg)\n if userinput != 'q':\n return userinput\n else:\n sys.exit()", "def input(self, data, output = None):\n\t\tif (self.isLoaded()):\n\t\t\tif output != None:\n\t\t\t\tself.loader.input(data, output)\n\t\t\telse:\n\t\t\t\tself.loader.input(data)", "def upd_output(self):\n try:\n while True:\n s = self.qout.get(block=False)\n if s == \"SIG_ICONIFY\":\n self.iconify()\n elif s == 'SIG_DEICONIFY':\n self.deiconify()\n else:\n self.present(s)\n except Empty:\n pass\n finally:\n self.master.after(10, self.upd_output)", "def output(d):\n try:\n current_worker().output(d)\n except AttributeError:\n pass", "def _output_update(self):\n self._outputtype = self.inputs.outputtype", "def run(self, input):\n return {}", "def trigger_output(self):\n\n EmptyPromise(self.q(css='div#ready').is_present, \"Click ready\").fulfill()\n self.q(css='div#fixture button').first.click()\n EmptyPromise(self.q(css='div#output').is_present, \"Output available\").fulfill()", "def process_input_in_python(inputf, outputf):\n value=-1\n 
with open(inputf, 'r') as f:\n value = int(f.readline().strip())\n print(outputf)\n time.sleep(5)\n with open(outputf, 'w') as f:\n f.write(str(value**2))", "def exec_(self, input, baseURL=None):\n # if baseURL is None:\n # baseURL = ''\n # else:\n # baseURL = baseURL.href\n raise NotImplementedError", "def handle_output(self, workunit, label, s):\r\n if not self.is_under_main_root(workunit):\r\n return\r\n\r\n if self._show_output_indented(workunit):\r\n self.emit(self._prefix(workunit, s))\r\n elif self._show_output_unindented(workunit):\r\n self.emit(s)\r\n self.flush()", "def _get_output(self):\n return self.__output", "def _get_output(self):\n return self.__output", "def _get_output(self):\n return self.__output", "def _get_output(self):\n return self.__output", "def _get_output(self):\n return self.__output", "def _get_output(self):\n return self.__output", "def execute(self, input_val):\n\n # The next state will be the result of the on_event function.\n if self.is_in_terminal:\n return None\n self._current_state, ret_val = self._current_state.execute(input_val)\n if ret_val is None:\n return None\n # if ret_val is size 1, directly return the value out\n return ret_val[0] if len(ret_val) == 1 else ret_val", "def input(self, input):\n\n self._input = input", "def test_normal_goes_normal(self):\n eq_(self.msg, output(self.msg,\"OUTPUT\"))", "def process_input(self, inp: inputs.Input) -> None:\n self.task_registry.create_task(self.async_process_input(inp))", "def act_on_input(self, input):\n if not input:\n return\n self.parse_input(input)\n commands = self.extract_commands()\n self.execute_commands(commands)", "def return_output(self):\n return self.output", "def out(self, out):\n\n self._out = out", "def out(value):\n output.append(value)\n return False", "def input_(self, op):\n value = input(\"Enter your input: \")\n self.set_value(op.address, value, op.type_, op.is_global)", "def process(self, input, is_processed=False):\n raise NotImplementedError", "def emit(data):", "def run(self, quiet=False):\n self.write_input()\n util.execute(self._bins[0], quiet)", "def output(self, _in, out, **kwds):\n out.write(_in.read())", "def __call__(self, result=None):\n self.check_for_underscore()\n if result is not None and not self.quiet():\n self.start_displayhook()\n self.write_output_prompt()\n result, result_repr = self.compute_result_repr(result)\n self.write_result_repr(result_repr)\n self.update_user_ns(result)\n self.log_output(result)\n self.finish_displayhook()", "def write_output(self):", "def calculate_output(self, input_par):\r\n raise NotImplementedError", "def inp(text):\r\n input(text)", "def calculate_output(self, input_par):\r\n\r\n raise NotImplementedError", "def flush_output():\n if len(buffered) == 1:\n code.add_line(\"append_result(%s)\" % buffered[0])\n elif len(buffered) > 1:\n code.add_line(\"extend_result([%s])\" % \", \".join(buffered))\n del buffered[:]", "def _queue_output(arguments, pidq, outputq):\n kwargs = arguments[\"process\"]\n input_data = arguments[\"input\"].encode(\"utf-8\") if arguments[\"input\"] else None\n\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n # pid None is read by the main thread as a crash of the process\n pidq.put(None)\n\n outputq.put((\n \"\",\n (\"Unexpected exception caught during execution of taskw: '{0}' . 
\"\n \"If you are running out-of-tree tests set TASK_USE_PATH=1 \"\n \"in shell env before execution and add the \"\n \"location of the task(d) binary to the PATH\".format(e)),\n 255)) # false exitcode\n\n return\n\n # Put the PID in the queue for main process to know.\n pidq.put(proc.pid)\n\n # Send input and wait for finish\n out, err = proc.communicate(input_data)\n\n if sys.version_info > (3,):\n out, err = out.decode('utf-8'), err.decode('utf-8')\n\n # Give the output back to the caller\n outputq.put((out, err, proc.returncode))", "def handleInput(self, paramInput):\n MCMC.handleInput(self, paramInput)", "def prog_input(self, put_idx: int) -> None:\n self.write(int(self.stdin.pop()), put_idx)", "def get_output(self):\r\n x = self.query('OUTP?')\r\n if x == None: return None\r\n return int(x)", "def step(self,inp): ## function responsible for exciting the machine with a SINGLE INPUT VALUE\n (s, o) = self.getNextValues(self.state,inp)\n # will store the state and return the output\n self.state =s\n return o", "def user_input(self, op):\n params = 1\n a = self.read_memory(op, 0, params)\n \n input_value = self.input_value()\n if input_value is None:\n return {\"ptr\": self.ptr, \"yield\": True}\n else:\n self.write_memory(a, int(input_value))\n return self.ptr + params + 1", "def text_input():\n return input(\">>>\")", "def get_data(self, query):\n result = input(\"{}: \".format(query))\n return result", "def process_input(self, word):\n return", "def input(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"input\")", "def value_to_output(value, output):\n if value != \"\":\n output.append(value)\n return \"\"", "def execute_request(self, request: Request):\r\n print(\"Handler is validating output\")\r\n if request.output is not None:\r\n if not self.next_handler:\r\n return True\r\n return self.next_handler.execute_request(request)\r\n else:\r\n print(\"Output is not validated\")\r\n return False", "def final_result(self, hyp, uttid):\n\t\t# All this stuff appears as one single action\n\t\tprint \"Final Result:\", hyp\n\t\tself.run_saera(None, \"speech-event\", hyp)", "def output(self, msg):", "def process_output(self, text): # pylint: disable=no-self-use\n changed = False\n return changed, text", "def qoutput(self):\n jobid = self.jobid()\n ou = os.path.join(self.directory, jobid + '.OU')\n if not self.in_queue() and os.path.exists(ou):\n with open(ou) as f:\n return f.read()\n else:\n return \"In queue or no output found.\"", "def run(self):\n self.assign_inputs()\n self.execute()\n self.collect_outputs()", "def _queue_output(arguments, pidq, outputq):\n kwargs = arguments[\"process\"]\n input = arguments[\"input\"]\n\n try:\n proc = Popen(**kwargs)\n except OSError as e:\n # pid None is read by the main thread as a crash of the process\n pidq.put(None)\n\n outputq.put((\n \"\",\n (\"Unexpected exception caught during execution: '{0}' . 
\".format(e)),\n 255)) # false exitcode\n\n return\n\n # Put the PID in the queue for main process to know.\n pidq.put(proc.pid)\n\n # Send input and wait for finish\n out, err = proc.communicate(input)\n\n out, err = out.decode('utf-8'), err.decode('utf-8')\n\n # Give the output back to the caller\n outputq.put((out, err, proc.returncode))", "def process_output(self, state: str, data: SimData, tb_manager: TestbenchManager\n ) -> Tuple[bool, str, Dict[str, Any]]:\n return False, '', {}", "def standard_output(self) -> global___Statement.StandardOutput:", "async def async_process_input(self, inp: inputs.Input) -> None:\n if isinstance(inp, inputs.ModNameComment):\n command = inp.command\n block_id = inp.block_id\n text = inp.text\n\n if command == \"O\":\n self._oem_text[block_id] = f\"{text:12s}\"\n await self.cancel(block_id)\n if None not in self._oem_text:\n self.oem_text_known.set()\n await self.cancel()", "def run(self) -> Any:\n self.prepare()\n for step in self.stream:\n self.output = step\n return self.output", "def test_raw_input_ex(input_output):\n with mock.patch.object(builtins, 'input', lambda _: input_output):\n assert GC.raw_input_ex() == input_output", "def oppdater(self, input):\n return", "def result(self, result):\n self.stdout.write('RESULT {0}\\n{1}'.format(len(result), result))\n self.stdout.flush()", "def calculate_output(self, input_par):\r\n\r\n return self.meta_model.calculate_output(input_par)", "def op_convert_after_hook(\n self,\n op: Callable,\n output,\n global_op_idx: List[int],\n ) -> Any:\n # TODO(future PR): improve performance by moving this out of the\n # path of non-reference ops\n seen_q_op_info = self._get_cur_seen_q_op_info()\n\n if seen_q_op_info.is_reference_op_at_inference:\n # given the current reference module design,\n # we need to quantize to the target dtype\n output_tensor_info = seen_q_op_info.output_tensor_infos[0]\n tensor_id, inf_dtype = \\\n output_tensor_info.id, output_tensor_info.inf_dtype\n scale, zp = self.tensor_id_to_scale_zp[tensor_id]\n output = torch.quantize_per_tensor(\n output, scale, zp, inf_dtype)\n\n if self.log_op_outputs:\n output_clone = clone_detach_tensor_without_dispatch(output)\n seen_q_op_info = self._get_cur_seen_q_op_info()\n self.op_outputs[-1].append(\n (global_op_idx[0], seen_q_op_info.fqn, seen_q_op_info.type, output_clone))\n global_op_idx[0] += 1\n\n return output" ]
[ "0.62992793", "0.621587", "0.6168", "0.60797054", "0.60797054", "0.60701555", "0.6039619", "0.6010388", "0.6008327", "0.5957112", "0.59085846", "0.590205", "0.5835123", "0.5788597", "0.57883525", "0.5769604", "0.57273495", "0.57041085", "0.56851864", "0.56530696", "0.56336266", "0.56118286", "0.5603242", "0.55901206", "0.55854505", "0.5580383", "0.5524359", "0.55099106", "0.55030966", "0.5497854", "0.54971707", "0.54971707", "0.54971707", "0.54665434", "0.5462233", "0.5457121", "0.5455307", "0.54409325", "0.54409325", "0.5426973", "0.54229873", "0.54169035", "0.5415074", "0.54108495", "0.5405988", "0.5389606", "0.53873867", "0.53820974", "0.53733146", "0.5361127", "0.5361127", "0.5361127", "0.5361127", "0.5361127", "0.5361127", "0.53545755", "0.5346319", "0.53444886", "0.5338127", "0.5337606", "0.533283", "0.53283566", "0.5327446", "0.53192705", "0.53125304", "0.53111356", "0.5304029", "0.5303537", "0.52980787", "0.5293211", "0.52842486", "0.52826536", "0.52799314", "0.5267574", "0.5266131", "0.5266077", "0.5259362", "0.5258674", "0.5255394", "0.524335", "0.52360076", "0.52358097", "0.52312434", "0.5227684", "0.5223603", "0.52194494", "0.5215812", "0.5215209", "0.5210309", "0.5198512", "0.51980937", "0.51943743", "0.5193781", "0.51936424", "0.5189133", "0.51887363", "0.5184267", "0.5183563", "0.5180721", "0.51751", "0.5174947" ]
0.0
-1
Process elements from the input queue until empty.
def run(self) -> None:
    while True:
        try:
            input_element = self.input_queue.get_nowait()
            self.process(input_element)
        except Empty:
            return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_queue_fast(self):\n while self.queue:\n self.queue.popleft()()", "def _wait_empty(self):\n while True:\n if self.queue.empty():\n # We still have to wait for the last queue item being processed\n # (queue.empty() returns True before queue.task_done() is\n # called).\n self.queue.join()\n return\n time.sleep(1)", "def get_all_from_queue(Q):\n try:\n while True:\n yield Q.get_nowait()\n except queue.Empty:\n raise StopIteration", "def get_all_from_queue(Q):\n try:\n while True:\n yield Q.get_nowait()\n except Queue.Empty:\n raise StopIteration", "def drainQueue(q):\n buf = []\n while True:\n # Get as much as possible without blocking\n try:\n while True:\n item = q.get_nowait()\n if item is None:\n return buf\n else:\n buf.append(item)\n except Queue.Empty:\n pass\n\n if buf:\n return buf\n\n # Nothing in the queue. Block for\n # one item, then go back and get any\n # that we can without blocking.\n item = q.get()\n if item is None:\n return buf\n else:\n buf.append(item)", "def AdvanceQueue(self):\r\n self.data.pop(0)\r\n return", "def get_all_nowait(queue: Queue) -> list:\n\n results = []\n\n while True:\n try:\n result = queue.get_nowait()\n results.append(result)\n except Empty:\n break\n\n return results", "def drain(queue):\n while not queue.is_empty():\n queue.remove()", "def _qprocess(self):\n while 1:\n t, args, kw = self.inq.get()\n ret = self.__call__(*args, **kw)\n self.outq.put((t, ret))", "def worker(self):\n while True:\n item,index = self.inbound.get()\n if index is None:\n self.buffer.append(item)\n self.index.value = self.index.value + 1 #index of next item for buffer\n if len(self.buffer)>self.size:\n del self.buffer[0]\n self.newitem.put(None)\n else:\n self.buffer[len(self.buffer)+(index - self.index.value)] = item", "def process_queue(self):\n while self.input_processing_running:\n\n # Process everything in the queue.\n while self.input_queue.qsize() > 0:\n try:\n _telem = self.input_queue.get_nowait()\n self.process_telemetry(_telem)\n\n except Exception as e:\n self.log_error(\"Error processing telemetry dict - %s\" % str(e))\n\n # Sleep while waiting for some new data.\n time.sleep(0.5)", "def use_queue():\n q = queue.Queue()\n for i in range(10):\n q.put_nowait(i)\n while q.qsize() > 0:\n element = q.get_nowait()\n sys.stdout.write(\"poping out from queue: {0}\\n\".format(element))", "def process_queue(self, queue):\n\n while queue:\n deferred, data = queue.popleft()\n deferred.callback(data)", "def get(self):\n while self.is_running():\n try:\n inputs = self.queue.get(block=True, timeout=5).get()\n if self.is_running():\n self.queue.task_done()\n if inputs is not None:\n yield inputs\n except queue.Empty:\n pass\n except Exception as e: # pylint: disable=broad-except\n self.stop()\n raise e", "def process_queue_slowly(self):\n start = time.process_time()\n while self.queue and time.process_time() - start < 1.0 / TICKS_PER_SECOND:\n self.queue.popleft()()", "def getAllFromQueue(self, Q):\n try:\n while True:\n yield Q.get_nowait()\n except Queue.Empty:\n raise StopIteration", "def processIncoming(self):\n while self.queue.qsize():\n try:\n # print 'queue'\n msg = self.queue.get(0)\n # Check contents of message and do what it says\n # As a test, we simply print it\n if msg == \"exit\":\n self.deviceError()\n if msg == \"error\":\n self.deviceError()\n else:\n self.decode(msg)\n except Queue.Empty:\n pass", "def my_consumer(q):\n while True:\n data = q.get()\n print('data found to be processed: {}'.format(data))\n processed = data * 2\n 
print(processed)\n\n if data is sentinel:\n break", "def process_queue(self):\n while not self.msg_queue.empty():\n addr, msg = self.msg_queue.get()\n if msg:\n print(msg)\n self.broadcast(addr, msg)\n else:\n self.clean(addr)", "def queue_loader(self, queue):\n for item in self.iterator():\n try:\n converted_item = self.converter(item)\n valid_item = self.validator(converted_item)\n except Exception as e:\n print(type(e), e)\n continue\n queue.put(valid_item)\n while queue.qsize() > 100:\n sleep(0.2)", "def poll(self):\n try:\n while True:\n self.handle(self.queue.get(block=False))\n except queue.Empty:\n pass\n\n [self.handle(x) for x in pygame.event.get()]", "def checkQueue( self ):\n if self.queue:\n yield self.writeToSerial( self.queue.pop( 0 ) )\n else:\n self.free = True", "def consume(iterator):\n deque(iterator, maxlen=0)", "def clear_queue(self):\n while not self.queue.empty():\n self.queue.get()", "def run(self) -> None:\n while self.data_incoming or len(self._queue):\n if not self._queue:\n logging.info(\"Consumer %d is sleeping since queue is empty\", self._name)\n time.sleep(0.75)\n print(self._queue.get())\n time.sleep(0.5)", "def feed (self, inputs):\n numinputs = len(inputs)\n outputs = [None for _ in xrange(numinputs)]\n for index, item in enumerate(inputs):\n self.queues[0].put((index, item))\n\n # get results\n finished = set()\n while len(finished) < len(outputs):\n index, result = self.queues[-1].get() # blocked\n outputs[index] = result\n finished.add(index)\n return outputs", "def _process_incoming_queue_messages(self):\n while self._queue.qsize():\n msg = self._queue.get()\n if msg == MAP_UPDATE:\n self._clear_measurement_progress_label()\n self._presenter.update_map(self.chosen_value.get())", "def dequeue(self):", "def _get_nowait(self):\n # Fulfills a waiting producer, returning its value, or raising Empty if\n # no fulfillable producers are waiting.\n def fulfill_waiting_producer():\n while True:\n if self._waiting_producers:\n produce_wish = self._waiting_producers.pop(0)\n with produce_wish.group.lock:\n if not produce_wish.group.fulfilled:\n return produce_wish.fulfill()\n else:\n raise Empty()\n\n if self._buf is not None and not self._buf.empty:\n value = self._buf.pop()\n try:\n # Cycles a producer's value onto the buffer\n produced = fulfill_waiting_producer()\n self._buf.push(produced)\n except Empty:\n pass\n return value\n else:\n return fulfill_waiting_producer()", "def schdule(self):\n while self.queue:\n if self.processing >= self.maxProcessing:\n # We have reached the maximum number of parallel\n # tasks.\n break\n\n item, completeDeferred = self.queue.pop(0)\n\n self.processing += 1 \n self.start(item).addBoth(self.done).chainDeferred(completeDeferred)", "def populatereadyqueue():\n readyQueue.put(Process(\"P1\", time(0, 0, 1), time(0, 0, 4)))\n readyQueue.put(Process(\"P2\", time(0, 0, 2), time(0, 0, 6)))\n readyQueue.put(Process(\"P3\", time(0, 0, 3), time(0, 0, 2)))", "def queue_inputs(self, iterable):\n self.input_queue.extend(iterable)", "def poll(self):\n if self.isEmpty():\n raise Exception('Queue is empty')\n \n self.qSize -= 1\n self.front = self.front % len(self.data)\n d = self.data[self.front]\n self.front += 1\n return d", "def get_from_queue(self):\n while not self.receive_queue.empty():\n cmd, kwargs = bcp.decode_command_string(\n self.receive_queue.get(False))\n self._process_command(cmd, **kwargs)", "def processq(self):\n\n while True:\n command = None\n lock = Locker(str(self.qlockfile))\n if lock.lockfile():\n if 
self.queuefile.exists():\n line = self.queuefile.read_text()\n q = line.split(',')\n if any(q):\n command = q.pop(0)\n # remember q has now changed\n if not any(q):\n self.queuefile.unlink()\n else:\n line = \",\".join(q)\n self.queuefile.write_text(line)\n lock.unlockfile()\n\n if command:\n self.execute(command)\n else:\n break", "def _getqueue(self):\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty(): return self.outqueues[index]", "def _getqueue(self):\n\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty():\n return self.outqueues[index]", "def _queue_search(self, population):\n [self._queue.put(p) for p in enumerate(population)]", "def clean_queue(self):\n self._stdin_queue.put_nowait(None) # Release thread", "def run(self):\n numbers = range(5)\n global queue\n while True:\n condition.acquire()\n if len(queue) == MAX_ITEMS:\n print(\"Queue is Full, producer is in Waiting state\")\n condition.wait() # This actually releases the lock and notifies other threads waiting on it - consumer in this case\n # if queue has space\n print(\"Space in Queue, Producer is adding numbers to queue\")\n number = random.choice(numbers)\n queue.append(number)\n print(\"Produced {}\".format(number))\n condition.notify()\n condition.release()\n time.sleep(random.random())", "def dequeue(self):\n pass", "def dequeue(self):\n pass", "def queue_iter(queue: Queue) -> Generator[T, None, None]:\n while True:\n val = queue.get()\n yield val", "def drain_results_queue(self):\n while len(self._scheduled) > 0:\n self.process_results()", "def checkQueue( self ):\n if self.queue:\n print 'clearing queue...(%d items)' % len( self.queue )\n yield self.writeToSerial( *self.queue.pop( 0 ) )\n else:\n print 'queue free for writing'\n self.free = True", "def maybe_enqueue(self):\n if len(self._vals) > 0:\n self.enqueued = True\n return self._queue.enqueue(self._vals)\n else:\n return None", "def _pull_batch_from_queue(self):\n rollout = self.explorer.queue.get( timeout = 600.0 )\n while not rollout.terminal:\n try: \n rollout.extend( self.explorer.queue.get_nowait() )\n except queue.Empty:\n break\n print(rollout.size())\n return rollout", "def _drain_queue(self):\n while self.queue:\n self._export_batch()", "def queued(values, qsize):\n values = [_normalize(v) for v in values]\n if qsize < 1:\n raise ValueError(\"qsize must be 1 or larger\")\n q = []\n it = iter(values)\n try:\n for i in range(qsize):\n q.append(next(it))\n for i in cycle(range(qsize)):\n yield q[i]\n q[i] = next(it)\n except StopIteration:\n pass", "def __iter__(self):\n return iter(self.queue)", "def dequeue(self):\r\n if self.size():\r\n self.queue.pop(0)\r\n else:\r\n raise IndexError(\"Queue is empty.\")", "def empty(self):\r\n return self.point_to_head.chi == None\r\n\r\n\r\n\r\n # Your MyQueue object will be instantiated and called as such:\r\n # obj = MyQueue()\r\n # obj.push(x)\r\n # param_2 = obj.pop()\r\n # param_3 = obj.peek()\r\n # param_4 = obj.empty()", "def empty(self):\r\n return self.queue == []\r\n\r\n\r\n\r\n # Your MyQueue object will be instantiated and called as such:\r\n # obj = MyQueue()\r\n # obj.push(x)\r\n # param_2 = obj.pop()\r\n # param_3 = obj.peek()\r\n # param_4 = obj.empty()\r", "def _wait_queue(self):\n while True:\n time.sleep(0.1)\n if self.queue.unfinished_tasks == 0 or self.stop_signal.is_set():\n return", "def _put_nowait(self, value):\n while True:\n if self._waiting_consumers:\n consume_wish = 
self._waiting_consumers.pop(0)\n with consume_wish.group.lock:\n if not consume_wish.group.fulfilled:\n consume_wish.fulfill(value)\n return\n elif self._buf is not None and not self._buf.full:\n self._buf.push(value)\n return\n else:\n raise Full()", "def monitor_queue(self):\n\n while True:\n job = self.queue.next()\n if job:\n # print(\"found %s\" % (job.job_id))\n\n job_name = job.payload[\"job_name\"]\n\n if job_name in self.mul_func_map:\n\n t = self.mul_func_map[job_name]\n p = multiprocessing.Process(target=t, args=(job,))\n p.daemon = True\n p.start()\n\n elif job_name in self.th_func_map:\n\n t = self.th_func_map[job_name]\n # create a thread to process the job\n p = threading.Thread(target=t, args=(job,))\n p.daemon = True\n # start the thread, going into the worker function\n p.start()\n\n elif job_name in self.fk_func_map:\n t = self.fk_func_map[job_name]\n if not os.fork():\n os.setsid()\n t(job)\n exit()\n else:\n # jobs in this queue that are unknown are presently being skipped\n # however they could probably get moved to a 'dead letter' queue\n # for closer examination\n print(\"unknown job name %s, skipping\" % (job_name))\n\n # throttle so that other worker subscribers get a chance\n time.sleep(self.queue_delay)\n else:\n time.sleep(self.poll_delay)\n\n # prints the number of threads\n # print len(threading.enumerate())", "def drain_call_queue(self):\n if len(self.call_queue) == 0:\n return\n self.apply(lambda x: x)", "def _consumer(self) -> None:\n while (data := self._q.get()) is not None:\n write_data(data, self.writer)\n self._q.task_done()\n else:\n logging.info(\"None received. Queue consumed.\")\n self._q.task_done()\n return", "def dequeue_loop():\n while True:\n result = dequeue_function()\n if not result:\n break\n print(result)", "def syncdequeue(self):\n #FIXME: Handle exceptions caused when some queue in the list might be empty\n temp=[]\n for itr, contextqueue in enumerate(self.queues):\n try:\n temp.append(self.queues[itr].get())\n except:\n Queue.Empty\n return temp", "def clearQueueAll():", "def empty_queue():\n return Queue()", "def empty_queue():\n return Queue()", "def _process_whisper_queue(self, whisper_queue):\n while True:\n if len(whisper_queue) > 0:\n whisper_tuple = (whisper_queue.pop())\n self.ts.send_whisper(whisper_tuple[0], whisper_tuple[1])\n time.sleep(.5)", "def consume(q):\n while not q.empty():\n name = threading.currentThread().getName()\n LOG.debug(f\"Thread: {name} getting host from queue[current size = {q.qsize()}] {time.strftime('%H:%M:%S')}\")\n host = q.get()\n LOG.debug(f\"Pinging host: {host}\")\n res = os.system(f'ping -c 1 {host} >/dev/null 2>&1')\n if res == 0:\n LOG.info(f\"Host {host} is alive.\")\n LOG.debug(f\"Thread: {name} finished queue[current size = {q.qsize()}] {time.strftime('%H:%M:%S')}\")\n q.task_done()", "def queue_peek(queue_instance, timeout=60):\r\n while True:\r\n try:\r\n yield queue_instance.get(timeout=timeout)\r\n except Empty:\r\n break", "def _input_callback(self, _, blocks: numpy.array) -> None:\n try:\n self._output_queue.put_nowait(blocks)\n except queue.Full:\n self._output_queue.get_nowait()\n self._output_queue.put_nowait(blocks)", "def next(self):\n while True: # waiting\n item = self.get_next_if_any()\n if item is not None: # feature: value None is filtered out\n return item\n\n if self.nomore: # if nothing else is coming\n break # stop waiting\n\n time.sleep(0.1) # wait before checking again\n\n raise StopIteration() # tell next worker nothing else is coming", "def pop_sm(self):\r\n while 
True:\r\n # wait to receive a read request\r\n req = yield self.r_in_pipe.get()\r\n # model read latency\r\n #for i in range(self.read_latency):\r\n yield self.wait_sys_clks(self.read_latency)\r\n # try to read head element\r\n if len(self.items) > 0:\r\n data = self.items[0]\r\n self.items = self.items[1:]\r\n else:\r\n print >> sys.stderr, \"ERROR: FIFO pop_sm: attempted to read from empty FIFO\"\r\n data = None\r\n # write data back\r\n self.r_out_pipe.put(data)", "def test_the_queue_dequeue_multi_values_phase_one(the_queue):\n the_queue.enqueue(2)\n the_queue.enqueue(3)\n the_queue.enqueue(4)\n the_queue.enqueue(5)\n the_queue.dequeue()\n assert the_queue._new_dll.tail.data == 3", "def threadWorker(self):\n while True:\n row = self.queue.get() #get a row of data\n if row is None: #ending criterium\n break\n self.similarityQuestions(row) #the actual working function\n self.queue.task_done() #inform the queue one task is done", "def empty_queue(queue):\n return queue.front is None", "def yield_img(img_queue):\n global acq_running\n \n while acq_running:\n time.sleep(sleep_time)\n # get elements from queue while there is more than one element\n # playing it safe: I'm always leaving one element in the queue\n while img_queue.qsize() > 1:\n #print(\"reading from queue \", img_queue.qsize())\n yield img_queue.get(block = False)\n\n # read out last remaining elements after end of acquisition\n while img_queue.qsize() > 0:\n yield img_queue.get(block = False)\n print(\"acquisition done\")", "def wait_until_empty(self):\n while not self.is_empty():\n self.sleep(10)", "def __init__(self,size=10):\n \n self.inbound = Queue() #an internal queue to manage the class properly in a thread safe manner.\n self.index = Value('i',0) #index of next item to be added.\n self.manager = Manager()\n \n self.buffer = self.manager.list() #the buffer we will store things in.\n self.size = size #the maximum size of the buffer\n self.newitem = Queue() #a blocking event to control the pop method\n t = threading.Thread(target=self.worker) #the worker that will run when items are added.\n t.start() #start the worker\n self.newitemindex = 0 #index of items to pop", "def processIncoming(self):\r\n while self.queue.qsize():\r\n try:\r\n volume_T101 = self.queue.get(0)\r\n self.var_pb_progress.set(volume_T101/100) #scale to 100\r\n self.var_T101.set(\"T101: \" + str(round(volume_T101,4)))\r\n self.var_LIT101.set(self.take_reading(volume_T101))\r\n self.update_physical(volume_T101)\r\n self.PLC_command()\r\n self.check_attack(volume_T101)\r\n self.output_results()\r\n self.master.update_idletasks()\r\n except queue.Empty:\r\n pass", "def pop_sm(self):\r\n while True:\r\n # wait to receive a read request\r\n req = yield self.r_in_pipe.get()\r\n # model read latency\r\n # for i in range(self.read_latency):\r\n yield self.wait_sys_clks(self.read_latency)\r\n # try to read head element\r\n if len(self.items) > 0:\r\n data = self.items[0]\r\n self.items = self.items[1:]\r\n else:\r\n print >> sys.stderr, \"ERROR: PIFO pop_sm: attempted to read from empty PIFO\"\r\n data = None\r\n # write data back\r\n self.r_out_pipe.put(data)", "def pop_all(self):\n with self.lock:\n output = list(self.queue)\n self.queue.clear()\n\n return output", "def __iter__(self):\n\n collector = FIFOArray(self.chunksize, self.axis)\n for arr, maskarr in zip(self.data, self.mask):\n\n if not np.any(maskarr):\n continue\n\n filtered = np.take(arr, np.flatnonzero(maskarr), axis=self.axis)\n collector.put(filtered)\n\n while collector.full():\n\n yield 
collector.get()\n\n # else runs after normal loop exit -- required here\n else: #pylint: disable=useless-else-on-loop\n\n if collector.qsize() > 0:\n\n yield collector.get()", "def queue_input(self, value):\n self.input_queue.append(value)", "def _producer(self) -> None:\n while (gtex_path := self.gtex.pop(0)) is not None and (\n bm_path := self.bm.pop(0)\n ) is not None:\n data = merge_data(gtex_path, bm_path, self.mane)\n self._q.put(data)\n logger.info(f\"Contents of file {gtex_path} added to queue\")\n else:\n self._q.put(None) # Send end signal to consumer\n logger.info(\"All files added. None signal sent. Producer returns\")\n return", "def processIncoming(self):\n while (self.queue.qsize()):\n try:\n message = self.queue.get_nowait()\n \n self.terminal.insert(END,message)\n\n # Autoscroll the terminal if set\n if (self.autoscroll_value.get()):\n self.terminal.yview(END)\n\n except Queue.Empty:\n pass", "def queue_processor(self):\n\n while self.state != consts.SMPP_CLIENT_STATE_CLOSED:\n try:\n p = self.queue.get(timeout=1)\n self._request_handler(p)\n self.queue.task_done()\n except Empty:\n pass", "def popMsg(self):\n\n if not self.queue:\n return []\n returned_msgs = []\n for msg, delay in self.queue:\n delay -= 1\n if delay < 1:\n returned_msgs.append(msg)\n else:\n self.pushMsg(msg, delay)\n self.queue = []\n return returned_msgs", "def testPushPopItem(self):\n test_queue = multi_process.MultiProcessingQueue()\n\n for item in self._ITEMS:\n test_queue.PushItem(item)\n\n test_queue.SignalEndOfInput()\n test_queue_consumer = test_lib.TestQueueConsumer(test_queue)\n test_queue_consumer.ConsumeItems()\n\n self.assertEqual(test_queue_consumer.number_of_items, len(self._ITEMS))", "def dequeue(self):\n if self.is_empty():\n raise Exception(\"Queue is empty !!! Please add data to the Queue :) \")\n else:\n return self.data.pop(0)", "def get_nowait(self):\r\n if self.empty():\r\n raise QueueEmpty\r\n item = self._get()\r\n self._wakeup_next(self._putters)\r\n return item", "def worker(self):\n while True: # Feed forever. 
Enqueue will block when queue is full.\n while len(self.memory) < self.min_memory:\n time.sleep(1)\n batch = self.memory.sample(self.batchsize)\n states, actions, rewards, terminals = zip(*batch)\n self.session.run(self.enqueue_op, {\n self.states: states, self.actions: actions,\n self.rewards: rewards, self.terminals: terminals,\n })", "def empty(self):\r\n return self.queue == []", "def check_ack_queue(self):\r\n try:\r\n while True:\r\n ack = self.ack_queue.get_nowait()\r\n self.handle_ack(ack)\r\n except queue.Empty:\r\n pass", "def __init__(self): \n self.queue = []", "def collect_data(self):\n self.logger.info(\"Waiting for incoming data ...\")\n while True:\n item = self.in_queue.get()\n self.logger.info(\"Received data!\")\n self.collector_process_data(item)", "def __iter__(self):\n if not self.is_alive() and not self.finished.is_set():\n self.start()\n # if there is an item in the queue, yield it, otherwise wait\n while not self.finished.is_set():\n try:\n yield self.queue.get(True, 0.25)\n except queue.Empty:\n pass\n except KeyboardInterrupt:\n self.stop()", "def take_all(self) -> List[T]:\n self.prefetch_all()\n return self.take(len(self._queue))", "def get_queue(queue_limits):\n\n queues, limits = queue_limits.items()\n queues.pop('')\n\n while(True): \n \n queued_jobs = qstat_plain()\n jobs = {queue : [j for j in queued_jobs if j.queue == queue] for queue in queues} \n jobs[''] = [j for j in queued_jobs if j.queue not in queues]\n\n for queue in queues:\n if len(jobs[queue]) < queue_limits[queue]:\n yield queue\n else:\n time.sleep(30)", "def next(self):\n try:\n return self.queue.get()\n except Empty:\n raise StopIteration", "def reader(handle, input_queue):\n input_queue.put(handle.read())", "def __post_init__(self) -> None:\n self.gtex += [None]\n self.bm += [None]\n self._q: queue.Queue = queue.Queue(maxsize=self.maxsize)", "def __init__(self):\r\n self.queue = []", "def __init__(self):\r\n self.queue = []" ]
[ "0.7123321", "0.6928471", "0.6863272", "0.68230313", "0.681069", "0.67634106", "0.6683323", "0.6645927", "0.66053593", "0.66042024", "0.65760016", "0.65450245", "0.65428", "0.65332437", "0.64878625", "0.6477527", "0.6431174", "0.63786554", "0.6355708", "0.6354153", "0.63382304", "0.6287964", "0.6271525", "0.6248273", "0.6200562", "0.6196528", "0.6194125", "0.61878234", "0.61864585", "0.61863744", "0.6165464", "0.6134669", "0.6131481", "0.6130909", "0.6119406", "0.6106125", "0.6098514", "0.60982955", "0.6094255", "0.6090943", "0.60792476", "0.60792476", "0.60683894", "0.6064192", "0.6061759", "0.6011663", "0.6010523", "0.5999879", "0.599727", "0.59932256", "0.59733725", "0.5946322", "0.5938303", "0.59373915", "0.59324396", "0.5924675", "0.59098494", "0.5905754", "0.5902315", "0.5884798", "0.5860477", "0.5854626", "0.5854626", "0.584914", "0.58442044", "0.5835664", "0.5835471", "0.5834734", "0.5833807", "0.58292246", "0.58276665", "0.58271444", "0.5819444", "0.58034855", "0.5801618", "0.57926536", "0.5763053", "0.57630163", "0.5760861", "0.57605326", "0.5757267", "0.5749315", "0.57483965", "0.5745953", "0.5741173", "0.5734558", "0.573054", "0.57250774", "0.57097185", "0.57077175", "0.5701947", "0.56991446", "0.5693553", "0.5687959", "0.5686874", "0.5684352", "0.5681902", "0.568161", "0.5678879", "0.5678879" ]
0.71853554
0
Download a single file from the web, single-threaded.
def single_file_download(url: str, encoding: str = "utf-8") -> str:
    recipient = BytesIO()  # the stream we will write into
    # print("Opening %r . . ." % url)
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    curl.setopt(curl.WRITEDATA, recipient)
    curl.perform()
    curl.close()
    # print("Closed %r." % url)
    return recipient.getvalue().decode(encoding)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _download_single(url, to, id):\n if os.path.exists(to):\n error_flags[id] = 1\n return\n\n try:\n request = rq.Request(url=url, headers=forge_agent_header)\n info = rq.urlopen(request).read()\n\n except urllib.error.URLError as e:\n print(url, 'urllib error')\n error_flags[id] = 2\n return\n\n except Exception as e:\n print(url, e)\n error_flags[id] = 2\n return\n\n with open(to, \"wb\") as file:\n print(url, 'writing')\n file.write(info)\n\n error_flags[id] = 1", "def download_file(self, url, path):\n print('\\tDownloading: ', path)\n with open(path, 'w') as outfile:\n try:\n response = self._http_client.get(url)\n outfile.write(response.text)\n finally:\n response.close()\n outfile.close()\n gc.collect()", "def web_get_file(self, url):\n try:\n print(url)\n response = requests.get(url, verify=False)\n file_buffer = BytesIO(response.content)\n file_buffer.seek(0)\n return file_buffer\n except:\n print(traceback.print_exc())\n return None", "def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()", "def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)", "def fetch(file_url):\n\n tmp_file_handle = NamedTemporaryFile(delete=True)\n headers = {'User-Agent': 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36'}\n\n # download file and save to temp object\n with requests.get(file_url, headers=headers, stream=True) as r:\n tmp_file_handle.write(r.content)\n\n tmp_file_handle.flush()\n\n return tmp_file_handle", "def download(self, url, filename):\n print(\"url\", url)\n print(\"filename\", filename)\n # open in binary mode\n with open(filename, \"wb\") as file:\n # get request\n try:\n r = requests.get(url)\n if r.status_code == 404:\n raise NotFoundException(\n \"URL: \", url, \" is not working. Status code 404\")\n # write to file\n file.write(r.content)\n print(\"file downloaded\")\n except ConnectionError as ex:\n print(ex)\n except NotFoundException as ex:\n print(ex)\n except Exception as ex:\n print(ex)", "def get_file(url):\n helpers.make_workdir() # create temp working directory\n file_url = url + constant.MALICIOUS_LOCATION\n print(file_url)\n filename = wget.download(file_url, out=constant.WORKDIR)\n return filename", "def _download(url, file_name):\n # File length can only be approximated from the resulting GET, unfortunately\n r = requests.get(url, stream=True)\n if 'Content-Length' in r.headers:\n file_len = int(r.headers['Content-Length'])\n elif 'X-Original-Content-Length' in r.headers:\n file_len = int(r.headers['X-Original-Content-Length'])\n else:\n file_len = 0\n r.raw.decode_content = True\n with open(file_name, 'wb') as f:\n _copyfileobj(r.raw, f, chunks=(file_len / (64. 
* 1024)))\n r.close()\n\n return file_name", "def download(url, filename):\n response = requests.get(url, stream=True)\n with open(filename, \"wb\") as handle:\n for data in response.iter_content():\n handle.write(data)", "def download_file(filename, url):\n print(\"downloading {0}\".format(url))\n with open(filename, \"wb\") as fout:\n response = requests.get(url, stream=True, verify=False)\n response.raise_for_status()\n # Write response data to file\n iblock = 0\n for block in response.iter_content(4096):\n if iblock % 10000 == 0:\n sys.stdout.write(\".\")\n sys.stdout.flush()\n iblock += 1\n fout.write(block)", "def getfile(url):\n try:\n return urlreq.urlopen(url)\n except urlreq.HTTPError as e:\n safeprint(\"Sever returned with response code \" + str(e.getcode()) + \", download failed.\")", "def download_file_nowget(url, fn, cookiejar):\n\tprint \"Downloading %s -> %s\" % (url, fn)\n\turlfile = get_opener(cookiejar).open(url)\n\tchunk_sz = 1048576\n\tbytesread = 0\n\tf = open(fn, \"wb\")\n\n\twhile True:\n\t\tdata = urlfile.read(chunk_sz)\n\t\tif not data:\n\t\t\tprint \".\"\n\t\t\tbreak\n\n\t\tf.write(data)\n\t\tbytesread += len(data)\n\t\tprint \"\\r%d bytes read\" % bytesread,\n\t\tsys.stdout.flush()", "def main(url, localfile):\n ph.download_file(url, localfile)", "def download_file(filename, url):\n with open(filename, 'wb') as fout:\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Write response data to file\n for block in response.iter_content(4096):\n fout.write(block)", "def _download_file(file_url: str, file_path: str) -> str:\n if os.path.exists(file_path):\n return file_path\n op_desc = f\"Downloading {os.path.basename(file_path)}\"\n try:\n with requests.Session() as req_sess:\n req_res = req_sess.get(file_url, stream=True)\n total_length = int(req_res.headers.get(\"Content-Length\"))\n with tqdm.wrapattr(req_res.raw, \"read\", total=total_length, desc=op_desc) as raw:\n with open(file_path , \"wb\") as file:\n shutil.copyfileobj(raw,file)\n return file_path\n except Exception as network_error:\n if os.path.exists(file_path):\n os.remove(file_path)\n raise network_error", "def get_file(url, file_name=None):\n cache_dir = os.path.join(os.path.expanduser(\"~\"), \".jhML\")\n\n if file_name is None:\n file_name = url[url.rfind('/') + 1:]\n file_path = os.path.join(cache_dir, file_name)\n\n if not os.path.exists(cache_dir):\n os.mkdir(cache_dir)\n\n if os.path.exists(file_path):\n return file_path\n\n print(\"Downloading: \" + file_name)\n try:\n urllib.request.urlretrieve(url, file_path, show_progress)\n except (Exception, KeyboardInterrupt) as e:\n if os.path.exists(file_path):\n os.remove(file_path)\n raise\n print(\" Done\")\n\n return file_path", "def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... 
OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()", "def download_file(url, outputfile):\r\n try:\r\n req = requests.get(url, stream=True, timeout=120)\r\n try:\r\n with open(outputfile, 'wb') as file_download:\r\n for chunk in req.iter_content(chunk_size=1024): \r\n if chunk: \r\n file_download.write(chunk)\r\n except IOError as error:\r\n print error\r\n except requests.exceptions.RequestException as err:\r\n print err\r\n except socket.error as err:\r\n print err\r\n return None", "def __download_file(file_path, url, extension=''):\r\n auth = (DaemonServer._user['_email'], DaemonServer._user['_token'])\r\n res = requests.get(DaemonServer._base_url + url, auth=auth, stream=True)\r\n with open(file_path + extension, 'wb') as dfile:\r\n for chunk in res.iter_content(chunk_size=1024):\r\n if chunk:\r\n dfile.write(chunk)", "def download(self):\n logging.debug('start thread:%s at %s' % (self.getName(), get_current_time()))\n headers = {'Range': 'bytes=%s-%s' % (self.start_pos, self.end_pos)}\n res = requests.get(self.url, headers=headers)\n self.fd.seek(self.start_pos)\n self.fd.write(res.content)\n\n logging.debug('Stop thread:%s at%s' % (self.getName(), get_current_time()))\n self.fd.close()", "async def _download(self) -> None:\n\n # do request\n async with aiohttp.ClientSession() as session:\n async with session.get(self.url, auth=self._auth, timeout=self._timeout) as response:\n # check response\n if response.status == 200:\n # get data and return it\n self._buffer = await response.read()\n elif response.status == 401:\n log.error(\"Wrong credentials for downloading file.\")\n raise FileNotFoundError\n else:\n log.error(\"Could not download file from filecache.\")\n raise FileNotFoundError", "def download_file(url, filename):\n with requests.get(url, stream=True) as res:\n if res.status_code == 200:\n with open(filename, 'wb') as f:\n for chunk in res.iter_content(chunk_size=8192): \n f.write(chunk)\n else:\n raise ValueError(\"{} {}\".format(res.status_code, url))\n return filename", "def download_from_url(path, url):\n filename = url.split(\"/\")[-1]\n found_file = find_file(path, filename, max_depth=0)\n if found_file is None:\n filename = os.path.join(path, filename)\n logging.info(\"Downloading from %s to %s.\" % (url, filename))\n inprogress_filepath = filename + \".incomplete\"\n inprogress_filepath, _ = urllib.request.urlretrieve(\n url, inprogress_filepath, reporthook=download_report_hook)\n # Print newline to clear the carriage return from the download progress.\n print()\n tf.gfile.Rename(inprogress_filepath, filename)\n return filename\n else:\n logging.info(\"Already downloaded: %s (at %s).\" % (url, found_file))\n return found_file", "def single_download(self, url, meta_mode=False):\n self.println(DL_HEAD)\n try:\n if self.djs_core is None or self.analyzer is None:\n print(\"Download failed, enter `help` for help.\")\n else:\n if meta_mode:\n self._meta_download([url, ])\n else:\n self._download([url, ])\n os.chdir(self.home)\n except Exception as e:\n self.println(\"Download failed and stopped.\")\n print(str(e))\n self.println(DL_TAIL)", "def _Download(url):\n response = urllib2.urlopen(url)\n if response.code != 200:\n raise RuntimeError('Failed to download \"%s\".' 
% url)\n return response.read()", "def fetch(thread=False):\r\n if thread:\r\n Fetch.start()\r\n else:\r\n urlretrieve(OBSURL,ZFILE)", "def download(filename):\n print \"Downloading\", filename\n file_content = urlopen(\n urljoin(URL_PATH, filename)\n )\n write_data_to_file(\n file_content.read(),\n os.path.join(\n '/tmp',\n filename\n )\n )", "def download_file (url):\n\n '''\n Try and download the file given in the url,\n throw up an error if not possible.\n '''\n try:\n ret = urllib2.urlopen (url)\n except urllib2.HTTPError:\n return None\n except urllib2.URLError:\n return None\n\n print \"Downloaded \" + url\n\n return ret", "def download_file(url: str) -> str:\n\n assert len(url) > 0\n\n filename = url.split('/')[-1]\n\n with open(filename, 'wb') as output_file:\n response = requests.get(url, stream=True)\n total = response.headers.get('content-length')\n\n if total is None:\n output_file.write(response.content)\n else:\n downloaded = 0\n total = int(total)\n for data in response.iter_content(chunk_size=max(int(total / 1000), 1024 * 1024)):\n downloaded += len(data)\n output_file.write(data)\n done = int(50 * downloaded / total)\n sys.stdout.write('\\r[{}{}]'.format('█' * done, '.' * (50 - done)))\n sys.stdout.flush()\n sys.stdout.write('\\n')\n\n return filename", "def DownloadFile(url, theFile, quietMode):\n # open in binary mode\n with open(theFile, \"wb\") as file:\n if not quietMode:\n print(\"[-] - Downloading -> [{0}] ...\".format(url))\n response = requests.get(url)\n if not quietMode:\n print(\"[-] - Saving -> [{0}] ...\".format(theFile))\n file.write(response.content)", "def send_get_request(url, file_name=None):\r\n request = urllib.request.Request(url, headers={'User-Agent': AGENT})\r\n with urllib.request.urlopen(request) as response:\r\n response_context = response.read()\r\n if file_name is None:\r\n return response_context\r\n with open(file_name, 'bw+') as f:\r\n f.write(response_context)\r\n return response_context", "def download_file():\n data = c.recv(BUFFER)\n \n if data == b\"terminate\":\n print(\"DOWNLOADING FAILED !!!\")\n return\n\n file = open(FILE_NAME,\"wb\")\n while True:\n if data == b\"DONE\":\n break\n \n print(\"Receiving. . . 
\")\n file.write(data)\n data = c.recv(BUFFER)\n \n file.close()\n print(\"Successfully received!!!\")\n \n print(\"Webpage saved as {} at {}\".format(FILE_NAME, getcwd())) \n return None", "def download_file(url, file_name):\n conn = urllib3.PoolManager(\n cert_reqs='CERT_REQUIRED',\n ca_certs=certifi.where())\n\n with conn.request('GET', url, preload_content=False) as resp, open(file_name, 'wb') as out:\n shutil.copyfileobj(resp, out)", "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def download (httpfile, path_unzip = None, outfile = None) :\n if path_unzip is None : path_unzip = GetPath ()\n file = _check_source (httpfile, path_unzip = path_unzip, outfile = outfile)\n return file", "def download(self, url):\n if url is None:\n return\n user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'\n headers = {'User-Agent': user_agent}\n r = requests.get(url, headers=headers, verify=False)\n if r.status_code == 200:\n r.encoding = 'utf-8'\n return r.text\n return None", "def download_file(url, fname):\n urllib.request.urlretrieve(url, fname)", "def url_retrieve(url, output_file):\n r = requests.get(url, allow_redirects=True)\n if r.status_code != 200:\n raise ConnectionError(f\"Could not download {url}\\nError code: {r.status_code}\")\n\n output_file.write_bytes(r.content)", "def download_file(url, target_path):\n\n r = requests.get(url, stream=True)\n\n with open(target_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)", "def download_file(url_path):\n local_filename = url_path.split('/')[-3] + \"-\" + url_path.split('/')[-1]\n local_filename = OUT_DIR + local_filename\n print local_filename\n url = \"https://commoncrawl.s3.amazonaws.com/\" + url_path\n # NOTE the stream=True parameter\n req = requests.get(url, stream=True)\n with open(local_filename, 'wb') as write_f:\n for chunk in req.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n write_f.write(chunk)\n write_f.close()\n return local_filename", "def download():\n try:\n cli.run(\n [URL, '--output', TEMP_DIR],\n )\n except SystemExit:\n return None", "def __getFile_httplib(self, _src, _dst):\n\n #-------------------- \n # Pre-download callbacks\n #-------------------- \n self.runEventCallbacks('downloadStarted', _src, -1)\n self.runEventCallbacks('downloading', _src, 0)\n\n\n\n #-------------------- \n # Download\n #-------------------- \n response = self.__httpsRequest('GET', _src)\n data = response.read() \n with open(_dst, 'wb') as f:\n f.write(data) \n\n\n\n #-------------------- \n # Post-download callbacks\n #-------------------- \n self.removeFromDownloadQueue(_src)\n self.runEventCallbacks('downloadFinished', _src)", "def start_download(self) -> NoReturn:\n if self.threaded:\n self.threaded_download()\n else:\n self.regular_download()", "def download(url, filename=None):\n\t# requirements os, shutil, urllib.parse, urllib.request\n\tif not filename:\n\t\turl_parts = urllib.parse.urlparse(url)\n\t\tfilename = os.path.basename(url_parts.path)\n\turl_h = urllib.request.urlopen(url)\n\twith open(filename, 'wb') as file_h:\n\t\tshutil.copyfileobj(url_h, file_h)\n\turl_h.close()\n\treturn", "def fetch_save(url):\n\n name = url.split(\"/\")[-1]\n response = requests.get(url, stream=True)\n if response.status_code == 200:\n with 
open(f\"{DATA_PATH}/{name}\", \"wb\") as f:\n f.write(response.raw.read())\n else:\n logging.info(f\"Failed {url} download\")", "def _download_file(url: str, output_path: str):\n\n def write_to_file(response: requests.Response, output_path: str) -> int:\n \"\"\"Write the response content to the given file.\n\n :param response: Response to be written to the output file.\n :param output_path: Path to the output file.\n :returns: Number of bytes read from the response content.\n \"\"\"\n read_bytes = 0\n with open(output_path, \"wb\") as output_file:\n # Use the same chunk size of `urlretrieve`\n for chunk in response.iter_content(chunk_size=1024 * 8):\n read_bytes += len(chunk)\n output_file.write(chunk)\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n break\n return read_bytes\n\n try:\n with requests.get(\n url, stream=True, timeout=FETCHER_REQUEST_TIMEOUT\n ) as response:\n response.raise_for_status()\n\n content_length = int(response.headers.get(\"Content-Length\", 0))\n if content_length > FETCHER_MAXIMUM_FILE_SIZE:\n raise REANAFetcherError(\"Maximum file size exceeded\")\n\n read_bytes = write_to_file(response, output_path)\n\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n os.remove(output_path)\n raise REANAFetcherError(\"Maximum file size exceeded\")\n except HTTPError as e:\n error = f\"Cannot fetch the workflow specification: {e.response.reason} ({response.status_code})\"\n if response.status_code == 404:\n error = \"Cannot find the given workflow specification\"\n raise REANAFetcherError(error)\n except Timeout:\n raise REANAFetcherError(\n \"Timed-out while fetching the workflow specification\"\n )\n except RequestException:\n raise REANAFetcherError(\n \"Something went wrong while fetching the workflow specification\"\n )", "def download_file(url, local_filename, update=False):\n if os.path.isfile(local_filename):\n if not update:\n return\n else:\n os.remove(local_filename)\n\n r = requests.get(url, stream=True)\n # http://stackoverflow.com/questions/15352668/download-and-decompress-gzipped-file-in-memory\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n f.write(chunk)", "def download_simple(url): # url(str)\n html = urlopen(url).read().decode()\n return html", "def download(url, target):\n # Add progress bar via:\n # http://stackoverflow.com/a/22776/317916\n if not url:\n return None\n urlretrieve(url, target)\n return target", "def fetch(file):\n\tprint \"Fetching {0}...\".format(file.split(\"/\")[-1])\n\tsubprocess.call(\"wget {0} > /dev/null 2>&1\".format(file), shell=True)", "def download_file(url,file_name):\n #http://stackabuse.com/download-files-with-python/\n filedata = urllib2.urlopen(url)\n datatowrite = filedata.read()\n with open(file_name, 'wb') as f:\n f.write(datatowrite)", "def download(self, url):\n url = URL(url)\n downloader = getattr(self, 'download_%s' % url.scheme, None)\n if downloader is None:\n msg = \"We haven't implemented the '%s' protocol yet.\" % url.scheme\n raise NotImplementedError(msg)\n fp = None\n else:\n fp = downloader(url)\n return fp", "def download_file(url, local_filename):\n response = requests.get(url, stream=True)\n with open(local_filename, \"wb\") as outfile:\n for chunk in response.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n outfile.write(chunk)", "def _download_from_url(self, url):\n target_file_name = self.dir + \"/\" + url.split('/')[-1].split('?')[0]\n urllib.urlretrieve (url, target_file_name)", "def 
pywget(url, first_time=True):\n if not isinstance(url, str):\n print(\"Error: url is not a string\")\n return None\n\n filename = url[url.rfind('/')+1:]\n extension = os.path.splitext(filename)[1][1:].strip().lower()\n name_without_extension = os.path.splitext(filename)[0]\n\n # handle name collision\n i = 1;\n while os.path.isfile(filename):\n filename = name_without_extension + '.' + str(i) + '.' + extension\n i += 1\n\n try:\n urllib.request.urlretrieve(url, filename)\n if first_time:\n pywget_inside_crawler(url)\n except:\n pass", "def download_file_from_url(url, PATH, file_name):\n with requests.get(url) as r:\n with open(PATH+'/'+file_name, 'wb') as f:\n f.write(r.content)", "def download():\n raise NotImplementedError", "def download_file(url, output_filename):\n print(\"Downloading\", url, \"to\", output_filename)\n r = requests.get(url)\n r.raise_for_status()\n with open(output_filename, 'wb') as f:\n f.write(r.content)", "def _download_file(self, file_id, file_name, path):\n request = self.service.files().get_media(fileId=file_id)\n fh = io.FileIO(path + file_name, 'wb')\n downloader = MediaIoBaseDownload(fh, request)\n done = False\n print('Start download ' + file_name)\n while not done:\n status, done = downloader.next_chunk()\n print(\"Download %d%%.\" % int(status.progress() * 100))", "def download_file(url, filename):\n\n with DownloadProgressBar(unit=\"B\",\n unit_scale=True,\n miniters=1,\n desc=url.split(\"/\")[-1]\n ) as t:\n urllib.request.urlretrieve(url, filename=filename, reporthook=t.update_to)", "def download_to_file(url, filename):\n with browser_spoof_open(url) as download_conn:\n with open(filename, \"wb\") as out_file:\n shutil.copyfileobj(download_conn, out_file, 1024 * 8)", "def download(url, target):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n with open(target, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk:\n f.write(chunk)", "def downloadTempGrab(self, url):\n if os.path.exists(\"temp.dat\"):\n os.remove(\"temp.dat\")\n cmd = \"wget -q -T 3 -t 1\" # 1 attempt (no retries)\n cmd += \" -O %s %s\" % (\"temp.dat\", url)\n self.log(cmd)\n process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\n process.wait()", "def torrent_download(download_url, torrent):\n webFile = urllib.urlopen(download_url)\n localFile = open(torrent, 'wb')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()", "def _download_from_url(self) -> bytes:\n response = requests.get(self.url, allow_redirects=True)\n return response.content", "def fetch(self, url: furl) -> str:\n try:\n contents = self._download(url)\n except requests.ConnectionError as err:\n logger.exception(f\"Request failed with {err}\")\n click.secho(\n f\"The URL {url} could not be downloaded. Either your network is unreachable or the URL is broken.\"\n f\" Check the URL, fix your connection, or use \"\n f\" {OptionEnum.OFFLINE.as_flake8_flag()} / {OptionEnum.OFFLINE.as_envvar()}=1\",\n fg=\"red\",\n err=True,\n )\n return \"\"\n return contents", "def download(url, path):\n response = requests.get(url)\n\n if response.ok:\n print(\"response is ok file is downloading ... 
\")\n # start to download file from url.\n with open(path, \"wb\") as f:\n f.write(response.content)\n else:\n print(\"Error!\", response.status_code)\n return False\n\n print(\"File downloaded succusfully.\")\n return True", "def download_from_url(url, output_path):\n\n print('Pulling data from {} to {}'.format(url, output_path))\n wget.download(url, output_path)\n print('done')", "def download():\n response = requests.get(URL, stream=True)\n\n file = open(FILE_NAME, 'wb')\n file.write(response.content)\n\n with zipfile.ZipFile(FILE_NAME, 'r') as zip_ref:\n zip_ref.extractall()\n\n file.close()\n os.remove(FILE_NAME)", "def download_file(driver, link, filename):\n download_path = os.path.join(os.environ['HOME'], \"Downloads\", filename)\n # TODO: copy cookies, user agent, ect to session\n s = requests.session()\n r = s.get(link, stream=True)\n with open(download_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)\n return download_path", "def _DownloadFile(self, url, local_filename = None, modifiers = \"\",\n force = False):\n try:\n if local_filename == None:\n local_filename = url.split('/')[-1]\n if os.path.isfile(local_filename) and not force:\n if self.verbose:\n print \"File at %s already exists.\" % local_filename\n return local_filename\n if self.dont_download:\n return local_filename\n webFile = urllib2.urlopen(url)\n localFile = open(local_filename, (\"w%s\" % modifiers))\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n os.chmod(local_filename, 0777)\n except urllib2.HTTPError:\n return None\n except urllib2.URLError:\n print \"The url %s is malformed.\" % url\n return None\n return localFile.name", "def download_file(filename, url):\n block_size = 10240 * 1024 # 10 MB\n tmp_filename = filename + '.part'\n first_byte = os.path.getsize(tmp_filename) if os.path.exists(tmp_filename) else 0\n file_mode = 'ab' if first_byte else 'wb'\n file_size = int(requests.head(url).headers['Content-length'])\n headers = { \"Range\": \"bytes=%s-\" % first_byte }\n r = requests.get(url, headers=headers, stream=True)\n\n if os.path.getsize(filename) > file_size:\n return False\n\n print('Downloading: %s' % url)\n print('Starting download at %.0f MB' % (first_byte / 1e6))\n\n with open(tmp_filename, file_mode) as f:\n for chunk in r.iter_content(chunk_size=block_size):\n if chunk:\n # filter out keep-alive new chunks\n f.write(chunk)\n\n shutil.move(tmp_filename, filename)\n print(\"Saved: %s\" % filename)", "def download_single(data):\n url = data[0]\n image_id = data[1]\n target_path = data[2]\n\n if os.path.exists(target_path):\n return\n\n try:\n response = requests.get(url, timeout=30)\n response.raise_for_status()\n except:\n LOGGER.warning('Failed to fetch url %s (id=%d)', url, image_id)\n return\n\n try:\n content = response.content\n image = Image.open(BytesIO(content))\n except:\n LOGGER.warning('Failed to capture image at url %s (id=%d)', url, image_id)\n return\n\n if not image.format == 'JPEG':\n try:\n image = image.convert('RGB')\n except:\n logging.warning('Failed to convert RGB, %s (id=%d)', url, image_id)\n return\n\n try:\n image.save(target_path, format='JPEG', quality=100)\n except:\n LOGGER.warning('Failed to save url %s (id=%d)', url, image_id)\n return\n\n return", "def _download_to_file(session: requests.Session, url: str, pyfile: Path):\n with session.get(url, stream=True) as r:\n r.raise_for_status()\n pyfile.parent.mkdir(parents=True, exist_ok=True)\n with pyfile.open(mode=\"wb\") as f:\n for chunk in 
r.iter_content(chunk_size=40960):\n f.write(chunk)", "def download_song(url, filename):\n page = requests.get(url, headers=HEADERS)\n if page.status_code == 200: # OK\n with open(filename, 'w') as outf:\n outf.write(page.text)\n else:\n print(f'download failed with status code {page.status_code}!')", "def download(self, download_path):\n return", "def downloadFile(self, base_url, file_name):\n url = os.path.join(base_url, file_name)\n req = urllib2.Request(url)\n try:\n f = urllib2.urlopen(req, timeout=self.timeout)\n local_file = open(os.path.join(self.config.get('PATHS', 'pdfdir'), file_name), \"w\")\n local_file.write(f.read())\n local_file.close()\n except Exception, err:\n print \"[ Failed ]\"\n print \"\\n***ERROR in downloadFile: %s\" % err\n sys.exit(0)", "def download(ctx, file, stream):\n if not check_main_conf(ctx):\n return\n\n file = int(file)\n\n resp = ctx.obj['api'].client.file.file_download(id=file).result()\n\n if 'error_code' in resp:\n click.echo(resp['error_message'])\n return\n\n if stream:\n r = requests.get(resp['download_url'])\n stdout_binary = click.get_binary_stream('stdout')\n\n for chunk in r.iter_content(chunk_size=512 * 1024):\n stdout_binary.write(chunk)\n else:\n click.echo(resp['download_url'])", "def download(self, url: str, directory: str = '', overwrite: bool = False) -> Future:\n future = self.executor.submit(self._download_file, url, directory, overwrite)\n return future", "def wind3dp_single_download(file, path=None):\n\n # add a OS-specific '/' to end end of 'path'\n if path:\n if not path[-1] == os.sep:\n path = f'{path}{os.sep}'\n else:\n path = sunpy.config.get('downloads', 'download_dir') + os.sep\n\n data = file.split('_')[1] # e.g. 'sfsp'\n year = file.split('_')[3][:4]\n base = f\"https://sprg.ssl.berkeley.edu/wind3dp/data/wi/3dp/{data}/{year}/\"\n\n url = base+'/'+file\n\n try:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=file, path=path, progressbar=True)\n except ModuleNotFoundError:\n downloaded_file = pooch.retrieve(url=url, known_hash=None, fname=file, path=path, progressbar=False)\n except requests.HTTPError:\n print(f'No corresponding data found at {url}')\n downloaded_file = []\n\n return downloaded_file", "def t_getfile(self, link, filename, session):\n\n self.sema.acquire()\n\n filepath = os.path.join(os.getcwd() + '/Downloads/' + str(filename))\n os.makedirs(os.path.dirname(filepath), exist_ok=True)\n\n if not os.path.isfile(filepath):\n self.download_new_file(link, filepath, session)\n else:\n\n current_bytes = os.stat(filepath).st_size\n\n headers = requests.head(link).headers\n\n print(headers)\n if 'content-length' not in headers:\n print(f\"server doesn't support content-length for {link}\")\n self.sema.release()\n return\n\n total_bytes = int(requests.head(link).headers['content-length'])\n\n print(total_bytes)\n\n if current_bytes < total_bytes:\n #\n self.continue_file_download(link, filepath, session, current_bytes, total_bytes)\n print(f\"Current byte < total - remaining {total_bytes - current_bytes}\")\n else:\n print(f\"already done: {filename}\")\n\n self.sema.release()", "def getTile( self, url, pathname ):\n \n # retry counters\n tries = 1; max_tries = 3\n while tries <= max_tries:\n\n try:\n\n # setup curl object - include ssl certificates\n curl = pycurl.Curl()\n curl.setopt(pycurl.CAINFO, certifi.where())\n curl.setopt(pycurl.URL, url )\n\n # write binary data to file\n fp = open( pathname, \"wb\" )\n curl.setopt(pycurl.WRITEDATA, fp)\n curl.perform()\n\n # close object and file\n 
curl.close()\n fp.close()\n\n print ( '{}: {} -> {}'. format( self._idx, url, pathname ))\n break\n\n except Exception as e:\n\n # increment retry counter - wait for random interval\n print ( 'Download Exception {}: {} -> {}'.format( str( e ), url, pathname ) )\n tries += 1\n time.sleep ( random.randrange( 5 ) )\n\n # delete file if download failed \n if tries > max_tries:\n os.remove( pathname )\n\n return", "def download(self):\n\n # os.open *should* give a thread-safe way to exlusivly open files\n filepath = self.film\n try:\n # os.O_BINARY is only avilable and needed on windows\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_BINARY\n except:\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY\n try:\n fd = os.open(filepath, flags)\n except:\n return\n\n try:\n response = self.session.get(self.filmurl, stream=True)\n if response.status_code == 200:\n for chunk in response.iter_content(1024):\n os.write(fd, chunk)\n except:\n # Remove partial img file if request or stream fails\n os.close(fd)\n os.remove(filepath)", "def fetch(self, url):\r\n fname = os.path.join(self._cachedir, self._formatter(url))\r\n if not os.path.exists(fname):\r\n time.sleep(self._sleep)\r\n html = urllib.urlopen(url).read()\r\n with codecs.open(fname, 'w', 'utf-8') as f:\r\n soup = BeautifulSoup(html)\r\n f.write(unicode(soup))\r\n return fname", "def download_file(client, file_id):\n\n file_content = client.file(file_id).content()\n print(file_content)", "def _download(url):\n \n filename = url.split('/')[-1]\n if os.path.isfile(filename):\n info('Using pre-existed file {} from local system.'.format(filename))\n else:\n info('Downloading {} from OMA Database.'.format(url.split('/')[-1]))\n filename, _ = urlretrieve(url, filename)\n return filename", "def download_small_file(data_url, temp_file):\r\n r = requests.get(data_url)\r\n with open(temp_file, \"w\") as f:\r\n f.write(r.text)", "def download_file(url, outfile=None):\n if not outfile:\n outfile = url.split(\"/\")[-1]\n info(\"Downloading %s to %s\" % (url, outfile))\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(outfile, \"wb\") as f:\n for chunk in r.iter_content(chunk_size=8192):\n f.write(chunk)\n return outfile", "def to_file(self, filename):\n resp = urlopen(self.url)\n self.file_size = self._get_content_length(resp.headers)\n block_size = 8192\n self.bytes_read = 0\n with open(filename, 'wb') as f:\n while True:\n buf = resp.read(block_size)\n if not buf:\n break\n self.bytes_read += len(buf)\n f.write(buf)\n self._dl_progress_bar()\n if self.show_progress:\n print(' ✓')", "def urlretrieve(url, filename, reporthook=None, data=None):\n\n def chunk_read(response, chunk_size=8192, reporthook=None):\n content_type = response.info().get(\"Content-Length\")\n total_size = -1\n if content_type is not None:\n total_size = int(content_type.strip())\n count = 0\n while True:\n chunk = response.read(chunk_size)\n count += 1\n if reporthook is not None:\n reporthook(count, chunk_size, total_size)\n if chunk:\n yield chunk\n else:\n break\n\n response = urlopen(url, data)\n with open(filename, \"wb\") as fd:\n for chunk in chunk_read(response, reporthook=reporthook):\n fd.write(chunk)", "def _download_file(self, url, local_filepath, timeout=None, auth=None,\n continuation=True, cache=False, method=\"GET\",\n head_safe=False, **kwargs):\n\n if head_safe:\n response = self._session.request(\"HEAD\", url,\n timeout=timeout, stream=True,\n auth=auth, **kwargs)\n else:\n response = self._session.request(method, url,\n timeout=timeout, 
stream=True,\n auth=auth, **kwargs)\n\n response.raise_for_status()\n if 'content-length' in response.headers:\n length = int(response.headers['content-length'])\n if length == 0:\n log.warn('URL {0} has length=0'.format(url))\n else:\n length = None\n\n if ((os.path.exists(local_filepath)\n and ('Accept-Ranges' in response.headers)\n and continuation)):\n open_mode = 'ab'\n\n existing_file_length = os.stat(local_filepath).st_size\n if length is not None and existing_file_length >= length:\n # all done!\n log.info(\"Found cached file {0} with expected size {1}.\"\n .format(local_filepath, existing_file_length))\n return\n elif existing_file_length == 0:\n open_mode = 'wb'\n else:\n log.info(\"Continuing download of file {0}, with {1} bytes to \"\n \"go ({2}%)\".format(local_filepath,\n length - existing_file_length,\n (length-existing_file_length)/length*100))\n\n # bytes are indexed from 0:\n # https://en.wikipedia.org/wiki/List_of_HTTP_header_fields#range-request-header\n end = \"{0}\".format(length-1) if length is not None else \"\"\n self._session.headers['Range'] = \"bytes={0}-{1}\".format(existing_file_length,\n end)\n\n response = self._session.request(method, url,\n timeout=timeout, stream=True,\n auth=auth, **kwargs)\n response.raise_for_status()\n del self._session.headers['Range']\n\n elif cache and os.path.exists(local_filepath):\n if length is not None:\n statinfo = os.stat(local_filepath)\n if statinfo.st_size != length:\n log.warning(f\"Found cached file {local_filepath} with size {statinfo.st_size} \"\n f\"that is different from expected size {length}\")\n open_mode = 'wb'\n else:\n log.info(\"Found cached file {0} with expected size {1}.\"\n .format(local_filepath, statinfo.st_size))\n response.close()\n return\n else:\n log.info(\"Found cached file {0}.\".format(local_filepath))\n response.close()\n return\n else:\n open_mode = 'wb'\n if head_safe:\n response = self._session.request(method, url,\n timeout=timeout, stream=True,\n auth=auth, **kwargs)\n response.raise_for_status()\n\n blocksize = astropy.utils.data.conf.download_block_size\n\n log.debug(f\"Downloading URL {url} to {local_filepath} with size {length} \"\n f\"by blocks of {blocksize}\")\n\n bytes_read = 0\n\n # Only show progress bar if logging level is INFO or lower.\n if log.getEffectiveLevel() <= 20:\n progress_stream = None # Astropy default\n else:\n progress_stream = io.StringIO()\n\n with ProgressBarOrSpinner(length, f'Downloading URL {url} to {local_filepath} ...',\n file=progress_stream) as pb:\n with open(local_filepath, open_mode) as f:\n for block in response.iter_content(blocksize):\n f.write(block)\n bytes_read += len(block)\n if length is not None:\n pb.update(bytes_read if bytes_read <= length else length)\n else:\n pb.update(bytes_read)\n response.close()\n return response", "def urlretrieve(url, filename, reporthook=None, data=None):\n\n def chunk_read(response, chunk_size=8192, reporthook=None):\n content_type = response.info().get('Content-Length')\n total_size = -1\n if content_type is not None:\n total_size = int(content_type.strip())\n count = 0\n while True:\n chunk = response.read(chunk_size)\n count += 1\n if reporthook is not None:\n reporthook(count, chunk_size, total_size)\n if chunk:\n yield chunk\n else:\n break\n\n response = urlopen(url, data)\n with open(filename, 'wb') as fd:\n for chunk in chunk_read(response, reporthook=reporthook):\n fd.write(chunk)", "def download_file(file_id, filename):\n # httplib2 library is not thread-safe, need a new http for each thread\n 
drive_service = discovery.build('drive', 'v3', http=get_http())\n request = drive_service.files().get_media(fileId=file_id)\n\n fh = io.FileIO(filename, 'wb')\n downloader = MediaIoBaseDownload(fh, request)\n\n done = False\n while done is False:\n try:\n status, done = downloader.next_chunk()\n except Exception as ex:\n print (\"User rate limit exceeded for %s\" % filename)\n return False\n print (\"Download %d%%.\" % int(status.progress() * 100))\n return True", "async def get_file(self, link, name, md5, session):\n if os.path.exists(name) or md5 in opts.archived_md5:\n self.count += 1\n return\n\n async with session.get(link) as media:\n # Open file initially with .part suffix\n with open(f\"{name}.part\", \"wb\") as f:\n while True:\n chunk = await media.content.read(1024)\n if not chunk:\n break\n f.write(chunk)\n\n # Remove .part suffix once complete\n # After this point file won't get removed if script gets interrupted\n os.rename(f\"{name}.part\", name)\n\n if opts.archive:\n log_hash(md5)\n self.count += 1\n msg(f\"{self.fetch_progress()} {self.board}/{self.dir}/{name}\")", "def get_file(self):\n while not (self.is_connection_working()):\n print('Connection is not working. Reason should be printed above. Sleeping 5 minutes and retrying.')\n time.sleep(300)\n i = 0\n while True:\n if i >= 3:\n print('Looks like file {} is really not on FTP. Skipping.'.format(self.url))\n return\n if self.file_exists_on_ftp():\n with closing(request.urlopen(self.url, )) as r:\n with open(self.save_filepath, 'wb') as f:\n shutil.copyfileobj(r, f)\n if i > 0:\n print('Download succeeded on attempt {}'.format(i+1))\n return\n else:\n print(\n 'requests.urlopen error. This sometimes means that file {} \"not exists\" on FTP '\n 'but sometimes it is just \"erruption on the Sun\" and file is downloaded on second attempt. '\n 'Sleeping 1 minute and retrying download. Retry will be done {} more times'.format(self.url,\n 3 - (i + 1)))\n time.sleep(60)\n i += 1\n continue\n # print('WARNING: Connection is OK, but system was not able to get file. Skipping.')", "def webdl(url):\n print('Downloading...{}'.format(url))\n try:\n r = requests.get(url)\n r.raise_for_status()\n return r\n except:\n print('[Error webdl]: Download failed for {}'.format(url))\n return None", "def download(self, url: str, dest: PathLike, force: bool = False):", "def download_file():\n\n if 'POST' == request.method:\n file_id = request.form['file_id']\n else:\n file_id = request.args.get('file_id')\n\n # 1 ==> example_1.tgz\n file_path = file_manager.get_file_path_from_id(file_id)\n print \"serving file: \" + file_path\n return send_file(file_path, as_attachment=True)", "def downloader(thread_num):\n tid = 'Thread ' + numprefix.format(thread_num) + ': '\n for i in range(thread_num, len(self.titles), thread_count):\n title, link = self.titles[i], self.download_urls[i]\n name = vidprefix.format(i) + ' ' + title + '.mp4'\n tries = 0\n while (not os.path.exists(name) or os.path.getsize(name) == 0) \\\n and tries <= trycount:\n if os.path.exists(name): os.remove(name)\n self.log(tid + 'Calling wget for ' + name)\n subprocess.call(['wget', '--output-document=' + name, link])\n tries += 1\n if (not os.path.exists(name) or os.path.getsize(name) == 0):\n self.log(tid + 'wget failed for ' + name)\n else:\n self.log(tid + 'wget successfully downloaded ' + name)" ]
[ "0.7207766", "0.7165215", "0.71526456", "0.7114009", "0.7071731", "0.70129436", "0.6987536", "0.6952817", "0.6934653", "0.6932239", "0.6927163", "0.6924666", "0.6897304", "0.68810105", "0.68735075", "0.68536335", "0.6847325", "0.68178904", "0.681241", "0.6794002", "0.6791782", "0.6786619", "0.676097", "0.67390996", "0.6725004", "0.6718398", "0.66984904", "0.6680623", "0.6675514", "0.66572267", "0.66515255", "0.664803", "0.6647157", "0.664258", "0.66365266", "0.6621266", "0.66087294", "0.6605612", "0.6600356", "0.6587633", "0.65813965", "0.65752214", "0.6560133", "0.65597576", "0.65591365", "0.6556556", "0.6548185", "0.65448743", "0.6535062", "0.65323514", "0.65160686", "0.65154094", "0.6512261", "0.6494428", "0.6482805", "0.64815634", "0.64747036", "0.6468412", "0.64660996", "0.6465351", "0.6456775", "0.6453495", "0.6444557", "0.6442706", "0.6440312", "0.6414252", "0.6397293", "0.63959944", "0.63916916", "0.63851047", "0.63789874", "0.6372494", "0.63656366", "0.6363634", "0.63616186", "0.6360812", "0.635884", "0.6352238", "0.6352088", "0.63493526", "0.6347743", "0.63462806", "0.63345647", "0.63327134", "0.63283366", "0.6324331", "0.63243216", "0.6321538", "0.63204044", "0.6318518", "0.6316396", "0.6315991", "0.6310727", "0.6309338", "0.62992936", "0.6293642", "0.6290939", "0.6286106", "0.6285935", "0.6280737" ]
0.6760712
23
Get the contents of the file at the given url.
def process(self, url: str) -> None:\n    text = single_file_download(url, encoding=\"utf-8\")\n    self.output_queue.put(text)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def web_get_file(self, url):\n try:\n print(url)\n response = requests.get(url, verify=False)\n file_buffer = BytesIO(response.content)\n file_buffer.seek(0)\n return file_buffer\n except:\n print(traceback.print_exc())\n return None", "def get(self, url):\n \n content = \"\"\n if hasattr(http.client, \"HTTPSConnection\"): \n url_options = urlparse(url)\n\n conn = http.client.HTTPSConnection(url_options.netloc)\n conn.request('GET', url_options.path + '?' + url_options.query)\n content = conn.getresponse().read().decode('utf-8')\n conn.close()\n else: \n p = os.popen('curl -k \"' + url + '\"')\n content = p.read()\n p.close() \n\n return content", "def retrieve_content(self, url):\n page = requests.get(url)\n content = page.content\n return content", "def read_url(url):\n response = requests.get(url)\n return response.text", "def getfile(url):\n try:\n return urlreq.urlopen(url)\n except urlreq.HTTPError as e:\n safeprint(\"Sever returned with response code \" + str(e.getcode()) + \", download failed.\")", "def get_url(url):\r\n response = requests.get(url)\r\n content = response.content.decode(\"utf8\")\r\n return content", "def read_url(url):\n return requests.get(url).text", "def ReadRemoteFile(url) -> bytes:\n local_url = download_util.DownloadResource(url)\n return file_util.OpenFile(local_url).read()", "def read_web(url):\n f = urllib.request.urlopen(url)\n contents = f.read()\n return contents", "def fetch(self, url) -> bytes:\n buffer = self.download(url)\n zfs = ZipFileSystem(buffer, \"r\")\n return zfs.open(zfs.glob(\"*\")[0]).read()", "def load_url_content(url):\n try:\n r = requests.get(url)\n if r.ok:\n return r.text\n else:\n return None\n except Exception:\n return None", "def _get(url):\n url = urlparse(url)\n conn = HTTPConnection(url.hostname, url.port)\n conn.request('GET', url.path+url.query)\n return conn.getresponse().fp.read()", "def http_get_contents(url) -> str:\n\n # Clean url\n url = str(url).strip('\\\\')\n url = str(url).strip('\\n')\n\n try:\n # Fixed SSL bug on MacOS: /Applications/Python\\ 3.8/Install\\ Certificates.command\n http = urllib3.PoolManager()\n http_response = http.request('GET', url, timeout=5)\n http_response_content = http_response.data\n\n if http_response.status == 200:\n return http_response_content.decode('utf-8')\n\n return ''\n\n # pylint: disable=W0703\n except Exception as error:\n # pylint: disable=W1202\n LOGGER.error('Error. Could not connect to: {0}. 
Error message: {1}'.format(url, error))\n\n return ''", "def downloadData(url):\n \n content = urllib2.urlopen(url)\n return content", "def load_file_from_url(self, url: str) -> bytes:\n cached_content = self.cache_get(url)\n if cached_content is not None:\n return cached_content\n try:\n req = requests.get(url, timeout=self.requests_timeout)\n req.raise_for_status()\n content = req.content\n self.cache_set(url, content)\n except requests.RequestException as err:\n self.log_error(err)\n repl_content = self.get_replacement_file(url)\n if repl_content is None:\n raise ImageNotFound(err)\n content = repl_content\n return content", "def getcontents(self, fs_url, mode='rb', encoding=None, errors=None, newline=None):\n fs, path = self.parse(fs_url)\n return fs.getcontents(path, mode, encoding=encoding, errors=errors, newline=newline)", "def getfilehttps(self, url):\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n response = urllib.request.urlopen(url, context=ctx)\n result = response.read()\n return result", "def _fs_get_file(url, working_dir):\n if not os.path.isabs(url) and working_dir:\n url = os.path.join(working_dir, url)\n\n try:\n with codecs.open(url, 'r', encoding='utf-8') as f:\n return f.read()\n except Exception as e:\n raise ScrBaseException(\"Could not load file from {0}: {1}\".format(url, e))", "def fetch(self, url: furl) -> str:\n try:\n contents = self._download(url)\n except requests.ConnectionError as err:\n logger.exception(f\"Request failed with {err}\")\n click.secho(\n f\"The URL {url} could not be downloaded. Either your network is unreachable or the URL is broken.\"\n f\" Check the URL, fix your connection, or use \"\n f\" {OptionEnum.OFFLINE.as_flake8_flag()} / {OptionEnum.OFFLINE.as_envvar()}=1\",\n fg=\"red\",\n err=True,\n )\n return \"\"\n return contents", "def _s3_get_file(url):\n try:\n return S3().get_contents_from_url(url)\n except Exception as e:\n raise ScrBaseException(\"Could not load file from {0}: {1}\".format(url, e))", "def read_page(url):\n\n return urllib.request.urlopen(url).read()", "def fetch(url):\n content = requests.get(url).text\n if \"Error\" in content:\n raise ValueError(f\"Cannot read from: {url}\")\n return content", "def get(self, url):\n\n\t\ttry:\n\t\t\trequest = urllib2.Request(url)\n\t\t\trequest.add_header('User-Agent', self.user_agent)\n\n\t\t\tlogging.debug('Get.get - getting url ' + url)\n\n\t\t\tresult = urllib2.urlopen(request)\n\n\t\texcept: raise RuntimeError('unable to open url')\n\n\t\treturn result", "def get_file(url):\n helpers.make_workdir() # create temp working directory\n file_url = url + constant.MALICIOUS_LOCATION\n print(file_url)\n filename = wget.download(file_url, out=constant.WORKDIR)\n return filename", "def read_url(self, url: str) -> str:\n return requests.get(url, headers=self.headers).text", "def download(self, url):\n if url is None:\n return\n user_agent = 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'\n headers = {'User-Agent': user_agent}\n r = requests.get(url, headers=headers, verify=False)\n if r.status_code == 200:\n r.encoding = 'utf-8'\n return r.text\n return None", "def page_read(url):\n LOGGER.debug('Reading %s', url)\n return urlopen(url).read().decode('utf-8')", "def get_remote_file(url):\n # Disable the proxies by not trusting the env\n session = requests.Session()\n session.trust_env = False\n\n # Make the request\n requests.packages.urllib3.disable_warnings()\n try:\n r = session.get(url, verify=False)\n except 
requests.exceptions.RequestException as e:\n # catastrophic error. bail.\n print(e)\n sys.exit(1)\n\n r = session.get(url, verify=False)\n remote_file = r.text\n return remote_file", "def downloadString(url):\n filein = urllib2.urlopen(url)\n data = filein.read()\n filein.close()\n return data", "def fetch(file_url):\n\n tmp_file_handle = NamedTemporaryFile(delete=True)\n headers = {'User-Agent': 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.61 Safari/537.36'}\n\n # download file and save to temp object\n with requests.get(file_url, headers=headers, stream=True) as r:\n tmp_file_handle.write(r.content)\n\n tmp_file_handle.flush()\n\n return tmp_file_handle", "def get_url_content(url):\n try:\n print(\"HTTP request to the URL {}\".format(url))\n page = requests.get(url, headers=http_headers, timeout=10)\n except requests.exceptions.Timeout:\n print(\"Timeout exceeded for URL {}\".format(url))\n except requests.exceptions.RequestException:\n print(\"Broken connection for URL {}\".format(url))\n finally:\n return page", "def get_file(cls, url, working_dir):\n if url.lower().startswith(\"s3://\"):\n return cls._s3_get_file(url)\n elif url.lower().startswith(\"http\"):\n return cls._http_get_file(url)\n else:\n return cls._fs_get_file(url, working_dir)", "def simple_get(url):\n try:\n with closing(requests.get(url, stream=True)) as resp:\n if(is_good_response(resp)):\n return resp.content\n\n except:\n return None", "def download_simple(url): # url(str)\n html = urlopen(url).read().decode()\n return html", "def url_read(self, url):\n if 'raise' in url:\n raise urllib.error.HTTPError(None, None, None, None, None)\n else:\n return self.contents", "def get_from_net(self, url):\n print 'opening', url\n ty = urlopen(url)\n print 'reading...'\n s = ty.read()\n print 'done'\n return s", "def send_get_request(url, file_name=None):\r\n request = urllib.request.Request(url, headers={'User-Agent': AGENT})\r\n with urllib.request.urlopen(request) as response:\r\n response_context = response.read()\r\n if file_name is None:\r\n return response_context\r\n with open(file_name, 'bw+') as f:\r\n f.write(response_context)\r\n return response_context", "def simple_get(url):\n\ttry:\n\t\twith closing(get(url, stream=True)) as resp:\n\t\t\tif is_good_response(resp):\n\t\t\t\treturn resp.content\n\t\t\telse:\n\t\t\t\treturn None\n\n\texcept RequestException as e:\n\t\tlog_error('Error during requests to {0} : {1}'.format(url, str(e)))\n\t\treturn None", "def _download_from_url(self) -> bytes:\n response = requests.get(self.url, allow_redirects=True)\n return response.content", "def fetch_file(url, filename):\n from clinica.utils.exceptions import ClinicaException\n from urllib.request import Request, urlopen\n from urllib.error import URLError\n import shutil\n import ssl\n import os.path\n from clinica.utils.stream import cprint\n\n head_tail = os.path.split(filename)\n if not os.path.exists(head_tail[0]):\n cprint('Path to the file does not exist')\n cprint('Stop Clinica and handle this error')\n\n # Download the file from `url` and save it locally under `file_name`:\n cert = ssl.get_server_certificate((\"aramislab.paris.inria.fr\", 443))\n gcontext = ssl.SSLContext()\n req = Request(url)\n try:\n response = urlopen(req, context=gcontext)\n except URLError as e:\n if hasattr(e, 'reason'):\n cprint('We failed to reach a server.')\n cprint(['Reason: ' + e.reason])\n elif hasattr(e, 'code'):\n cprint('The server could not fulfill the request.')\n 
cprint(['Error code: ' + e.code])\n else:\n try:\n with open(filename, 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n except OSError as err:\n cprint(\"OS error: {0}\".format(err))", "def FetchUrlContent(url):\n content = memcache.get(url)\n if content:\n return content\n\n request = urlfetch.fetch(url)\n\n if request.status_code == 200:\n content = request.content\n memcache.add(url, content, 60 * 60)\n return content\n\n raise LookupError('Unable to fetch URL. Response code: ' +\n str(request.status_code))", "def get_file_lines(url):\n\n # Download the file over the internet\n response = requests.get(url, stream=True)\n lines = []\n\n for line in response.iter_lines():\n lines.append(line.decode(\"UTF-8\"))\n return lines", "def simple_get(url):\n\ttry:\n\t\twith closing(get(url, stream=True)) as resp:\n\t\t\tif is_good_response(resp):\n\t\t\t\treturn resp.content # pylint: disable=no-member\n\t\t\telse:\n\t\t\t\treturn None\n\n\texcept RequestException as e:\n\t\tlog_error('Error during requests to {0} : {1}'.format(url, str(e)))\n\t\treturn None", "def fetch_url(url):\n try:\n soup = bs(urlopen(url).read(), 'html.parser')\n return soup\n except:\n print \"Couldnot download the content from the URL\", url\n return \"\"", "def read(url, encoding=None, cache=None, mode=\"rb\"):\n with read_handle(url, cache, mode=mode) as handle:\n data = handle.read()\n\n if encoding:\n data = data.decode(encoding)\n\n return data", "def fetch(self, url):\n self.log.info(\"Fetching URL: \" + url)\n\n r = requests.get(url, verify=False)\n # raise an HTTPError on badness\n r.raise_for_status()\n\n # this decodes r.content using a guessed encoding\n return r.text", "def get_file_by_url(url, params=None, **kwargs):\n\n try:\n req = requests.get(url=url, params=params, **kwargs)\n except requests.exceptions.RequestException:\n print(\"Error retrieving data from {}\".format(url))\n return None\n\n req.encoding = req.apparent_encoding\n res_text = \"\\n\".join([domain_to_idna(line) for line in req.text.split(\"\\n\")])\n return res_text", "def get_bytes_content(url: str) -> bytes:\n\n url = _fix_url(url)\n\n return get(url).content", "def load_url(url):\n\n req = urllib2.Request(url = url)\n f = urllib2.urlopen(req)\n return f.read()", "def get(url, headers=None):\n headers = headers if headers else {}\n request = urllib2.Request(url, headers=headers)\n handle = urllib2.urlopen(request)\n return handle.read()", "def get_remote_bytes(file_url) -> io.BytesIO:\n result = urlfetch.fetch(file_url)\n return io.BytesIO(result.content)", "def get_file_contents(filename):\n with open(filename, 'r') as f:\n content = f.read()\n return content", "def get_remote_content(self, path):\n if path.startswith(\"http\"):\n page_path = path\n elif path.startswith(\"www\"):\n page_path = \"https://\" + path\n else:\n page_path = self.source + path\n \n print(\"Getting \" + page_path)\n \n try:\n resp = requests.get(page_path)\n except:\n print(\"Unable to get \" + page_path)\n return None\n \n if resp.status_code == 200:\n return resp.content\n else:\n print(\"Unable to get \" + page_path + \" Response = \" + str(resp.status_code))\n return None", "def get_html(url):\n print('fetching', url)\n try:\n re = requests.get(url, timeout=1, stream=True)\n print('success!')\n # limit file size to 1mb\n html = re.raw.read(1000000+1, decode_content=True)\n if len(html) > 1000000:\n raise ValueError('response too large')\n return html\n except:\n raise TimeoutError('request timed out')", "def simple_get(url):\n try:\n with 
closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n return None", "def get(self, url):\r\n response = self.requestHelper.get(url)\r\n return self.process(response)", "def get(self, url):\n return self._request('GET', url)", "def simple_get(url):\r\n try:\r\n with closing(get(url, stream=True)) as resp:\r\n if is_good_response(resp):\r\n return resp.content\r\n else:\r\n return None\r\n\r\n except RequestException as e:\r\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\r\n return None", "def get_text_content(url: str) -> str:\n\n url = _fix_url(url)\n\n return get(url).text", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def get_page(url):\n try:\n with closing(get(url, stream=True)) as res:\n if is_good_response(res):\n return res.content\n except RequestException as e:\n log_error(e)", "def request_url(url, display, file=None):\n if file is not None:\n r = requests.get(url, stream=True)\n r.raise_for_status()\n with open(file, \"wb\") as fd:\n for chunk in r.iter_content(chunk_size=128):\n fd.write(chunk)\n return r.raise_for_status()\n else:\n r = requests.get(url)\n r.raise_for_status()\n if display == \"xml\":\n return xmltodict.parse(r.text)\n elif display == \"fasta\" or display == \"fastq\":\n return format_seq_content(r.text, display)\n else:\n return r.text", "def simpleGet(url):\r\n try:\r\n with closing(get(url, stream=True)) as resp:\r\n if isGoodResponse(resp):\r\n return resp.content\r\n else:\r\n return None\r\n\r\n except RequestException as e:\r\n logError('Error during requests to {0} : {1}'.format(url, str(e)))\r\n return None", "def get(self, url):\n\t\treturn self.session.get(url, headers=self.headers)", "def read_text(self, url: str) -> str:\n response = self._session().get(url)\n if not response.ok:\n response.raise_for_status()\n return response.text", "def get_source(url):\r\n\r\n headers = {}\r\n 
headers['X-Requested-With'] = 'XMLHttpRequest'\r\n\r\n return requests.get(url, headers=headers).text", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n print('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n print('Error during requests to {0} : {1}'.format(url, str(e)))\n return None", "def get_page_contents(url, headers):\n result = urlopen(Request(url, None, headers))\n try:\n charset = result.headers.get_content_charset(failobj=\"utf-8\") # for python3\n except:\n charset = result.info().getparam('charset') or 'utf-8'\n return result.read().decode(charset)", "def get_page_contents(url, headers):\n result = urlopen(Request(url, None, headers))\n try:\n charset = result.headers.get_content_charset(failobj=\"utf-8\") # for python3\n except:\n charset = result.info().getparam('charset') or 'utf-8'\n return result.read().decode(charset)", "def get_content(url):\n img=requests.get(url).content\n return img", "def get_file_lines(url):\n\n\t# Download the file over the internet\n\tresponse = requests.get(url, stream=True)\n\n\t# Decode all lines into strings\n\tlines = []\n\tfor line in response.iter_lines():\n\t\tlines.append(line.decode(\"UTF-8\"))\n\treturn lines", "def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()", "def simple_get(url):\n\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n\n except RequestException as e:\n log_error(\"Error during requests to {0} : {1}\".format(url, str(e)))\n return None", "def get(self, url, path):\n rpath = urllib.parse.urlparse(url).path\n try:\n self.sftp.get(rpath, path)\n except Exception as e:\n osaka.utils.LOGGER.warning(\n \"Encountered exception: {}\\n{}\".format(e, traceback.format_exc())\n )\n raise osaka.utils.OsakaFileNotFound(\"File {} doesn't exist.\".format(url))", "def fetch(self, url):\r\n fname = os.path.join(self._cachedir, self._formatter(url))\r\n if not os.path.exists(fname):\r\n time.sleep(self._sleep)\r\n html = urllib.urlopen(url).read()\r\n with codecs.open(fname, 'w', 'utf-8') as f:\r\n soup = BeautifulSoup(html)\r\n f.write(unicode(soup))\r\n return fname", "def get_content(url, headers={}, decoded=True):\n\n # print('get_content: %s' % url)\n\n req = request.Request(url, headers=headers)\n if cookies:\n cookies.add_cookie_header(req)\n req.headers.update(req.unredirected_hdrs)\n\n for i in range(10):\n try:\n response = request.urlopen(req)\n break\n except socket.timeout:\n print('request attempt %s timeout' % str(i + 1))\n\n data = response.read()\n\n\n if is_py2:\n response = response.info()\n\n # Handle HTTP compression for gzip and deflate (zlib)\n content_encoding = response.getheader('Content-Encoding')\n if content_encoding == 'gzip':\n data = ungzip(data)\n elif content_encoding == 'deflate':\n data = undeflate(data)\n\n # Decode the response body\n if decoded:\n charset = match1(response.getheader('Content-Type'), r'charset=([\\w-]+)')\n if charset is not None:\n data = 
data.decode(charset)\n else:\n data = data.decode('utf-8')\n\n return data", "def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()", "def get_raw_data(url):\n\n req = requests.get(url, stream=True)\n req.raw.decode_content = True\n return req.raw", "def simple_get(url):\r\n try:\r\n with closing(get(url, stream=True, timeout=10)) as resp:\r\n if is_good_response(resp):\r\n return resp #.content\r\n else:\r\n return None\r\n\r\n except RequestException as e:\r\n log_error('Error during requests to {0} : {1}'.format(url, str(e)))\r\n return None", "def _Download(url):\n response = urllib2.urlopen(url)\n if response.code != 200:\n raise RuntimeError('Failed to download \"%s\".' % url)\n return response.read()", "def fetch(self, url, timeout=None):\n\n # ISO-8859-1 is the default encoding for text files per the specs for\n # HTTP 1.0 (RFC 1945 sec 3.6.1) and HTTP 1.1 (RFC 2616 sec 3.7.1).\n # ref: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1\n encoding = \"iso-8859-1\"\n content = \"\"\n expires_header = None\n content_type_header = None\n self._response_code = 0\n self._source_url = url\n\n if self.user_agent:\n req = urllib_request.Request(url, None, \n { 'User-Agent' : self.user_agent })\n else:\n req = urllib_request.Request(url)\n\n try:\n if timeout:\n f = urllib_request.urlopen(req, timeout=timeout)\n else:\n f = urllib_request.urlopen(req)\n\n content = f.read(MAX_FILESIZE)\n if VERBOSE:\n print 'Response Headers:'\n print f.info()\n\n # As of Python 2.5, f.info() looks like it returns the HTTPMessage\n # object created during the connection. \n expires_header = f.info().get(\"expires\")\n content_type_header = f.info().get(\"Content-Type\")\n # As of Python 2.4, this file-like object reports the response \n # code, too. \n if hasattr(f, \"code\"):\n self._response_code = f.code\n else:\n self._response_code = 200\n f.close()\n except urllib_error.URLError:\n # This is a slightly convoluted way to get the error instance,\n # but it works under Python 2 & 3. \n error_instance = sys.exc_info()\n if len(error_instance) > 1:\n error_instance = error_instance[1]\n if hasattr(error_instance, \"code\"):\n self._response_code = error_instance.code\n if VERBOSE:\n print 'Code:%d\\nConnect to %s timeout.'%(self._response_code, url)\n \n # MK1996 section 3.4 says, \"...robots should take note of Expires \n # header set by the origin server. If no cache-control directives \n # are present robots should default to an expiry of 7 days\".\n \n # This code is lazy and looks at the Expires header but not \n # Cache-Control directives.\n self.expiration_date = None\n if self._response_code >= 200 and self._response_code < 300:\n # All's well.\n if expires_header:\n self.expiration_date = email_utils.parsedate_tz(expires_header)\n \n if self.expiration_date:\n # About time zones -- the call to parsedate_tz() returns a\n # 10-tuple with the time zone offset in the 10th element. \n # There are 3 valid formats for HTTP dates, and one of \n # them doesn't contain time zone information. (UTC is \n # implied since all HTTP header dates are UTC.) When given\n # a date that lacks time zone information, parsedate_tz() \n # returns None in the 10th element. mktime_tz() interprets\n # None in the 10th (time zone) element to mean that the \n # date is *local* time, not UTC. 
\n # Therefore, if the HTTP timestamp lacks time zone info \n # and I run that timestamp through parsedate_tz() and pass\n # it directly to mktime_tz(), I'll get back a local \n # timestamp which isn't what I want. To fix this, I simply\n # convert a time zone of None to zero. It's much more \n # difficult to explain than to fix. =)\n # ref: http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1\n if self.expiration_date[9] == None: \n self.expiration_date = self.expiration_date[:9] + (0,)\n \n self.expiration_date = email_utils.mktime_tz(self.expiration_date)\n if self.use_local_time: \n # I have to do a little more converting to get this \n # UTC timestamp into localtime.\n self.expiration_date = time.mktime(time.gmtime(self.expiration_date)) \n #else:\n # The expires header was garbage.\n\n if not self.expiration_date: self.expiration_date = self._now() + SEVEN_DAYS\n\n if (self._response_code >= 200) and (self._response_code < 300):\n # All's well.\n media_type, encoding = _parse_content_type_header(content_type_header)\n # RFC 2616 sec 3.7.1 -- \n # When no explicit charset parameter is provided by the sender, \n # media subtypes of the \"text\" type are defined to have a default\n # charset value of \"ISO-8859-1\" when received via HTTP.\n # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7.1\n if not encoding: \n encoding = \"iso-8859-1\"\n elif self._response_code in (401, 403):\n # 401 or 403 ==> Go away or I will taunt you a second time! \n # (according to MK1996)\n content = \"User-agent: *\\nDisallow: /\\n\"\n elif self._response_code == 404:\n # No robots.txt ==> everyone's welcome\n content = \"\"\n else: \n # Uh-oh. I punt this up to the caller. \n _raise_error(urllib_error.URLError, self._response_code)\n\n if ((PY_MAJOR_VERSION == 2) and isinstance(content, str)) or \\\n ((PY_MAJOR_VERSION > 2) and (not isinstance(content, str))):\n # This ain't Unicode yet! It needs to be.\n \n # Unicode decoding errors are another point of failure that I punt \n # up to the caller.\n try:\n content = content.decode(encoding)\n except UnicodeError:\n _raise_error(UnicodeError,\n \"Robots.txt contents are not in the encoding expected (%s).\" % encoding)\n except (LookupError, ValueError):\n # LookupError ==> Python doesn't have a decoder for that encoding.\n # One can also get a ValueError here if the encoding starts with \n # a dot (ASCII 0x2e). See Python bug 1446043 for details. 
This \n # bug was supposedly fixed in Python 2.5.\n _raise_error(UnicodeError,\n \"I don't understand the encoding \\\"%s\\\".\" % encoding)\n if VERBOSE:\n print 'Response:'\n print content\n\n\n if not content:\n # 响应为空,清空自身数据集,跳过解析步骤\n self._sitemaps = [ ]\n self.__rulesets = [ ]\n return False\n else:\n # Now that I've fetched the content and turned it into Unicode, I \n # can parse it.\n self.parse(content)\n return True", "def get_binary(url):\n a = requests.get(url, stream=True)\n return a.content", "def get(self, url):\n return json.loads(self.as_source.urlopen(url).read())", "async def http_get(url):\n async with aiohttp.ClientSession() as session:\n async with session.get(url) as r:\n return await r.text()", "def get_file_contents(path):\n try:\n with open(path) as f:\n return f.read()\n except IOError:\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n except RequestException as e:\n print('The following error occurred during HTTP GET request \\\n to {0} : {1}'.format(url, str(e)))\n return None", "def get_html_source(url):\n # import urllib\n try:\n sock = urllib.urlopen(url)\n html_source = sock.read()\n sock.close()\n return html_source\n except IOError:\n print \"IOError: Not a valid URL\"", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n except RequestException as e:\n print('The following error occurred during HTTP GET request to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n except RequestException as e:\n print('The following error occurred during HTTP GET request to {0} : {1}'.format(url, str(e)))\n return None", "def simple_get(url):\n try:\n with closing(get(url, stream=True)) as resp:\n if is_good_response(resp):\n return resp.content\n else:\n return None\n except RequestException as e:\n print('The following error occurred during HTTP GET request to {0} : {1}'.format(url, str(e)))\n return None", "def get_content_from_file(path):\n\n\t\tPathUtil.ensure_path_exists(path)\n\t\twith open(path) as file:\n\t\t\tfile_content = file.read()\n\t\treturn file_content", "def fetch_file_from_web(server_url, path, transform_func=json.loads):\n artifact_url = \"{0}/{1}\".format(server_url, path)\n r = requests.get(artifact_url)\n r.raise_for_status()\n if transform_func:\n return transform_func(r.text)\n else:\n return r.text", "async def fetch_file(self, download_url: str) -> bytes:\n log.debug(f\"Fetching file from branding repository: '{download_url}'.\")\n\n async with self.bot.http_session.get(download_url, params=PARAMS, headers=HEADERS) as response:\n if response.status != 200:\n raise RuntimeError(f\"Failed to fetch file due to status: {response.status}\")\n\n log.debug(\"Fetch successful, reading payload.\")\n return await response.read()", "def get_file(url):\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Read fits\n iofile = io.BytesIO(response.content)\n content_type = response.headers['Content-Type']\n if content_type == 'image/fits':\n obj = fits.open(iofile)\n else:\n raise Exception('Unknown content type: {0}.'.format(content_type))\n return obj", "def download (url):\n path, url = url\n r = requests.get (url, stream = True)\n content = r.text\n #print 
(content)\n with open (path + '.txt', 'w') as f:\n f.write (content)" ]
[ "0.7789234", "0.7628709", "0.7561159", "0.75399375", "0.7516315", "0.73461986", "0.73405373", "0.7318421", "0.7309704", "0.7293151", "0.72760564", "0.71967506", "0.7170775", "0.71379685", "0.713316", "0.7123545", "0.71138036", "0.70557725", "0.7036462", "0.70032036", "0.6997669", "0.6969415", "0.6953698", "0.6934595", "0.6899018", "0.6886919", "0.688", "0.6877098", "0.68412685", "0.683834", "0.68302333", "0.68059945", "0.6788828", "0.67369235", "0.6731764", "0.67199475", "0.6712432", "0.6710984", "0.6706428", "0.66990167", "0.66959083", "0.66928047", "0.66915286", "0.66909647", "0.6686621", "0.6684201", "0.66642225", "0.6659047", "0.6654489", "0.6654364", "0.66355795", "0.66067433", "0.6583799", "0.6580824", "0.65790695", "0.65779454", "0.65772605", "0.6559842", "0.6548458", "0.6532622", "0.6532622", "0.6532622", "0.6532622", "0.6532622", "0.6532622", "0.65307987", "0.65242726", "0.65198594", "0.6516906", "0.6513714", "0.65125495", "0.65107024", "0.65107024", "0.6507176", "0.6507176", "0.6506906", "0.6505222", "0.65029866", "0.6498622", "0.6485912", "0.6476365", "0.64741164", "0.64713216", "0.6465986", "0.64592654", "0.64512914", "0.6433813", "0.6430391", "0.64293647", "0.6427826", "0.6410077", "0.6403004", "0.6397294", "0.63859296", "0.63859296", "0.63859296", "0.63823897", "0.6372552", "0.6370625", "0.6365399", "0.6359394" ]
0.0
-1
Pop all elements out of a queue using .get_nowait().
def get_all_nowait(queue: Queue) -> list:\n    results = []\n    while True:\n        try:\n            result = queue.get_nowait()\n            results.append(result)\n        except Empty:\n            break\n    return results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def use_queue():\n q = queue.Queue()\n for i in range(10):\n q.put_nowait(i)\n while q.qsize() > 0:\n element = q.get_nowait()\n sys.stdout.write(\"poping out from queue: {0}\\n\".format(element))", "def process_queue_fast(self):\n while self.queue:\n self.queue.popleft()()", "def pop_all(self):\n with self.lock:\n output = list(self.queue)\n self.queue.clear()\n\n return output", "def pop(self):\r\n return self.queue.pop(0)", "def drain(queue):\n while not queue.is_empty():\n queue.remove()", "def _dequeue(self):\n return self._queue.popleft()", "def dequeue(self):\r\n return self.queue.pop(0)", "def pop(self):\n return self.queue.pop(0)", "def pop_from_deque(self):", "def drainQueue(q):\n buf = []\n while True:\n # Get as much as possible without blocking\n try:\n while True:\n item = q.get_nowait()\n if item is None:\n return buf\n else:\n buf.append(item)\n except Queue.Empty:\n pass\n\n if buf:\n return buf\n\n # Nothing in the queue. Block for\n # one item, then go back and get any\n # that we can without blocking.\n item = q.get()\n if item is None:\n return buf\n else:\n buf.append(item)", "def dequeue(self):\n return self.queue.popleft()", "def dequeue(self):\n return self.queue.pop(0)", "def dequeue(self):\n return self.the_queue.pop(0)", "def pop(self):\n self.queue.insert(len(self.queue), self.queue[0])\n self.queue.remove(self.queue[0])\n return self.queue.pop()", "def pop(self):\n return self.q1.dequeue()", "def dequeue(self):\n return self.__queue.pop()", "def syncdequeue(self):\n #FIXME: Handle exceptions caused when some queue in the list might be empty\n temp=[]\n for itr, contextqueue in enumerate(self.queues):\n try:\n temp.append(self.queues[itr].get())\n except:\n Queue.Empty\n return temp", "def pop(self):\n self.move()\n return self.queue2.pop()", "def AdvanceQueue(self):\r\n self.data.pop(0)\r\n return", "def clear_queue(self):\n while not self.queue.empty():\n self.queue.get()", "def pop(self):\n if not self.empty():\n self.size -= 1\n return heapq.heappop(self.queue)\n else:\n return None", "def pop(self):\n while not self.queue[self.tag].empty():\n temp = self.queue[self.tag].get()\n if not self.queue[self.tag].empty():\n self.queue[1 - self.tag].put(temp)\n else:\n self.tag = 1 - self.tag\n return temp", "def dequeue(self):\n\t\treturn self.items.pop()", "def dequeue(self):\n try:\n return self._container.pop()\n except IndexError:\n raise IndexError(\"Cannot dequeue from empty queue.\")", "def get_nowait(self):\r\n if self.empty():\r\n raise QueueEmpty\r\n item = self._get()\r\n self._wakeup_next(self._putters)\r\n return item", "def dequeue(self):\r\n if self.size():\r\n self.queue.pop(0)\r\n else:\r\n raise IndexError(\"Queue is empty.\")", "def pop(self):\n if not self.empty():\n return self.queue.pop()\n return None", "def process_queue_slowly(self):\n start = time.process_time()\n while self.queue and time.process_time() - start < 1.0 / TICKS_PER_SECOND:\n self.queue.popleft()()", "def dequeue(self):\n return self.items.pop()", "def dequeue(self):\n return self.items.pop()", "def remove(self):\n return self.queue.popleft()", "def dequeue(Q):\n # x = Q.pop(0) # default is to pop from end (LIFO stack), param 0 indicates FIFO queue\n x = Q.get_nowait() # default is to pop from end (LIFO stack), param 0 indicates FIFO queue\n if debug: \n print(\"dequeue :\", end=\" \")\n show_queue(Q)\n return(Q, x)", "def dequeue(self):\n\n # del self._queue[0]\n return self._queue.pop(0)", "def get_nowait(self) -> _T:\n self._consume_expired()\n if self._putters:\n 
assert self.full(), \"queue not full, why are putters waiting?\"\n item, putter = self._putters.popleft()\n self.__put_internal(item)\n future_set_result_unless_cancelled(putter, None)\n return self._get()\n elif self.qsize():\n return self._get()\n else:\n raise QueueEmpty", "def pop(self): # O(1)\n if not self.queue:\n return None\n return self.queue.popleft()", "def dequeue(self):\n pass", "def dequeue(self):\n pass", "def popMsg(self):\n\n if not self.queue:\n return []\n returned_msgs = []\n for msg, delay in self.queue:\n delay -= 1\n if delay < 1:\n returned_msgs.append(msg)\n else:\n self.pushMsg(msg, delay)\n self.queue = []\n return returned_msgs", "def dequeue(self):\n if len(self) == 1:\n self.tail = None\n return self.pop()", "def dequeue(self):\n if self.items:\n return self.items.pop()\n return None", "def pop(self):\r\n tep = []\r\n res = -1\r\n cur = self.num\r\n cache = 0\r\n while self.queue and cur>1:\r\n cache = self.queue.pop(0)\r\n tep.append(cache)\r\n cur-=1\r\n res = self.queue.pop(0)\r\n self.topele = cache\r\n #print tep,res\r\n self.num-=1\r\n while tep:\r\n self.queue.append(tep.pop(0))\r\n return res", "def get_all_from_queue(Q):\n try:\n while True:\n yield Q.get_nowait()\n except queue.Empty:\n raise StopIteration", "def dequeue(self):", "def get_all_from_queue(Q):\n try:\n while True:\n yield Q.get_nowait()\n except Queue.Empty:\n raise StopIteration", "def process_queue(self, queue):\n\n while queue:\n deferred, data = queue.popleft()\n deferred.callback(data)", "def dequeue(self):\n if not self.is_empty():\n return self._queue_items.pop()\n else:\n raise QueueException('dequeue operation not supported on an empty queue')", "def remove(self) -> T:\n if not self.is_empty():\n return self._queue.pop()", "def pop():", "def dequeue(self):\n return self._queue.dequeue()", "def _get_nowait(self):\n # Fulfills a waiting producer, returning its value, or raising Empty if\n # no fulfillable producers are waiting.\n def fulfill_waiting_producer():\n while True:\n if self._waiting_producers:\n produce_wish = self._waiting_producers.pop(0)\n with produce_wish.group.lock:\n if not produce_wish.group.fulfilled:\n return produce_wish.fulfill()\n else:\n raise Empty()\n\n if self._buf is not None and not self._buf.empty:\n value = self._buf.pop()\n try:\n # Cycles a producer's value onto the buffer\n produced = fulfill_waiting_producer()\n self._buf.push(produced)\n except Empty:\n pass\n return value\n else:\n return fulfill_waiting_producer()", "def pop(self):\n if self._size > 0:\n elem = self.first.data\n self.first = self.first.next\n self._size = self._size - 1\n return elem\n \n raise IndexError('The queue is empty! ')", "def dequeue(self):\n\n return self._data.pop(0)", "def dequeue(self):\n if self.is_empty():\n raise Exception(\"Queue is empty !!! 
Please add data to the Queue :) \")\n else:\n return self.data.pop(0)", "def pop(self, timeout=None):\n item = super(ExclusiveQueue, self).pop(timeout)\n try:\n self.remove(item)\n except ValueError:\n pass\n return item", "def _getqueue(self):\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty(): return self.outqueues[index]", "def Pop(self):\n popped = self.jobQueue.pop(0)\n\n # If the queue is empty, set the isWorking flag to false\n if self.Length() == 0:\n self.isWorking = False\n \n return popped", "def pop(self):\r\n while self.pq:\r\n priority, count, task = heapq.heappop(self.pq)\r\n if task is not self.REMOVED:\r\n del self.entry_finder[task]\r\n return task\r\n raise KeyError('pop from an empty priority queue')", "def dequeue(self):\n try:\n temp = self.front\n self.front = self.front.next\n temp.next = None\n return temp.value\n except Exception:\n return \"the queue is empty\"", "def pop(self):\r\n # 队列的pop操作\r\n self.stack.pop(0)", "def clearQueueAll():", "def _getqueue(self):\n\n go = self.tickqueue.get()\n for index in range(len(self.outqueues)):\n if not self.outqueues[index].empty():\n return self.outqueues[index]", "def dequeue(self):\n temp = self.front\n self.front = self.front.getPtr()\n return temp.getData()", "def dequeue(self):\n if self.isEmpty():\n raise Exception(\"Queue underflow\")\n item = self._q[self._first]\n self._q[self._first] = None # to avoid loitering\n self._N -= 1\n self._first += 1\n if self._first == len(self._q):\n self._first = 0 # wrap-around\n # shrink size of array if necessary\n if self._N > 0 and self._N == len(self._q)/4:\n self._resize(len(self._q)/2)\n return item", "def dequeue(self):\n\n item = self.__items__.pop(0)\n return item", "def getAllFromQueue(self, Q):\n try:\n while True:\n yield Q.get_nowait()\n except Queue.Empty:\n raise StopIteration", "def dequeue(queue):\n item = front(queue)\n queue.front = queue.front.next\n if empty_queue(queue):\n queue.back = None\n\n queue.size = queue.size - 1\n\n return item", "def pop(self, till=None):\n if till is not None and not isinstance(till, Signal):\n Log.error(\"expecting a signal\")\n\n with self.lock:\n while True:\n if self.queue:\n value = self.queue.popleft()\n return value\n if self.please_stop:\n break\n if not self.lock.wait(till=till | self.please_stop):\n if self.please_stop:\n break\n return None\n (DEBUG or not self.silent) and Log.note(self.name + \" queue stopped\")\n return THREAD_STOP", "def dequeue(self):\n if not self.front:\n raise AttributeError(\"Can't dequeue from an empty queue\")\n\n removed = self.front\n self.front = self.front.next\n return removed.value\n # try:\n # removed = self.front\n # self.front = self.front.next\n # return removed.value\n # except AttributeError:\n # return \"Can't dequeue from an empty queue\"", "def pop(self):", "def pop(self):", "def dequeue(self) -> Any:\n if len(self.queue) <= 1:\n task = self.queue[0]\n self.queue = []\n\n return task\n \n last_index = len(self.queue) - 1\n self._swap_tasks(0, last_index)\n\n task = self.queue.pop()\n\n self._bubble_down_task()\n\n return task", "def pop(self) -> T:\n while self.priority_queue:\n _, _, (item,) = heapq.heappop(self.priority_queue)\n if item is not None:\n del self.entry_finder[item] # type: ignore\n return cast(T, item)\n raise KeyError('pop from an empty priority queue')", "def dequeue(self):\n raise NotImplementedError(\"dequeue: You should have implemented this method!\")", "def pop(self) -> int:\n last = 
self.queue.popleft()\n while self.queue:\n self.aux_queue.append(last)\n last = self.queue.popleft()\n self.queue, self.aux_queue = self.aux_queue, self.queue\n return last", "def dequeue_rear(self):\n try:\n return self._items.pop()\n except:\n raise IndexError('The deque is empty')", "def clean_queue(self):\n self._stdin_queue.put_nowait(None) # Release thread", "def dequeue(self):\n\n temp = self.front\n self.front = self.front.next\n return temp.data", "def dequeue(self):\n\t\tif self.is_empty():\n\t\t\traise Empty('Queue is empty')\n\t\tanswer = self._head._element\n\t\tself._head = self._head._next\n\t\tself._size -= 1\n\t\tif self.is_empty():\n\t\t\tself._tail = None\n\t\treturn answer", "def pop(self) -> int:\n cur = None\n if(not self.empty()):\n cur = self.queue[0] \n self.queue = self.queue[1:] \n return cur", "def pop(self):\n msgs = ''\n if len(self.queue) > 0:\n for i in self.queue:\n msgs += \"[{}]: {} at {}()\\n\".format(i[0], i[1], i[2])\n self.queue = list()\n sys.exit(msgs)\n # if msg_type != '' and msg_value == '':\n # raise TypeError(msg_type)\n # elif msg_type == '' and msg_value != '':\n # raise ValueError(msg_value)\n # elif msg_type == '' and msg_value == '':\n # print(\"No errors\")\n # else:\n # try:\n # raise (msg_type)\n # except:\n # raise ValueError(msg_value)", "def remove_to_deletes(self):\n go = True\n while go:\n go = False\n for op in self.queue:\n if op.delete:\n self.queue.remove(op)\n go = True\n break", "def consume(iterator):\n deque(iterator, maxlen=0)", "def popleft(self, timeout=None):\n item = super(ExclusiveQueue, self).popleft(timeout)\n try:\n self.remove(item)\n except ValueError:\n pass\n return item", "def dequeue(self):\n if self.is_empty():\n raise Empty('Queue is empty')\n answer = self._head._element\n self._head = self._head._next\n self._size -= 1\n if self.is_empty(): # special case as queue is empty\n self._tail = None # removed head had been the tail\n return answer", "def dequeue(self) -> object:\n return self._data.pop(0)", "def dequeue(self) -> object:\n return self._data.pop(0)", "def pop(self, blocking = False, timeout = TIMEOUT_CURRENT):\n if len(self) == 0 and blocking:\n self.channel.receive(timeout)\n return collections.deque.pop(self)", "def clear_queue(self):\n self.queue = deque()", "def deQueue(self):\n\t\tif self.isEmpty():\n\t\t\tprint(\"Queue already empty: Queue Empty\")\n\t\t\texit(1)\n\t\tprint(\"Dequeueing: \", self.queue[self.front])\n\t\tself.queue[self.front] = None\n\t\tself.front = self.front + 1\n\t\tself.size = self.size - 1", "def pop(self):\r\n try:\r\n key = heapq.heappop(self.heap)\r\n return self.elements[key]\r\n except:\r\n raise StopIteration", "def dequeue(self):\n if self.is_empty():\n raise Empty(\"Queue is empty\")\n answer = self._data[self._front]\n self._data[self._front]\n self._data = (self._front+1)%len(self._data)\n self._size-=1\n return answer", "def pop(self,n):\r\n\t\treturn self.queue.pop(0)[1]", "def dequeue(self): # total O(1)\n topItem = self._queue[self._start] #O(1)\n self._queue[self._start] = None #O(1)\n self._start = (self._start+1)% self._capacity #O(1)\n self._size -= 1 #O(1)\n return topItem #O(1)", "def pop_one(self):\n with self.lock:\n if self.please_stop:\n return [THREAD_STOP]\n elif not self.queue:\n return None\n else:\n v =self.queue.pop()\n if v is THREAD_STOP: # SENDING A STOP INTO THE QUEUE IS ALSO AN OPTION\n self.please_stop.go()\n return v", "def dequeue(self):\n if not self.outbound_stack:\n while self.inbound_stack:\n 
self.outbound_stack.append(self.inbound_stack.pop())\n return self.outbound_stack.pop()", "def dequeue(self):\n if self.size() < 1:\n raise ValueError('Priority queue is empty and has no front item')\n else:\n # TODO: Remove and return min item from heap, if any\n ...", "def clean(self):\r\n # Note that we do not close the connection here -- somebody\r\n # may still be reading from it.\r\n while len(self.queue) > 0 and self._pair_stale(self.queue[0]):\r\n self.queue.pop(0)", "def empty_queue():\n return Queue()", "def empty_queue():\n return Queue()", "def dequeue(self):\r\n if self.is_empty():\r\n raise Empty(\"Queue is empty\")\r\n answer = self._head._element\r\n self._head = self._head._next\r\n self._size -= 1\r\n if self.is_empty():\r\n self._tail = None\r\n return answer" ]
[ "0.7690526", "0.7386941", "0.73390293", "0.730532", "0.7302115", "0.71763545", "0.71752375", "0.714329", "0.70479155", "0.70476866", "0.7042465", "0.7022835", "0.7013579", "0.7004436", "0.6989313", "0.69627863", "0.6954468", "0.6933592", "0.6921749", "0.69049203", "0.6895917", "0.68903357", "0.6866246", "0.6861866", "0.6826163", "0.67720807", "0.67696315", "0.6761527", "0.67606235", "0.67606235", "0.6760254", "0.6727865", "0.67075783", "0.6675596", "0.66665864", "0.6637768", "0.6637768", "0.6628382", "0.66248447", "0.66210544", "0.6591398", "0.65911967", "0.65705436", "0.6569005", "0.6566919", "0.6560498", "0.6552529", "0.6546689", "0.65376043", "0.64886683", "0.6475585", "0.6446181", "0.64338505", "0.6425174", "0.6417356", "0.64173204", "0.64094895", "0.6404787", "0.64033246", "0.6395083", "0.63911486", "0.63823026", "0.63769794", "0.63767916", "0.63661313", "0.6362066", "0.6347786", "0.6347712", "0.6346323", "0.6346323", "0.63426083", "0.63308585", "0.6328501", "0.63248104", "0.6317467", "0.6309256", "0.63010925", "0.62943524", "0.6287136", "0.62659657", "0.6247763", "0.6243988", "0.6229333", "0.62185365", "0.62184066", "0.62184066", "0.6216946", "0.6216023", "0.6212643", "0.6212136", "0.6210145", "0.6209339", "0.61929846", "0.61889225", "0.6167734", "0.6162703", "0.6156035", "0.6154212", "0.6154212", "0.6152362" ]
0.7315528
3
Process every input using the given worker class.
def multiprocess(inputs: list, worker_class: Any, num_threads: int = 40):
    input_queue = Queue()  # type: ignore
    output_queue = Queue()  # type: ignore
    for input_elm in inputs:
        input_queue.put(input_elm)
    threads = [worker_class(input_queue, output_queue) for _ in range(num_threads)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    return get_all_nowait(output_queue)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def processInputs(self):", "def process_inputs(self, inputs):", "def run(self):\n self.class_inst_obj.processor(self.msg)", "def process(self):\n raise NotImplementedError", "def run(self) -> None:\n\n while True:\n try:\n input_element = self.input_queue.get_nowait()\n self.process(input_element)\n except Empty:\n return", "def process(self):\n pass", "def process(self, input, is_processed=False):\n raise NotImplementedError", "def process(self):\n if len(self.inputs):\n self._process_input()\n while len(self.servers) > 0:\n self._process_input()\n self._write_file()", "def process(self):\n raise NotImplementedError('Method must be implemented by subclass.')", "def process(self):", "def process(self):", "def process(self):", "def process():", "def process(self, results):\n raise NotImplementedError", "def _worker(self, args):\n pass", "def worker(self, request):\n try:\n for processor in self.processors:\n if processor.accepted(request):\n processor.process(request)\n except Exception as e:\n #TODO print e\n print e\n pass\n finally:\n #waiter be awakened\n request.notify()", "def process(self):\n\n # validate processing\n if self.is_acceptable():\n # handle data and write log\n self.handle()", "def _spawn_workers(self):\n self._event.set()\n self._workers = [ClassifierWorker(self._event, self._queue, self._results) for x in range(self._NUM_WORKERS)]\n [worker.start() for worker in self._workers]", "def _map_to_workers(self, iterable, result_getter):\n if not self.is_started:\n raise RuntimeError(\"Cannot process inputs: must call start() first.\")\n\n tasks = TaskIterator(iterable)\n task = next(tasks)\n\n while True:\n try:\n self._send_task(task)\n task = next(tasks)\n except Queue.Full:\n for result in result_getter(): # I wish I had `yield from` :(\n yield result\n except StopIteration:\n break\n\n while not self.is_completed:\n for result in result_getter():\n yield result", "def run(self):\n while self.inputs:\n readable, writeable, exceptions = select(self.inputs,\n self.outputs,\n self.inputs)\n for s in readable:\n if s is self.server and self.accepting:\n self.accept(s)\n else:\n data = s.recv(1024)\n if data:\n self.parse(data.rstrip(), s)\n else:\n self.remove(s)\n\n # Writeable\n for s in writeable:\n self.send(s)\n\n # Exceptions\n for s in exceptions:\n self.remove(s)", "def process_class_list(self, module, classes):", "def __hgs_worker(classifier_type, features, labels, results_file_name,\n train_ratio, param_names, params):\n \n # Train the classifier.\n classifier = classifier_type(features, labels, training_ratio=0.7,\n **params)\n \n # Print the metrics for the classifier to the file.\n accuracy = classifier.accuracy()\n logloss = classifier.logloss()\n \n row = [params[name] for name in param_names]\n row += [accuracy, logloss]\n \n # Print results to file.\n with open(results_file_name, 'a') as file:\n csv.writer(file, lineterminator='\\n').writerow(row)\n \n # Indicate that a classifier has finished.\n print('Finished a classifier...')", "def process_queue(self):\n while self.input_processing_running:\n\n # Process everything in the queue.\n while self.input_queue.qsize() > 0:\n try:\n _telem = self.input_queue.get_nowait()\n self.process_telemetry(_telem)\n\n except Exception as e:\n self.log_error(\"Error processing telemetry dict - %s\" % str(e))\n\n # Sleep while waiting for some new data.\n time.sleep(0.5)", "def process(self, data_batch: Any, predictions: Sequence[dict]) -> None:\n self.results.extend(_to_cpu(predictions))", "def process(self, 
answers_probs):\n\n raise NotImplementedError(\"Subclass Responsibility\")", "def process(self, inputs):\n output = None\n return output", "def _process(self):\n self.kwargs[\"collect\"].process_scan_form_data(self.kwargs[\"data\"])", "def processing(self):\n pass", "def process_input(self):\n print(\"========================Start of Process_Input() Method*\")\n request_data = [\"name\", 0, 0, 0] # initialing th object variables\n req_data_counter = 0 # refers to an index in a list\n\n with open(self.__file_name) as input_file:\n whole_file = input_file.read().splitlines()\n for i in range(len(whole_file)):\n whole_file[i] = whole_file[i].split(',') # use comma as a delimiter\n for j in range(len(whole_file[i])):\n whole_file[i][j] = whole_file[i][j].strip()\n if req_data_counter < 4: # we will break the data into units\n request_data[req_data_counter] = whole_file[i][j]\n req_data_counter = req_data_counter + 1\n if req_data_counter > 3:\n # create object, having read all values for a single req\n new_request_object = Request.Request(request_data[0], request_data[1], request_data[2],\n request_data[3])\n self.input_list.append(new_request_object)\n assert isinstance(new_request_object, object) # asserting if item added is object request\n req_data_counter = 0 # resetting index counter to start reading new request data\n print(\"========================file reading finished*\")\n self.display_contents(self.input_list)\n print(\"========================End of Process_Input() Method *\")", "async def async_process_input(self, inp: inputs.Input) -> None:\n raise NotImplementedError()", "def process(self, input_element: Any) -> None:\n\n raise NotImplementedError", "def run(self):\r\n while True:\r\n try:\r\n processor, iprot, oprot, otrans, callback = self.queue.get()\r\n if processor is None:\r\n break\r\n processor.process(iprot, oprot)\r\n callback(True, otrans.getvalue())\r\n except Exception:\r\n logging.exception(\"Exception while processing request\")\r\n callback(False, '')", "def worker(nums, out_q):\n outdict = {}\n print(threading.current_thread().name)\n print (\"pid:\", os.getpid())\n print (\"data size:\", nums)\n for n in nums:\n outdict[n] = factorize_naive(n)\n out_q.put(outdict)", "def worker_duty():\n\n while True:\n batch = queue.get()\n if batch is None:\n break\n examples, labels, alphas = batch\n for example, label, alpha in batch:\n self._train_one_example(example, label, alpha)", "def batch_worker(minibatch_info, frozen_params):\r\n imagedirs = frozen_params[0]\r\n classes = frozen_params[1]\r\n offset_percent = frozen_params[2]\r\n output_size = frozen_params[3]\r\n nclass = len(classes)\r\n nfish = len(minibatch_info)\r\n class_onehot = np.zeros((nfish, nclass), dtype=np.int8)\r\n imdata = np.zeros((nfish, int(np.prod(output_size))))\r\n for i in range(nfish):\r\n current_fishtuple = minibatch_info[i]\r\n fish_type = current_fishtuple[-1]\r\n fish_type = fish_type.strip(\"'\")\r\n fish_directory = imagedirs[fish_type]\r\n imdata[i, :] = read_single_image(current_fishtuple, fish_directory, offset_percent, output_size)\r\n if fish_type != 'NoF':\r\n fish_type = 'FISH'\r\n fish_class = int(classes[fish_type])\r\n class_onehot[i, fish_class] = 1\r\n return imdata, class_onehot", "def process(self):\n self.extract()\n self.transform()\n self.load()", "def worker(problemDir, configDir, work_queue, done_queue):\n while True:\n problemID, configID = work_queue.get()\n print(\"received\")\n if problemID == STOP:\n # Poison pill\n print(\"Exiting worker process.\")\n 
done_queue.put(STOP)\n break\n testproblemList = TestProblem.get_all_from_file(problemID, problemDir)\n solverconfig = SolverConfiguration.from_file(configID, configDir)\n for testproblem in testproblemList:\n test_instance = TestInstance(testproblem, solverconfig)\n result = test_instance.run()\n done_queue.put(result)\n return", "def process_input(self, inp: inputs.Input) -> None:\n self.task_registry.create_task(self.async_process_input(inp))", "def process_thread(self):", "def do_work(self):", "def worker(self, **options):\n pass", "def worker():\n while True:\n image, objects = queue.get()\n\n # Call on_image Event Function\n self.on_image(image)\n\n # Call on_image Callback Functions\n for callback in self.on_image_callbacks:\n callback(image)\n\n if objects:\n # Call on_object Event Function\n self.on_object(image, objects)\n\n # Call on_object Callback Functions\n for callback in self.on_object_callbacks:\n callback(image, objects)", "def process():\n pass", "def input(cls):\n return find_class_instances(cls, TaskInput)", "def process(self) -> List['Event']:\n raise NotImplementedError", "def run(self):\n while True:\n try:\n processor, iprot, oprot, otrans, callback = self.queue.get()\n if processor is None:\n break\n callback.getContext().setProtocols(iprot, oprot)\n processor.process(iprot, oprot, callback.getContext())\n callback.success(reply=otrans.getvalue())\n except Exception:\n logging.exception(\"Exception while processing request\")\n callback.failure()", "def worker(nums, outdict):\n print(threading.current_thread().name)\n print (\"pid:\", os.getpid())\n for n in nums:\n outdict[n] = factorize_naive(n)", "def input(self, *input):\n for i in input:\n self._parser.feed(i)", "def _process_data(f, work_queue, results_queue):\n for element in iter(work_queue.get, FINISHED):\n try:\n results_queue.put(f(element))\n except Exception, work_error:\n LOG.critical('parallel_pc Error: {0}\\n\\n\\tconfig settings {1}\\n'.format(work_error, element))\n results_queue.put(FINISHED)", "def run(self):\n # FILE INPUT\n if self.text_type == \"file\":\n self.process_files()\n\n # STRING INPUT\n else:\n self.process_strings()\n\n if self.json:\n self.save_json()\n\n if self.errors:\n print(\"\\nThe following file(s) could not be opened:\")\n for error in self.errors:\n print(f\"\\t{error}\")", "def process(self, xaf):\n raise NotImplementedError(\"process() method must be overriden\")", "def run(self, worker, evaluator=None):\n pass", "def apply(self, inputs):\n raise NotImplementedError()", "def process_class(self, parent, cls):\n if cls.typemap.flat_name in self.class_map:\n raise RuntimeError(\"process_class: class {} already exists in class_map\"\n .format(cls.typemap.flat_name))\n self.class_map[cls.typemap.flat_name] = cls\n for var in cls.variables:\n self.add_var_getter_setter(parent, cls, var)\n cls.functions = self.define_function_suffix(cls.functions)", "def _worker(pipelines: List[Pipeline], source: Queue, sink: Queue):\n pipelines = list(pipelines)\n for i, p in enumerate(pipelines):\n if isinstance(p, ConvertT2S):\n pipelines[i] = ConvertT2S()\n\n def processor(article):\n for p in pipelines:\n article = p(article)\n return article\n\n while True:\n article = source.get()\n if article == 'EXIT':\n return\n article = list(processor(article))\n sink.put(article)", "def process_input(self,r,g,b):\n pass", "def process(self, player, **kwargs):\n raise NotImplementedError()", "def _run(self):\n self._algorithm(self._list, self)", "def process_classified_message(self, message, 
classification): \n pass", "def iter_process(self, input_tape=None, initial_state=None,\n process_iterator_class=None,\n iterator_type=None,\n automatic_output_type=False, **kwargs):\n if automatic_output_type and 'format_output' in kwargs:\n raise ValueError(\"Parameter 'automatic_output_type' set, but \"\n \"'format_output' specified as well.\")\n if automatic_output_type:\n try:\n kwargs['format_output'] = input_tape.parent()\n except AttributeError:\n kwargs['format_output'] = type(input_tape)\n\n if process_iterator_class is None:\n process_iterator_class = FSMProcessIterator\n it = process_iterator_class(self,\n input_tape=input_tape,\n initial_state=initial_state,\n **kwargs)\n if iterator_type is None:\n return it\n elif iterator_type == 'simple':\n simple_it = self._iter_process_simple_(it)\n try:\n return kwargs['format_output'](simple_it)\n except KeyError:\n return simple_it\n else:\n raise ValueError('Iterator type %s unknown.' % (iterator_type,))", "def do_work(self):\n raise NotImplementedError", "def handle(self):\n\n global batch_no\n\n if self.client_address[0] in workers: # Established worker\n # self.request is a TCP socket connected to the client\n print('Worker connected... handling')\n self.data = recv_all(self.request, bytes_expected)\n\n try:\n network_weight_updates = deque(struct.unpack('{}f'.format(num_weights), self.data))\n print('\\tReceived data unpacked')\n except:\n print('\\tError unpacking data')\n else:\n print('\\tTraining master network')\n nn.train_master(network_weight_updates)\n batch_no += 1\n print('\\tCompleted batch #{} of {}'.format(batch_no, total_batches))\n\n print('\\tGetting network weights')\n network_weights = nn.get_weights()\n print('\\tPacking response')\n response = struct.pack('{}f'.format(num_weights), *network_weights)\n print('\\tSending back weights')\n self.request.sendall(response)\n\n elif self.client_address[0] in validators: # Established validator\n print('Validator connected... handling')\n print('\\tGetting network weights')\n network_weights = nn.get_weights()\n print('\\tPacking response')\n response = struct.pack('{}f'.format(num_weights), *network_weights)\n print('\\tSending back weights')\n self.request.sendall(response)\n\n else: # New node connected\n # Initialize client into set of workers\n print('Contacted by new node...')\n self.data = self.request.recv(1024).strip()\n print('\\tReceived init string')\n\n # New nodes should send b'w' to become a worker, or b'v' to become a validator\n if self.data == b'w':\n print('\\tNew WORKER node... handling')\n workers.add(self.client_address[0]) # Add IP to list of workers\n elif self.data == b'v':\n print('\\tNew VALIDATOR node... handling')\n validators.add(self.client_address[0]) # Add IP to list of validators\n else:\n print('\\tUnknown node request! Moving on...')\n return\n\n # Gather current network weights, pack as bytes, and send back to client\n network_weights = nn.get_weights()\n response = struct.pack('{}f'.format(num_weights), *network_weights)\n print('\\tSending network weights')\n self.request.sendall(response)\n\n print('\\tDONE with node')\n\n if batch_no > total_batches:\n # Done with desired number of batches! 
Finalize network training\n save_pkl(nn, 'saved_nn/iris_nn_relu_16n8n_100e_16b_l2.pkl') # Save trained network\n\n print('Eval training...')\n eval_tr = nn.eval_classification(train)\n print('Eval validation...')\n eval_v = nn.eval_classification(validation)\n print('Eval testing...')\n eval_t = nn.eval_classification(test)\n\n print('Batches: {}\\tTrain: {:.3f}\\tValidation: {:.3f}\\tTest: {:.3f}'\n .format(batch_no, eval_tr, eval_v, eval_t))\n exit()", "def run_map(self):\n # Split input into chunks for processing\n files = self.split_list()\n # Make processing pool\n pool = Pool(processes=self.args.ncore)\n # Map processing to _run function\n self.output = pool.map(_run, files)\n # Close and join pool\n pool.close()\n pool.join()", "def process_loop(entries: List[StudentEntry]):\n pass", "def process(self, args):\n for benchmark_file in args.benchmark_files:\n self.process_individual_file(benchmark_file)\n self.total_files += 1", "def apply_classifier(self):\n for detected_object in self.detected_objects:\n detected_object.predict_class(self.original_image)", "def call(self, inputs):\n result = self.call_features(inputs)\n result = self.classifier(result)\n return result", "def process_queue(self):\n self.log_info(\"Started Telemetry Logger Thread.\")\n\n while self.input_processing_running:\n\n # Process everything in the queue.\n while self.input_queue.qsize() > 0:\n try:\n _telem = self.input_queue.get_nowait()\n self.write_telemetry(_telem)\n except Exception as e:\n self.log_error(\"Error processing telemetry dict - %s\" % str(e))\n\n # Close any un-needed log handlers.\n self.cleanup_logs()\n\n # Sleep while waiting for some new data.\n time.sleep(0.5)\n\n self.log_info(\"Stopped Telemetry Logger Thread.\")", "def execute(self):\n for line in fileinput.input():\n line = line.rstrip()\n self._process_command(line)", "def process_input(self):\n print('--- IxaMedTagger: processing input ---')\n start_time = time.time()\n os.system('java -jar {0} {1}'.format(self.__ixamedtagger, self.__input_formatted_filepath))\n end_time = time.time() - start_time\n print('--- {} seconds ---'.format(end_time))", "def do_process(self):\n for k in self.processor.process():\n self._progress = k\n\n self.output_container = self.processor.target_container # type: converter.containers.Container", "def queue_inputs(self, iterable):\n self.input_queue.extend(iterable)", "def process_sample_train(self):\n raise NotImplementedError", "def process(self, data, channel = None):\n\t\traise NotImplementException()", "def executor(self):", "def _run(self):\n emulators = self.create_emulators()\n try:\n while True:\n command = self.queue.get()\n if command == self.Command.NEXT:\n for i, (emulator, action) in enumerate(zip(emulators, self.action)):\n new_s, reward, is_done, info = emulator.next(action)\n if is_done:\n self.state[i], info = emulator.reset()\n else: #so we never return terminal states\n self.state[i] = new_s\n self.reward[i] = reward\n self.is_done[i] = is_done\n for k in self.info:\n self.info[k][i] = info[k]\n self.barrier.put(True)\n elif command == self.Command.RESET:\n\n for i, emulator in enumerate(emulators):\n self.state[i], info = emulator.reset()\n for k in self.info:\n self.info[k][i] = info[k]\n self.barrier.put(True)\n elif command == self.Command.CLOSE:\n break\n else:\n raise WorkerError(\"{} has received unknown command {}\".format(type(self),command))\n finally:\n for emulator in emulators: emulator.close()\n logging.debug('WorkerProcess#{} finished!'.format(self.id+1))", "def 
compute_parallel(self, inputs, communicator):\n self.compute_sequential([inputs], [communicator])", "def _process(self):\n # choose the correct transform model before processing TI data\n self._select_transform()\n\n # process type first, fail early\n self._process_type()\n\n # process type specific data\n if isinstance(self.transform, GroupTransformModel):\n self._process_group()\n elif isinstance(self.transform, IndicatorTransformModel):\n self._process_indicator()\n\n # self.process_associations(self.transform.associations)\n self._process_associated_group(self.transform.associated_groups)\n self._process_attributes(self.transform.attributes or [])\n self._process_security_labels(self.transform.security_labels or [])\n self._process_tags(self.transform.tags or [])\n\n # date added\n self._process_metadata_datetime('dateAdded', self.transform.date_added)\n\n # last modified\n self._process_metadata_datetime('lastModified', self.transform.last_modified)\n\n # xid\n self._process_metadata('xid', self.transform.xid)", "def unify_input():\n\n global versions\n\n # Add classes to unified in wire-format order so that it is easier \n # to generate things later\n keys = versions.keys()\n keys.sort(reverse=True)\n for version in keys:\n wire_version = versions[version][\"wire_version\"]\n classes = versions[version][\"classes\"]\n for cls in of_g.ordered_classes[wire_version]:\n add_class(wire_version, cls, classes[cls])", "def run(self, player=None, **kwargs):\n self._pre_process_hook(player, **kwargs)\n self.process(player, **kwargs)\n self._post_process_hook(player, **kwargs)", "def process_files(self):\n matcher = self.choose_algorithm()\n # process one file at the time for better memory management\n for i, element in enumerate(self.input):\n filepath, _ = element\n\n try:\n with open(filepath, \"r\", encoding=\"utf-8\") as readfile:\n for line in readfile:\n matcher.find_match(line, self.case_insensitive)\n\n # collect unreadeable files for error log\n except Exception:\n self.errors.append(str(filepath))\n\n # copy results and reset matcher for next file\n self.__results = matcher.results\n\n if self.counter:\n self.__results = matcher.counts\n\n matcher.reset()\n\n # output - print or json\n if self.results:\n self.output(element)\n\n # if json print progress bar\n if self.json:\n self.progress_bar(i+1, len(self.input), prefix=\"Matching:\",\n fixed_len=True, length=40)", "def stage_input(workdir_path, input_dict):\n for input_name, input_value in input_dict.items():\n if not isinstance(input_value, list):\n input_value = [input_value]\n for obj in input_value:\n if isinstance(obj, dict):\n if 'class' not in obj:\n exit_perm_fail('Error: missing class in input ' + input_name)\n if obj['class'] == 'Directory':\n exit_system_error('Sorry: I don''t know how to deal with directories yet')\n if obj['class'] == 'File':\n stage_input_file(workdir_path, obj)", "def _threaded(self, *args, **kwargs):\n\n for target in self.targets:\n result = target(*args, **kwargs)\n self.queue.put(result)", "def next(self, *input):\n self.log.info(\"Starting next for task %s\" % self.__class__.__name__)\n\n self.comm.Barrier()\n\n # This should only be called once.\n try:\n if self.done:\n raise pipeline.PipelineStopIteration()\n except AttributeError:\n self.done = True\n\n # Extract a list of the tags for all input arguments\n input_tags = [\n (\n str(icont.attrs.get(\"tag\"))\n if isinstance(icont, memh5.MemDiskGroup)\n else \"\"\n )\n for icont in input\n ]\n\n # Process input and fetch output\n if 
self._no_input:\n if len(input) > 0:\n # This should never happen. Just here to catch bugs.\n raise RuntimeError(\"Somehow `input` was set.\")\n output = self.process()\n else:\n output = self.process(*input)\n\n # Return immediately if output is None to skip writing phase.\n if output is None:\n return\n\n # Insert the input tags into the output container\n output.attrs[\"input_tags\"] = input_tags\n\n output = self._process_output(output)\n\n # Increment internal counter\n self._count = self._count + 1\n\n self.log.info(\"Leaving next for task %s\" % self.__class__.__name__)\n\n # Return the output for the next task\n return output", "def process(self, cmd):\n logger.info(\"Spawner processing new command:{}\".format(cmd))\n actor_id = cmd['actor_id']\n worker_ids = cmd['worker_ids']\n image = cmd['image']\n tenant = cmd['tenant']\n stop_existing = cmd.get('stop_existing', True)\n num_workers = cmd.get('num', self.num_workers)\n logger.info(\"command params: actor_id: {} worker_ids: {} image: {} stop_existing: {} mum_workers: {}\".format(\n actor_id, worker_ids, image, tenant, stop_existing, num_workers))\n try:\n new_channels, anon_channels, new_workers = self.start_workers(actor_id,\n worker_ids,\n image,\n tenant,\n num_workers)\n except SpawnerException as e:\n # for now, start_workers will do clean up for a SpawnerException, so we just need\n # to return back to the run loop.\n logger.info(\"Spawner returning to main run loop.\")\n return\n logger.info(\"Created new workers: {}\".format(new_workers))\n\n # stop any existing workers:\n if stop_existing:\n logger.info(\"Stopping existing workers: {}\".format(worker_ids))\n self.stop_workers(actor_id, worker_ids)\n\n # add workers to store first so that the records will be there when the workers go\n # to update their status\n if not stop_existing:\n # if we're not stopping the existing workers, we need to add each worker to the\n # actor's collection.\n for _, worker in new_workers.items():\n logger.info(\"calling add_worker for worker: {}.\".format(worker))\n Worker.add_worker(actor_id, worker)\n else:\n # since we're stopping the existing workers, the actor's collection should just\n # be equal to the new_workers.\n workers_store[actor_id] = new_workers\n logger.info(\"workers_store set to new_workers: {}.\".format(new_workers))\n\n # Tell new worker to subscribe to the actor channel.\n # If abaco is configured to generate clients for the workers, generate them now\n # and send new workers their clients.\n generate_clients = Config.get('workers', 'generate_clients').lower()\n logger.info(\"Sending messages to new workers over anonymous channels to subscribe to inbox.\")\n for idx, channel in enumerate(anon_channels):\n if generate_clients == 'true':\n logger.info(\"Getting client for worker {}\".format(idx))\n client_ch = ClientsChannel()\n try:\n client_msg = client_ch.request_client(tenant=tenant,\n actor_id=actor_id,\n # new_workers is a dictionary of dictionaries; list(d) creates a\n # list of keys for a dictionary d. 
hence, the idx^th entry\n # of list(ner_workers) should be the key.\n worker_id=new_workers[list(new_workers)[idx]]['id'],\n secret=self.secret)\n except ChannelTimeoutException as e:\n logger.error(\"Got a ChannelTimeoutException trying to generate a client: {}\".format(e))\n # put actor in an error state and return\n self.error_out_actor(actor_id, [], str(e))\n client_ch.close()\n return\n client_ch.close()\n # we need to ignore errors when generating clients because it's possible it is not set up for a specific\n # tenant. we log it instead.\n if client_msg.get('status') == 'error':\n logger.info(\"Error generating client: {}\".format(client_msg.get('message')))\n channel.put({'status': 'ok',\n 'actor_id': actor_id,\n 'tenant': tenant,\n 'client': 'no'})\n logger.debug(\"Sent OK message over anonymous worker channel.\")\n # else, client was generated successfully:\n else:\n logger.info(\"Got a client: {}, {}, {}\".format(client_msg['client_id'],\n client_msg['access_token'],\n client_msg['refresh_token']))\n channel.put({'status': 'ok',\n 'actor_id': actor_id,\n 'tenant': tenant,\n 'client': 'yes',\n 'client_id': client_msg['client_id'],\n 'client_secret': client_msg['client_secret'],\n 'access_token': client_msg['access_token'],\n 'refresh_token': client_msg['refresh_token'],\n 'api_server': client_msg['api_server'],\n })\n logger.debug(\"Sent OK message AND client over anonymous worker channel.\")\n else:\n logger.info(\"Not generating clients. Config value was: {}\".format(generate_clients))\n channel.put({'status': 'ok',\n 'actor_id': actor_id,\n 'tenant': tenant,\n 'client': 'no'})\n logger.debug(\"Sent OK message over anonymous worker channel.\")\n # @TODO -\n # delete the anonymous channel from this thread but sleep first to avoid the race condition.\n time.sleep(1.5)\n channel.delete()\n\n # due to the race condition deleting channels (potentially before all workers have received all messages)\n # we put a sleep here.\n time.sleep(1)\n for ch in new_channels:\n try:\n # the new_channels are the spawnerworker channels so they can be deleted.\n ch.delete()\n except Exception as e:\n logger.error(\"Got exception trying to delete spawnerworker channel: {}\".format(e))\n logger.info(\"Done processing command.\")", "def _start_input_file_worker(\n q_in: Queue, input_data_path: str, batch_size: int\n) -> None:\n input_data_file = open(input_data_path, \"r\")\n enum_idx = 0\n batch = []\n for line in input_data_file:\n unique_id = str(uuid.uuid1())\n batch.append((enum_idx, unique_id, line))\n # If the batch to send is the size then push to queue and rest batch\n if len(batch) == batch_size:\n q_in.put(batch)\n batch = []\n enum_idx += 1\n if batch:\n q_in.put(batch)", "def caller(self, objs: Iterable[drgn.Object]) -> Iterable[drgn.Object]:\n\n out_type = sdb.prog.type(self.output_type)\n has_input = False\n for i in objs:\n has_input = True\n\n # try subclass-specified input types first, so that they can\n # override any other behavior\n try:\n for (_, method) in inspect.getmembers(self, inspect.ismethod):\n if not hasattr(method, \"input_typename_handled\"):\n continue\n\n # Cache parsed type by setting an attribute on the\n # function that this method is bound to (same place\n # the input_typename_handled attribute is set).\n if not hasattr(method, \"input_type_handled\"):\n method.__func__.input_type_handled = sdb.prog.type(\n method.input_typename_handled)\n\n if i.type_ == method.input_type_handled:\n yield from method(i)\n raise StopIteration\n except StopIteration:\n 
continue\n\n # try passthrough of output type\n # note, this may also be handled by subclass-specified input types\n if i.type_ == out_type:\n yield i\n continue\n\n # try walkers\n try:\n # pylint: disable=import-outside-toplevel\n #\n # The reason we do the above is that putting\n # the import at the top-level hits a cyclic\n # import error which pretty-much breaks\n # everything. We should reconsider how we\n # handle all our imports.\n from sdb.commands.walk import Walk\n for obj in Walk().call([i]):\n yield drgn.cast(out_type, obj)\n continue\n except sdb.CommandError:\n pass\n\n # error\n raise sdb.CommandError(\n self.name, 'no handler for input of type {}'.format(i.type_))\n if not has_input:\n yield from self.no_input()", "def work(self):\n while True:\n message = self.get()\n self.handle(message)", "def process(self, *args, **kwargs):\n from copy import copy\n\n # set default values\n options = copy(self._process_default_options_)\n options.update(kwargs)\n\n # perform iteration\n it = self.iter_process(*args, **options)\n for _ in it:\n pass\n\n # process output: filtering accepting results\n only_accepted = options['only_accepted']\n it_output = [result for result in it.result()\n if not only_accepted or result[0]]\n\n # process output: returning a list output\n if (len(it_output) > 1 and options['list_of_outputs'] is None or\n options['list_of_outputs']):\n return [self._process_convert_output_(out, **options)\n for out in it_output]\n\n # process output: cannot return output to due input parameters\n if options['list_of_outputs'] is False:\n if not it_output and only_accepted:\n raise ValueError('No accepting output was found but according '\n 'to the given options, an accepting output '\n 'should be returned. Change only_accepted '\n 'and/or list_of_outputs options.')\n elif len(it_output) > 1:\n raise ValueError('Got more than one output, but only allowed '\n 'to show one. Change list_of_outputs option.')\n # At this point it_output has length 0 or 1.\n\n # process output: create non-accepting output if needed\n if not it_output:\n if only_accepted:\n return []\n NoneState = FSMState(None, allow_label_None=True)\n it_output = [(False, NoneState, None)]\n\n return self._process_convert_output_(it_output[0], **options)", "def __iter__(self):\n for classresult in self.classresults:\n yield classresult", "def train(self):\r\n for class_ in set(self.train_classes):\r\n data = map(lambda (ind, datum): datum, filter(lambda (ind, datum): self.train_classes[ind] == class_, enumerate(self.train_data)))\r\n self.distribution.index_data(data, class_)", "def process(self, preprocess_args: Dict) -> None:\n dataset2preprocessor = {\n 'vhi': VHIPreprocessor,\n }\n\n for dataset, variables in preprocess_args.items():\n\n # check the format is as we expected\n assert dataset in dataset2preprocessor, \\\n f'{dataset} is not supported! Supported datasets are {dataset2preprocessor.keys()}'\n\n assert type(variables) is list, \\\n f'Expected {dataset} values to be a list. 
Got {type(variables)} instead'\n\n preprocessor = dataset2preprocessor[dataset](self.data)\n\n for variable in variables:\n preprocessor.preprocess(**variable)", "def run_inner(self):\n for event in self.inotify.event_gen():\n self.process_inotify_event(event)", "def __call__(self, inputs):\n self.inputs = inputs\n self.process_inputs()\n self.init_mesh()\n mesh_modified = self.run_tasks()\n self.write_output_mesh(mesh_modified)", "def run_skim(self):\n # Split input into chunks for processin\n skim_files = glob.glob(self.args.input + \"*.root\")\n # Make processing pool\n pool = Pool(processes=self.args.ncore)\n # Map processing to _run function\n pool.imap(_run_skim, skim_files)\n # Close and join pool\n pool.close()\n pool.join()", "def __call__(self, *inputs):\n raise NotImplementedError", "def populate_list(self, input_filename):\r\n f = open(input_filename, 'r')\r\n\r\n for line in f:\r\n # Process the input line\r\n line_split = line.strip().split('(')\r\n line_split[-1] = line_split[-1][:-1] # Removes the extra bracket at the end\r\n\r\n class_name = line_split[0]\r\n parameters = line_split[1].split(',')\r\n self.metrics.append(self.instantiate_class(class_name, *parameters))\r\n\r\n f.close()", "def runAll(self):\n \n worker = worker()\n if self.FileFolder.text() == \"\":\n self.makeWarningPopup(\"Please Select a file or Files to run\") \n elif self.OutputFolder.text() == \"\":\n self.makeWarningPopup(\"Please select an output folder\")\n else:\n TheFiles = self.FileFolder.text()\n TheOutPutFolder = self.OutputFolder.text()\n \n runArt = worker.MakeUITeamConversion(self,TheFiles,TheOutPutFolder)", "def run(self, *args, **kwargs):\n # @@@ async? (consider how args and kwargs should be serialized;\n # that may change things quite a bit)\n self.process((args, kwargs))", "def _process_mixer(self, instance):\n self.log.debug('setting up mixer scraper')\n endpoint = instance.get('mixer_endpoint')\n scraper = self._get_mixer_scraper(instance)\n self.log.debug('processing mixer metrics')\n scraper.process(\n endpoint,\n send_histograms_buckets=instance.get('send_histograms_buckets', True),\n instance=instance,\n ignore_unmapped=True\n )" ]
[ "0.62040085", "0.6178794", "0.5952422", "0.59248585", "0.5817719", "0.56970406", "0.5652122", "0.56006", "0.5596137", "0.555472", "0.555472", "0.555472", "0.55169404", "0.54495186", "0.5403452", "0.5399591", "0.5373088", "0.5360921", "0.5338759", "0.53068113", "0.53002506", "0.5289116", "0.52639675", "0.5218911", "0.51783663", "0.517185", "0.5152557", "0.5150168", "0.51367575", "0.5124231", "0.51217455", "0.5115584", "0.51080245", "0.5104019", "0.5101037", "0.5097477", "0.50952774", "0.5094068", "0.5093417", "0.5086979", "0.5078353", "0.50774986", "0.5068594", "0.50677025", "0.50654274", "0.50646424", "0.5046752", "0.5024652", "0.5017355", "0.5014403", "0.5005108", "0.50003266", "0.49936575", "0.4990308", "0.49754608", "0.49657026", "0.49649677", "0.49609944", "0.49390063", "0.49339333", "0.4928576", "0.4920426", "0.49082452", "0.4904718", "0.49021852", "0.4885245", "0.48640656", "0.48637098", "0.48550707", "0.48120445", "0.4806497", "0.48057616", "0.48043698", "0.47859952", "0.47843164", "0.47695726", "0.4769017", "0.47685808", "0.47681686", "0.4765386", "0.47625542", "0.47621703", "0.4756803", "0.47407183", "0.4735083", "0.472943", "0.47293788", "0.47276905", "0.47250402", "0.47143573", "0.47137797", "0.47059134", "0.4705477", "0.47035888", "0.47018424", "0.47002795", "0.46983966", "0.46968424", "0.4695124", "0.46938708" ]
0.64647937
0
Read a file from the internet, and put it in a folder on disk.
def download(urls: List[str], num_threads: int = 40) -> List[str]:
    num_files = len(urls)
    start = perf_counter()
    print("Starting download of %s files . . ." % num_files)
    results = multiprocess(urls, Downloader, num_threads=num_threads)
    dur = perf_counter() - start
    print("Completed download of %s files after %.3f seconds." % (num_files, dur))
    return results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download(self, url):\n try:\n webFile = urllib.urlopen(url)\n localFile = open(self.workdir + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n print(\"could not get url \" + url)", "def donwload_file_to_local(url, filename, timeout=1):\n print 'downloading with urllib2'\n data = None\n try:\n f = urllib2.urlopen(url, timeout=timeout)\n data = f.read()\n except Exception as e:\n logger.error(e)\n\n if data:\n with open(filename, 'wb') as code:\n code.write(data)", "def download_file(url: str, local_dir: str = '.', local_filename: str = '') -> str:\n os.makedirs(f'{local_dir}', exist_ok=True)\n local_filename = local_filename if local_filename else url.split('/')[-1]\n if os.path.exists(f'{local_dir}/{local_filename}'):\n print(\"{0}/{1} already exists. Skipping download.\".format(local_dir, local_filename))\n else:\n print(\"Downloading file from {0} to {1}/{2}.\".format(url, local_dir, local_filename))\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(f'./{local_dir}/{local_filename}', 'wb') as f:\n for chunk in r.iter_content(chunk_size=128):\n f.write(chunk)\n print(\"Finished saving file from {0} to {1}/{2}.\".format(url, local_dir, local_filename))\n return f'{local_dir}/{local_filename}'", "def download_file_from_url(url, PATH, file_name):\n with requests.get(url) as r:\n with open(PATH+'/'+file_name, 'wb') as f:\n f.write(r.content)", "def fetch_file(url, filename):\n from clinica.utils.exceptions import ClinicaException\n from urllib.request import Request, urlopen\n from urllib.error import URLError\n import shutil\n import ssl\n import os.path\n from clinica.utils.stream import cprint\n\n head_tail = os.path.split(filename)\n if not os.path.exists(head_tail[0]):\n cprint('Path to the file does not exist')\n cprint('Stop Clinica and handle this error')\n\n # Download the file from `url` and save it locally under `file_name`:\n cert = ssl.get_server_certificate((\"aramislab.paris.inria.fr\", 443))\n gcontext = ssl.SSLContext()\n req = Request(url)\n try:\n response = urlopen(req, context=gcontext)\n except URLError as e:\n if hasattr(e, 'reason'):\n cprint('We failed to reach a server.')\n cprint(['Reason: ' + e.reason])\n elif hasattr(e, 'code'):\n cprint('The server could not fulfill the request.')\n cprint(['Error code: ' + e.code])\n else:\n try:\n with open(filename, 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n except OSError as err:\n cprint(\"OS error: {0}\".format(err))", "def download(self, url):\n try:\n logging.info(self.log_format((\"downloading \" + url)))\n webFile = urllib.urlopen(url)\n localFile = open(self.paths['workspace'] + \"/\" + url.split('/')[-1], 'w')\n localFile.write(webFile.read())\n webFile.close()\n localFile.close()\n except IOError:\n logging.error(self.log_format((\"could not get url \" + url)))", "def read(self, local_path): # noqa: D402\n data_location = self.download_url\n data_location = rewrite_s3_links_locally(data_location)\n response = requests.get(data_location)\n write_file_locally(response.content, local_path)", "def download(filename):\n print \"Downloading\", filename\n file_content = urlopen(\n urljoin(URL_PATH, filename)\n )\n write_data_to_file(\n file_content.read(),\n os.path.join(\n '/tmp',\n filename\n )\n )", "def load(url, file_name, folder):\n # Need special case for Stack Overflow (more than one 7z file)\n\n if not os.path.isfile(file_name):\n #downloads file from url; two url patterns are 
attempted\n testfile = request.URLopener()\n try:\n testfile.retrieve(url[0], file_name)\n except error.HTTPError as e:\n try:\n testfile.retrieve(url[1], file_name)\n except:\n print (\"Error: URL retrieval of \" + url[0] + \" and \" + url[1] + \" failed for reason: \" + e.reason)\n quit()\n\n #un-zips file and puts contents in folder\n a = py7z_extractall.un7zip(file_name)\n if not (os.path.isfile(os.path.join(folder, \"PostLinks.xml\")) and os.path.isfile(os.path.join(folder, \"Posts.xml\"))):\n a.extractall(folder)", "def downloadAndReplaceFile(file_path, download_url):\r\n file = urllib.request.urlopen(download_url)\r\n with open(file_path, 'wb') as output:\r\n output.write(file.read())", "def download_file(url, local_path):\n try:\n local_filename = normalizeFilenameToCommonDateFormat(url.split('/')[-1])\n \n destination_dir = local_path #os.path.join(local_path, os.path.splitext(os.path.basename(local_filename))[0])\n \n #if not os.path.exists(destination_dir):\n # os.makedirs(destination_dir)\n \n destination_file = os.path.join(destination_dir, local_filename)\n \n if not os.path.exists(destination_file):\n # NOTE the stream=True parameter \n r = requests.get(url, stream=True)\n with open(destination_file, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024): \n if chunk: # filter out keep-alive new chunks\n f.write(chunk)\n #f.flush() commented by recommendation from J.F.Sebastian\n # Sleep so that we aren't rude\n sleep(1)\n else:\n return destination_file + ' already '\n \n return destination_file\n except ValueError as err:\n return \"Skipping %s, not \" % (url.split('/')[-1])", "def download_and_save(url, file_name,file_extension):\n #make a request for the file\n response = requests.get(url, allow_redirects =True)\n\n #compose the file + extension\n file_to_be_saved = f\"{file_name}.{file_extension}\"\n \n #Create a new file with \"file_to_be_saved\" in the current directory\n # And save this file and print the directory with the OS module\n with open(file_to_be_saved, 'wb') as file:\n print(\"saving file.... 
\\n\")\n file.write(response.content)\n print('done....\\n')\n print('file saved as: ', file_to_be_saved )\n print('in: ', os.getcwd() )", "def download_file(url,file_name):\n #http://stackabuse.com/download-files-with-python/\n filedata = urllib2.urlopen(url)\n datatowrite = filedata.read()\n with open(file_name, 'wb') as f:\n f.write(datatowrite)", "def get_file(url, file_name=None):\n cache_dir = os.path.join(os.path.expanduser(\"~\"), \".jhML\")\n\n if file_name is None:\n file_name = url[url.rfind('/') + 1:]\n file_path = os.path.join(cache_dir, file_name)\n\n if not os.path.exists(cache_dir):\n os.mkdir(cache_dir)\n\n if os.path.exists(file_path):\n return file_path\n\n print(\"Downloading: \" + file_name)\n try:\n urllib.request.urlretrieve(url, file_path, show_progress)\n except (Exception, KeyboardInterrupt) as e:\n if os.path.exists(file_path):\n os.remove(file_path)\n raise\n print(\" Done\")\n\n return file_path", "def download_data(url, filename, dst_dir):\r\n fullpath = os.path.join(dst_dir, filename)\r\n if os.path.exists(fullpath):\r\n return\r\n\r\n # Try to open url\r\n try:\r\n page = urlopen(url)\r\n except Exception:\r\n shutil.copy(PLACEHOLDER, fullpath)\r\n return\r\n\r\n f = open(fullpath, 'wb')\r\n while True:\r\n buff = page.read(BLOCK_SZ)\r\n if not buff:\r\n break\r\n f.write(buff)\r\n f.close()\r\n pass", "def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)", "def main(file_url, file_path):\n\n # extract file from the link\n\n if not os.path.exists(file_path):\n os.makedirs(file_path, exist_ok=True)\n \n r = requests.get(str(file_url))\n\n #unzip the zip file\n z = zipfile.ZipFile(io.BytesIO(r.content))\n z.extractall(path = file_path)", "async def baixar(url: str, namedir: str):\r\n\r\n name = get_name(url)\r\n\r\n if name:\r\n if url.startswith('http'):\r\n try:\r\n timeout = aiohttp.ClientTimeout(total=10)\r\n async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:\r\n async with session.get(url) as resp:\r\n if resp.status == 200:\r\n f = await aiofiles.open(f'{namedir}/{name}', mode='wb')\r\n await f.write(await resp.read())\r\n await f.close() \r\n except:\r\n print('[ERRO 004]: Erro ao baixar arquivo: \\n-->', url + '\\n')\r\n #print('[-] Nome usado para salvar:', name)\r\n pass\r\n\r\n else:\r\n print('[ERRO 000]: Filename vazio:', str(len(name)))\r\n print('-->', url + '\\n')\r\n pass", "def main(url, localfile):\n ph.download_file(url, localfile)", "def download_file(url_path):\n local_filename = url_path.split('/')[-3] + \"-\" + url_path.split('/')[-1]\n local_filename = OUT_DIR + local_filename\n print local_filename\n url = \"https://commoncrawl.s3.amazonaws.com/\" + url_path\n # NOTE the stream=True parameter\n req = requests.get(url, stream=True)\n with open(local_filename, 'wb') as write_f:\n for chunk in req.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n write_f.write(chunk)\n write_f.close()\n return local_filename", "def save_file(self, response):\r\n # Extract filename from response url\r\n filename = re.search('[^/]+(?=/$|$)', response.url).group(0)\r\n\r\n # Prepend download folder name to the filename\r\n filename = self.config[\"folder\"] + filename\r\n os.makedirs(os.path.dirname(filename), exist_ok=True)\r\n\r\n # Write 
contents to file\r\n with open(filename, 'wb') as f:\r\n f.write(response.content)\r\n\r\n # Print message displaying the absolute filepath for convenience\r\n print(\"Downloaded file to \" + os.path.abspath(filename))", "def download_file(local_filename, url, clobber=False):\n\tdir_name = os.path.dirname(local_filename)\n\tmkdirs(dir_name)\n\n\tif clobber or not os.path.exists(local_filename):\n\t\ti = requests.get(url)\n\n\t\t# if not exists\n\t\tif i.status_code == 404:\n\t\t\tprint('Failed to download file:', local_filename, url)\n\t\t\treturn False\n\n\t\t# write out in 1MB chunks\n\t\tchunk_size_in_bytes = 1024*1024 # 1MB\n\t\twith open(local_filename, 'wb') as local_file:\n\t\t\tfor chunk in i.iter_content(chunk_size=chunk_size_in_bytes):\n\t\t\t\tlocal_file.write(chunk)\n\n\treturn True", "def download_file(url, file_name):\n conn = urllib3.PoolManager(\n cert_reqs='CERT_REQUIRED',\n ca_certs=certifi.where())\n\n with conn.request('GET', url, preload_content=False) as resp, open(file_name, 'wb') as out:\n shutil.copyfileobj(resp, out)", "def save_url_to_file(url, filepath):\n with open(filepath, 'ab') as f:\n f.write(url+'\\r\\n')", "def read_and_save(res):\n fname = os.path.split(urlsplit(res.url).path)[-1]\n fpath = os.path.join(cfg.OUTPUT_DIR, fname)\n with open(fpath, 'wb') as f:\n for chunk in res.iter_content(cfg.CHUNK):\n f.write(chunk)", "def download(url, path):\n response = requests.get(url)\n\n if response.ok:\n print(\"response is ok file is downloading ... \")\n # start to download file from url.\n with open(path, \"wb\") as f:\n f.write(response.content)\n else:\n print(\"Error!\", response.status_code)\n return False\n\n print(\"File downloaded succusfully.\")\n return True", "def download_url(url, path=None, name=None):\n r = requests.get(url, allow_redirects=True)\n if path:\n paths = []\n paths.append(path)\n make_dir_from_list(paths)\n open(os.path.join(paths[0], name), 'wb').write(r.content)\n return r.content.decode('utf-8')", "def download(url, to):\n filename = url.rstrip('/').split('/')[-1] + '.zip'\n r = requests.get(url, stream=True)\n\n outpath = os.path.join(to, filename)\n\n with open(outpath, 'wb') as fd:\n for chunk in r.iter_content(1024 * 1024):\n fd.write(chunk)\n\n return outpath", "def download_to_file(url, filename):\n with browser_spoof_open(url) as download_conn:\n with open(filename, \"wb\") as out_file:\n shutil.copyfileobj(download_conn, out_file, 1024 * 8)", "def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()", "def download_file(url, path):\n file_name = path + url.split(\"/\")[-1]\n req = requests.get(url)\n zipped_info = req.content\n print(file_name)\n if not os.path.isfile(file_name):\n print(\"file doesnt exist, writing\", file_name)\n with open(file_name, 'wb') as f:\n f.write(zipped_info)\n else:\n print(\"file exists\", file_name)", "def get_file(url):\n helpers.make_workdir() # create temp working directory\n file_url = url + constant.MALICIOUS_LOCATION\n print(file_url)\n filename = wget.download(file_url, out=constant.WORKDIR)\n return filename", "def download_file(dwn_url, dwn_folder):\n # Prepare path\n _, dwn_fil = split(dwn_url)\n dwn_dir = join(dwn_folder, dwn_fil)\n\n # download_tile = requests.get(dwn_url)\n open(dwn_dir, 'wb').write(requests.get(dwn_url).content)\n\n # Message for successful download\n status_msg = dwn_fil + ' succsesfully downloaded'\n\n return 
status_msg, dwn_fil", "def image_downloader(url, file_path, file_name):\n response = requests.get(url, stream=True)\n with open(file_path + \"/\" + file_name, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)", "def url_to_file_storage(url):\n r = requests.get(url, stream=True)\n filename = r.url.split('/')[-1]\n content_type = r.headers.get('Content-Type', 'application/octet-stream')\n return FileStorage(stream=r.raw, filename=filename, content_type=content_type)", "def download_file(url, fname_out=None) -> None:\n\n import ssl\n\n try:\n with urllib.request.urlopen(url) as f:\n if not fname_out:\n return f.read().decode(\"utf-8\")\n else:\n fdir = os.path.dirname(fname_out)\n if not os.path.exists(fdir):\n os.makedirs(fdir)\n\n with open(fname_out, \"wb\") as outfile:\n outfile.write(f.read())\n return fname_out\n\n except ssl.SSLError:\n print(\"WHAT!\")\n sys.exit(1)", "def download (url):\n path, url = url\n r = requests.get (url, stream = True)\n content = r.text\n #print (content)\n with open (path + '.txt', 'w') as f:\n f.write (content)", "def write_to_file(file_name, url):\n\n with open(file_name, 'a') as myfile:\n myfile.write('{}\\n'.format(url))", "def download_file(dwn_url, dwn_folder):\n download_tile = requests.get(dwn_url)\n\n # Save the content as file\n _, dwn_fil = split(dwn_url)\n dwn_dir = join(dwn_folder, dwn_fil)\n open(dwn_dir, \"wb\").write(download_tile.content)\n # Message for successful download\n status_msg = f\"{dwn_fil} succsesfully downloaded\"\n\n return status_msg, dwn_fil", "def download_file(src_url, dst_path):\n logger.info(f'Downloading file from: {src_url}')\n with src_url.open(mode='r') as in_file:\n with open(dst_path, 'wb') as out_file:\n out_file.write(in_file.read())\n logger.info(f'Downloaded file path on disk: {dst_path}')\n return dst_path", "def download_file(filename, url):\n with open(filename, 'wb') as fout:\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Write response data to file\n for block in response.iter_content(4096):\n fout.write(block)", "def download_file(url, local_filename):\n response = requests.get(url, stream=True)\n with open(local_filename, \"wb\") as outfile:\n for chunk in response.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n outfile.write(chunk)", "def ReadRemoteFile(url) -> bytes:\n local_url = download_util.DownloadResource(url)\n return file_util.OpenFile(local_url).read()", "def fetch_save(url):\n\n name = url.split(\"/\")[-1]\n response = requests.get(url, stream=True)\n if response.status_code == 200:\n with open(f\"{DATA_PATH}/{name}\", \"wb\") as f:\n f.write(response.raw.read())\n else:\n logging.info(f\"Failed {url} download\")", "def scrape_to(str, dest):\n dest = path_validate(dest)\n site = urlopen(str)\n site_content = site.read()\n full_path = dest+str.replace('/', '.')+'.txt'\n site_file = open(full_path, 'w')\n site_file.write(site_content)\n site_file.close()\n return", "def download(url: str, to_dir: str) -> str:\n to_file = os.path.join(to_dir, get_filename_from_url(url))\n logger.debug(\"Download %s to %s\", url, to_file)\n\n h = httplib2.Http(\".cache\")\n (_, content) = h.request(url, \"GET\")\n with open(to_file, 'wb') as f:\n f.write(content)\n return to_file", "def downloadFile(self, base_url, file_name):\n url = os.path.join(base_url, file_name)\n req = urllib2.Request(url)\n try:\n f = urllib2.urlopen(req, timeout=self.timeout)\n local_file = open(os.path.join(self.config.get('PATHS', 'pdfdir'), file_name), 
\"w\")\n local_file.write(f.read())\n local_file.close()\n except Exception, err:\n print \"[ Failed ]\"\n print \"\\n***ERROR in downloadFile: %s\" % err\n sys.exit(0)", "def download_song(url, filename):\n page = requests.get(url, headers=HEADERS)\n if page.status_code == 200: # OK\n with open(filename, 'w') as outf:\n outf.write(page.text)\n else:\n print(f'download failed with status code {page.status_code}!')", "def download(url, filename):\n response = requests.get(url, stream=True)\n with open(filename, \"wb\") as handle:\n for data in response.iter_content():\n handle.write(data)", "def download_file(url, download_path):\n\n # Extract the filename from the URL\n parsed = urlparse(url)\n filename = basename(parsed.path)\n\n # Ensure the output directory exists\n if not os.path.exists(download_path):\n os.makedirs(download_path)\n\n # Get a temporary file path for the compressed file download\n downloaded_file = os.path.join(tempfile.gettempdir(), filename)\n\n # Download the file\n urlretrieve(url, downloaded_file)\n\n # Move the file to the destination folder\n destination_path = os.path.join(download_path, filename)\n os.rename(downloaded_file, destination_path)", "def to_file(self, filename):\n resp = urlopen(self.url)\n self.file_size = self._get_content_length(resp.headers)\n block_size = 8192\n self.bytes_read = 0\n with open(filename, 'wb') as f:\n while True:\n buf = resp.read(block_size)\n if not buf:\n break\n self.bytes_read += len(buf)\n f.write(buf)\n self._dl_progress_bar()\n if self.show_progress:\n print(' ✓')", "def download(url, fname, directory):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n else:\n print(\"Directory exists: %s\" % directory)\n filepath = os.path.join(directory, fname)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (fname, filepath))\n local_fname, _ = request.urlretrieve(url + fname, filepath)\n statinfo = os.stat(filepath)\n print(\"Successfully downloaded %s bytes %s\\n\" % (fname, statinfo.st_size))\n else:\n print(\"File %s exists in %s\\n\" % (fname, filepath))\n return filepath", "def download_file(self, url, path):\n print('\\tDownloading: ', path)\n with open(path, 'w') as outfile:\n try:\n response = self._http_client.get(url)\n outfile.write(response.text)\n finally:\n response.close()\n outfile.close()\n gc.collect()", "def download_file(url, filename):\n with requests.get(url, stream=True) as res:\n if res.status_code == 200:\n with open(filename, 'wb') as f:\n for chunk in res.iter_content(chunk_size=8192): \n f.write(chunk)\n else:\n raise ValueError(\"{} {}\".format(res.status_code, url))\n return filename", "def download_from_url(file_name: str, url: str, download_dir: str, cache_dir: Optional[str] = None):\n if not isinstance(url, str):\n raise TypeError(f\"{url} must be str type.\")\n if not isinstance(file_name, str):\n raise TypeError(f\"{file_name} must be str type.\")\n if not isinstance(download_dir, str):\n raise TypeError(f\"{download_dir} must be str type.\")\n\n if cache_dir is None:\n cache_dir = URDUHACK_DIRECTORY\n\n Path(cache_dir).mkdir(parents=True, exist_ok=True)\n tf.keras.utils.get_file(fname=file_name, origin=url, cache_subdir=download_dir, cache_dir=cache_dir, extract=True)", "def fetch(url, filename):\n with open(filename, 'wb') as handle:\n response = requests.get(url, stream=True)\n\n if not response.ok:\n logger.error('Download failed')\n return False\n\n for block in response.iter_content(1024):\n if not block:\n 
break\n\n handle.write(block)\n\n logger.info(' -> Rewriting URIs')\n q = re.compile(r'http://data.ub.uio.no/realfagstermer/([0-9]+)')\n with open(filename, 'r') as infile:\n with open(filename + '.tmp', 'w') as outfile:\n outfile.write(q.sub('http://data.ub.uio.no/realfagstermer/c\\\\1', infile.read()))\n os.unlink(filename)\n os.rename(filename + '.tmp', filename)\n\n return True", "def urlretrieve(url, directory):\n\t# Get filename from URL and construct full destination path\n\t_, filename = url.rsplit('/', 1)\n\tpath = os.path.join(directory, filename)\n\twhile True:\n\t\t# On non-404 errors, keep tring to fetch the file\n\t\ttry:\n\t\t\tremote_file = urllib2.urlopen(url)\n\t\t\twith open(path, 'w') as destination:\n\t\t\t\twhile True:\n\t\t\t\t\t# Get the data in chunks and save it locally\n\t\t\t\t\tbuffer = remote_file.read(CHUNK_SIZE)\n\t\t\t\t\tif buffer:\n\t\t\t\t\t\tdestination.write(buffer)\n\t\t\t\t\telse:\n\t\t\t\t\t\t# EOF has been reached\n\t\t\t\t\t\treturn path\n\t\texcept urllib2.URLError, error:\n\t\t\tif getattr(error, 'code', None) == 404:\n\t\t\t\treturn None\n\t\t\tprint 'Failed to fetch %s, retrying...' % url\n\t\t\t# TODO: count and log failures\n\t\ttime.sleep(RETRY_WAIT)", "def ftp_download(url, dir):\n filename = url.split('/')[-1]\n with closing(request.urlopen(url)) as r:\n with open(dir + filename, 'wb+') as f:\n shutil.copyfileobj(r, f)\n return dir + filename", "def download_from_filepath(self, filepath):\n\n file = self.get_file_from_filepath(filepath)\n\n self.download_file_to_directory(file)", "def download_if_not_exist(self):\n for (fname, furl) in cornell_file_urls:\n # dir_path = os.path.dirname(os.path.realpath(__file__))\n input_folder = '{input_dir}/cornell'.format(input_dir=self.input_dir)\n full_dirname = input_folder\n full_fname = '/'.join([full_dirname, fname])\n if not file_exists(full_fname):\n remote_file = urlopen(furl)\n data = remote_file.read()\n remote_file.close()\n # Try creating the dir\n try_create_dir(full_dirname)\n print('download if not exist fname:', fname, 'url:', furl)\n # Write the file\n with open(full_fname, 'wb') as f:\n f.write(data)", "def download_file(url, target_path):\n\n r = requests.get(url, stream=True)\n\n with open(target_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)", "def download_file(download_url, save_path):\n url = \"https://www.encodeproject.org/\" + download_url\n urllib.request.urlretrieve(url, save_path)", "def download_file(self, source, dest=None):\n if dest is None or self.download_root not in dest:\n dest = self.download_root + source\n\n # dest is a directory if ending with '/' or '.', otherwise it's a file\n if dest.endswith(\".\"):\n dest += \"/\"\n\n blob_dest = dest\n if dest.endswith(\"/\"):\n blob_dest = dest + os.path.basename(source)\n\n print(f\" Downloading {source} to {blob_dest}\")\n os.makedirs(os.path.dirname(blob_dest), exist_ok=True)\n bc = self.client.get_blob_client(blob=source)\n with open(blob_dest, \"wb\") as file:\n data = bc.download_blob()\n file.write(data.readall())\n return blob_dest", "def download(url, save_as):\n\topen(save_as, 'w').write(urllib2.urlopen(url).read())", "def save_file(url, output_path):\n\n print(url)\n\n try:\n response = requests.get(url, stream = True)\n except:\n print(\"=> Download failed: %s\" % url)\n return False\n\n if (response.status_code == 200):\n try:\n with open(output_path, \"wb\") as f:\n for chunk in response.iter_content(chunk_size = 512):\n if (chunk):\n f.write(chunk)\n 
sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n print(\"\")\n return True\n\n except Exception as err:\n print(\"\\n=> Error: %s (%s)\" % (err, url))\n\n else:\n print(\"=> Download failed: %s\" % url)\n return False", "def _download_epw_file(url):\n r = requests.get(url)\n if r.ok:\n # py2 and 3 compatible: binary write, encode text first\n log.debug(\" ... OK!\")\n return io.StringIO(r.text)\n else:\n log.error(\" connection error status code: %s\" % r.status_code)\n r.raise_for_status()", "def filedownload(source, destination):\n\n # Initiate the download\n urllib.request.urlretrieve(source, destination)", "def maybe_download_from_cloud(url, filename, subfolder=None, cache_dir=None, refresh_cache=False) -> str:\n if cache_dir is None:\n cache_location = Path.joinpath(Path.home(), \".cache/torch/mridc\")\n else:\n cache_location = cache_dir\n if subfolder is not None:\n destination = Path.joinpath(cache_location, subfolder)\n else:\n destination = cache_location\n\n if not os.path.exists(destination):\n os.makedirs(destination, exist_ok=True)\n\n destination_file = Path.joinpath(destination, filename)\n\n if os.path.exists(destination_file):\n logging.info(f\"Found existing object {destination_file}.\")\n if refresh_cache:\n logging.info(\"Asked to refresh the cache.\")\n logging.info(f\"Deleting file: {destination_file}\")\n os.remove(destination_file)\n else:\n logging.info(f\"Re-using file from: {destination_file}\")\n return str(destination_file)\n # download file\n wget_uri = url + filename\n logging.info(f\"Downloading from: {wget_uri} to {str(destination_file)}\")\n # NGC links do not work everytime so we try and wait\n i = 0\n max_attempts = 3\n while i < max_attempts:\n i += 1\n try:\n wget.download(wget_uri, str(destination_file))\n if os.path.exists(destination_file):\n return str(destination_file)\n return \"\"\n except Exception as e:\n logging.info(f\"Download from cloud failed. 
Attempt {i} of {max_attempts}\")\n logging.info(f\"Error: {e}\")\n sleep(0.05)\n continue\n raise ValueError(\"Not able to download url right now, please try again.\")", "def download_file(url, file_path, force=False):\n\n if os.path.exists(file_path) and not force:\n return\n dirname = os.path.dirname(file_path)\n Path(dirname).mkdir(parents=True, exist_ok=True)\n gdown.download(url, file_path, quiet=False)", "def download(filename, work_directory, source_url, overwrite=False):\n\n if not gfile.Exists(work_directory):\n gfile.MakeDirs(work_directory)\n\n filepath = os.path.join(work_directory, filename)\n\n if overwrite or not gfile.Exists(filepath):\n _filename, _ = urlretrieve_with_retry(source_url + filename)\n #print('_filename:', _filename)\n gfile.Copy(_filename, filepath, overwrite=overwrite)\n with gfile.GFile(filepath) as f:\n size = f.size()\n print('Successfully downloaded', filename, size, 'bytes.')\n\n return filepath", "def downloadLocal(url_list,path):\n print(\"You are downloading {} images\".format(parser_arguments().limit),end=\" \");print(\"of {} class.\".format(parser_arguments().classes))\n print(\"Please, be patient :)\")\n for i in range(len(url_list)):\n filename= url_list[i].split(\"/\")[-1] # name of the picture file\n r = requests.get(url_list[i], stream =True)\n print(filename)\n\n with open(filename,'wb') as f : # create the file locally in binary-write mode\n r = requests.get(url_list[i], stream =True)\n shutil.copyfileobj(r.raw, f) #write our image to the file\n shutil.move(filename,path)\n print('Done!')", "def download_file(url, local_filename, update=False):\n if os.path.isfile(local_filename):\n if not update:\n return\n else:\n os.remove(local_filename)\n\n r = requests.get(url, stream=True)\n # http://stackoverflow.com/questions/15352668/download-and-decompress-gzipped-file-in-memory\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n f.write(chunk)", "def download_file(url, fname):\n urllib.request.urlretrieve(url, fname)", "def download_save_file(url: str, save_directory: str) -> str:\n print(f'Downloading: {url}')\n downloaded_file = requests.get(url)\n head, tail = os.path.split(url)\n save_file_path = os.path.join(save_directory, tail)\n\n # ensure path exists\n os.makedirs(save_directory, exist_ok=True)\n\n with open(save_file_path, 'wb') as fd:\n fd.write(downloaded_file.content)\n print(f'Saved: {url}')\n return save_file_path", "def load_file(uri):\n # get filename\n filename = os.path.basename(uri)\n\n # If filepath specified and file exists, return filepath as-is\n if os.path.isfile(uri):\n return uri\n # Check to see if file exists in current working directory\n elif os.path.isfile(filename):\n return filename\n # Otherwise, check to see if URI is a URL\n elif uri.startswith(('http', 'ftp')):\n # retrieve remote file contents\n print(\"Downloading %s\" % uri)\n handle = urllib2.urlopen(uri)\n contents = handle.read()\n\n with open(filename, 'w') as fp:\n fp.write(contents)\n return filename\n # if it's note a URL or a valid filepath, raise and exception\n else:\n raise Exception(\"Invalid URI specified: %s\" % uri)", "def serve(file_path):\n\tfile_path = os.path.abspath(file_path)\n\tif os.path.exists(file_path):\n\t\tdest_path = get_path(os.path.basename(file_path))\n\t\tshutil.copyfile(file_path, dest_path)\n\telse:\n\t\traise Exception('File could not be located')", "def maybe_download(directory, filename, url):\n if not os.path.exists(directory):\n 
print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n filepath = os.path.join(directory, filename)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (url, filepath))\n filepath, _ = urllib.request.urlretrieve(url, filepath)\n statinfo = os.stat(filepath)\n print(\"Succesfully downloaded\", filename, statinfo.st_size, \"bytes\")\n return filepath", "def download(url, out_folder):\n \n filename = \"2.png\"\n \n outpath = os.path.join(out_folder, filename)\n \n if url.lower().startswith(\"http\"):\n urlretrieve(url, outpath)\n else:\n urlretrieve(urlparse.urlunparse(parsed), outpath)", "def download_file(url, destination_dir='./', desc=None, force=False):\n # Convert path to pathlib object if not already\n destination_dir = Path(destination_dir)\n # Get filename from url\n fname = url.split('/')[-1]\n # Construct path to file in local machine\n local_filepath = Path(destination_dir) / fname\n\n if local_filepath.is_file() and not force:\n logger.info(\n \"File(s) already downloaded. Use force=True to download again.\")\n return local_filepath\n else:\n # Safely create nested directory - https://stackoverflow.com/a/273227\n destination_dir.mkdir(parents=True, exist_ok=True)\n\n if desc is None:\n desc = f\"Downloading {fname}\"\n\n # Download large file with requests - https://stackoverflow.com/a/16696317\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n total_size_in_bytes = int(r.headers.get('content-length', 0))\n block_size = 1024\n # Progress bar for downloading file - https://stackoverflow.com/a/37573701\n pbar = tqdm(total=total_size_in_bytes,\n unit='iB',\n unit_scale=True,\n desc=desc)\n with open(local_filepath, 'wb') as f:\n for data in r.iter_content(block_size):\n pbar.update(len(data))\n f.write(data)\n pbar.close()\n\n # TODO Add SHA256 or MD5 comparison\n\n return local_filepath", "def download(self, url, destination):\n fileDownloader = utils.HttpFileDownloader(url, destination)\n fileDownloader.download()", "def save(self, filename: str):\n r = requests.get(self.raw_url, stream=True)\n if r.status_code == 404:\n raise Exception(f\"Document {self.key} does not exist\")\n r.raise_for_status()\n \n with open(filename, 'wb') as file:\n for chunk in r.iter_content(10 * 1024):\n file.write(chunk)", "def download(self, item, save_dir='./'):\r\n try:\r\n os.makedirs(save_dir)\r\n except OSError as e:\r\n if e.errno == errno.EEXIST and os.path.isdir(save_dir):\r\n # another thread beat us to creating this dir\r\n pass\r\n else:\r\n # target dir exists as a file, or a different error\r\n raise\r\n\r\n item['url'] = item[item['type'] + 's']['standard_resolution']['url'].split('?')[0]\r\n # remove dimensions to get largest image\r\n item['url'] = re.sub(r'/s\\d{3,}x\\d{3,}/', '/', item['url']) \r\n\r\n base_name = item['url'].split('/')[-1]\r\n file_path = os.path.join(save_dir, base_name)\r\n\r\n if not os.path.isfile(file_path):\r\n\r\n with open(file_path, 'wb') as file:\r\n try:\r\n bytes = requests.get(item['url']).content\r\n except requests.exceptions.ConnectionError:\r\n\t\t\t\t\tsleep(5)\r\n\t\t\t\t\tbytes = requests.get(item['url']).content\r\n\t\t\t\t\t\r\n file.write(bytes)\r\n\r\n file_time = int(item['created_time'])\r\n os.utime(file_path, (file_time, file_time))", "def put(self, url, localfile):\n\n cachedir = self._cachedir(url)\n filename = localfile.name\n\n logger.debug(f\"Storing {localfile} in cache for {url}\")\n shutil.copy2(localfile, cachedir / filename)\n self._writefilename(cachedir, filename)", "def 
url_retrieve(url, output_file):\n r = requests.get(url, allow_redirects=True)\n if r.status_code != 200:\n raise ConnectionError(f\"Could not download {url}\\nError code: {r.status_code}\")\n\n output_file.write_bytes(r.content)", "def download(self, url, filename=None, folder='./', file_field=None, delete_if_exists=True):\n rc = None\n downloading_set = SocialClient.get_downloading_set()\n if url in downloading_set:\n log.info('[IGNORED] %s is in downloading.' % url)\n return rc\n else:\n downloading_set.add(url)\n\n if not filename:\n filename = url.split('/')[-1]\n\n try:\n if file_field is not None:\n folder = file_field.field.upload_to(file_field, filename) \\\n if callable(file_field.field.upload_to) else file_field.field.upload_to\n fullpath = os.path.join(self.download_root, folder, filename)\n fullpath = os.path.abspath(fullpath)\n os.makedirs(os.path.dirname(fullpath), exist_ok=True)\n rc = fullpath\n\n # TODO: handle BIG file\n log.debug('Downloading %s to %s' % (url, fullpath))\n r = requests.get(url, stream=True, proxies=self.proxies)\n if r.status_code != requests.codes.ok:\n log.error('%d %s. Downloading %s' % (r.status_code, r.reason, url))\n rc = None\n\n if delete_if_exists:\n if os.path.exists(fullpath):\n try:\n os.remove(fullpath)\n except Exception as err:\n log.exception(err)\n # then will auto rename\n\n if file_field is not None:\n file_field.save(filename, r.raw)\n else:\n try:\n with open(fullpath, 'wb') as f:\n f.write(r.raw)\n except Exception as err:\n log.exception(err)\n try:\n if os.path.exists(fullpath):\n os.remove(fullpath)\n except:\n pass\n rc = None\n except Exception as err:\n log.exception(err)\n rc = None\n finally:\n downloading_set.remove(url)\n\n return rc", "def _download_to_file(session: requests.Session, url: str, pyfile: Path):\n with session.get(url, stream=True) as r:\n r.raise_for_status()\n pyfile.parent.mkdir(parents=True, exist_ok=True)\n with pyfile.open(mode=\"wb\") as f:\n for chunk in r.iter_content(chunk_size=40960):\n f.write(chunk)", "def store_file(self,client,url,orig_filename):\n # Get rep_path to copy file\n try:\n filename, headers=urllib.urlretrieve(url)\n except:\n # if we fail here returns a None object\n self.log('File retrieval error for file %s' % url)\n return ''\n\n # Fix filename encoding\n udata=orig_filename.decode(\"utf-8\")\n orig_filename=udata.encode(\"ascii\",\"ignore\")\n\n # Use ckanclient patch to upload file to storage\n url, msg=util.upload_file(client,filename,orig_filename)\n\n # Log error if True\n if msg is not '':\n #log.error('File upload error:\\n %s' % msg)\n self.log( 'File upload error:\\n %s' % msg)\n return None\n\n return url", "def maybe_download(filename):\n\n if not tf.gfile.Exists(WORK_DIRECTORY):\n tf.gfile.MakeDirs(WORK_DIRECTORY)\n filepath = os.path.join(WORK_DIRECTORY, filename)\n if not tf.gfile.Exists(filepath):\n filepath, _ = urllib.request.urlretrieve(SOURCE_URL + filename, filepath)\n with tf.gfile.GFile(filepath) as f:\n size = f.Size()\n print('Successfully downloaded', filename, size, 'bytes.')\n return filepath", "def download_file(url, fn, cookiejar, cookies_file, wget_bin):\n\ttry:\n\t\t# create the path if need be\n\t\tbasedir = os.path.dirname(fn)\n\t\tif not os.path.isdir(basedir):\n\t\t\tos.makedirs(basedir)\n\n\t\tif wget_bin is not None:\n\t\t\tdownload_file_wget(wget_bin, url, fn, cookies_file)\n\t\telse:\n\t\t\tdownload_file_nowget(url, fn, cookiejar)\n\n\texcept KeyboardInterrupt, e: \n\t\tprint \"\\nKeyboard Interrupt -- Removing partial file:\", 
fn\n\t\tos.remove(fn)\n\n\t\traise e", "def save_file(url, *, out_dir='sha_tmp/', out_name=None):\n exten_types = {'image/fits': '.fits',\n 'text/plain; charset=UTF-8': '.tbl',\n 'application/zip': '.zip',\n }\n # Make request\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Name file using ID at end\n if out_name is None:\n out_name = 'shaID_' + id_parse.findall(url)[0]\n # Determine extension\n exten = exten_types[response.headers['Content-Type']]\n # Check if path exists\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n # Write file\n with open(out_dir + out_name + exten, 'wb') as f:\n for block in response.iter_content(1024):\n f.write(block)", "def maybe_download(directory, filename, url):\n if not os.path.exists(directory):\n print(\"Creating directory %s\" % directory)\n os.mkdir(directory)\n filepath = os.path.join(directory, filename)\n if not os.path.exists(filepath):\n print(\"Downloading %s to %s\" % (url, filepath))\n filepath, _ = urllib.request.urlretrieve(url, filepath)\n statinfo = os.stat(filepath)\n print(\"Succesfully downloaded\", filename, statinfo.st_size, \"bytes\")\n return filepath", "def download():\n response = requests.get(URL, stream=True)\n\n file = open(FILE_NAME, 'wb')\n file.write(response.content)\n\n with zipfile.ZipFile(FILE_NAME, 'r') as zip_ref:\n zip_ref.extractall()\n\n file.close()\n os.remove(FILE_NAME)", "def download_zip(url, folder=None):\n\n # get this file folder name and save the file name\n if not folder:\n folder = os.path.dirname(os.path.abspath(__file__))\n file_name = os.path.split(url)[1]\n\n # Download the file from \"url\" and save it locally under \"file_name\":\n try:\n with urllib.request.urlopen(url) as response, open(folder + \"/\" + file_name, 'wb') as out_file:\n shutil.copyfileobj(response, out_file)\n except urllib.error.URLError as e:\n print('urllib.error.URLError')\n raise Exception(e)\n except Exception as e:\n raise Exception(e)\n else:\n return folder,file_name", "def download(self, url, directory):\n while True:\n try:\n urlretrieve(url, directory) # this fails if no internet\n break\n except IOError:\n if not ask(\"Notes\", \"Error: No internet connection\", self):\n raise", "def _download_file(file_url: str, file_path: str) -> str:\n if os.path.exists(file_path):\n return file_path\n op_desc = f\"Downloading {os.path.basename(file_path)}\"\n try:\n with requests.Session() as req_sess:\n req_res = req_sess.get(file_url, stream=True)\n total_length = int(req_res.headers.get(\"Content-Length\"))\n with tqdm.wrapattr(req_res.raw, \"read\", total=total_length, desc=op_desc) as raw:\n with open(file_path , \"wb\") as file:\n shutil.copyfileobj(raw,file)\n return file_path\n except Exception as network_error:\n if os.path.exists(file_path):\n os.remove(file_path)\n raise network_error", "def _download(url, file_name):\n # File length can only be approximated from the resulting GET, unfortunately\n r = requests.get(url, stream=True)\n if 'Content-Length' in r.headers:\n file_len = int(r.headers['Content-Length'])\n elif 'X-Original-Content-Length' in r.headers:\n file_len = int(r.headers['X-Original-Content-Length'])\n else:\n file_len = 0\n r.raw.decode_content = True\n with open(file_name, 'wb') as f:\n _copyfileobj(r.raw, f, chunks=(file_len / (64. 
* 1024)))\n r.close()\n\n return file_name", "def fetch_repo_file(self, path, save = False, mode = 'w'):\n\t\ttry:\n\t\t\tprint(\"Fetching repo file: {0}\".format(self.config[\"repo\"][\"repo_proto\"] + \"://\" + self.config[\"repo\"][\"repo_addr\"] + \":\" + self.config[\"repo\"][\"repo_port\"] + path))\n\t\t\n\t\t\tdata = urllib.request.urlopen(self.config[\"repo\"][\"repo_proto\"] + \"://\" + self.config[\"repo\"][\"repo_addr\"] + \":\" + self.config[\"repo\"][\"repo_port\"] + path).read()\n\n\t\t\tif save != False:\n\t\t\t\tf = open(path, mode)\n\t\t\t\tf.write(data)\n\t\t\t\tf.close()\n\t\t\treturn data\n\t\texcept Exception as e:\n\t\t\tprint(\"Failed to connect to server, exiting...\");\n\t\t\tsys.exit(1)", "def maybe_download(filename):\n if not tf.gfile.Exists(WORK_DIRECTORY):\n tf.gfile.MakeDirs(WORK_DIRECTORY)\n filepath = os.path.join(WORK_DIRECTORY, filename)\n if not tf.gfile.Exists(filepath):\n filepath, _ = urllib.request.urlretrieve(SOURCE_URL + filename, filepath)\n with tf.gfile.GFile(filepath) as f:\n size = f.size()\n print('Successfully downloaded', filename, size, 'bytes.')\n return filepath", "def maybe_download(filename):\n if not tf.gfile.Exists(WORK_DIRECTORY):\n tf.gfile.MakeDirs(WORK_DIRECTORY)\n filepath = os.path.join(WORK_DIRECTORY, filename)\n if not tf.gfile.Exists(filepath):\n filepath, _ = urllib.request.urlretrieve(SOURCE_URL + filename, filepath)\n with tf.gfile.GFile(filepath) as f:\n size = f.size()\n print('Successfully downloaded', filename, size, 'bytes.')\n return filepath", "def fetch_and_save(cls, url, path):\n content = cls.fetch_with_retry(url)\n if not content:\n return False\n # print(\"Saving {}\".format(os.path.basename(path)))\n with open(path, \"wb\") as file:\n file.write(content)\n return content", "def fetch(self, location=None, conn_timeout=None):\r\n if self.local and (location is None or os.path.dirname(self._url.path) == location):\r\n return self._url.path\r\n location = location or safe_mkdtemp()\r\n target = os.path.join(location, self.filename)\r\n if os.path.exists(target):\r\n return target\r\n try:\r\n with contextlib.closing(self.fh(conn_timeout=conn_timeout)) as url_fp:\r\n safe_mkdir(os.path.dirname(target))\r\n with open(target, 'wb') as fp:\r\n fp.write(url_fp.read())\r\n except (FetchError, IOError) as e:\r\n raise self.UnreadableLink('Failed to fetch %s to %s: %s' % (self.url, location, e))\r\n return target" ]
[ "0.6463796", "0.6256494", "0.61775124", "0.6177211", "0.60822904", "0.5990596", "0.5944307", "0.59229976", "0.589575", "0.58864975", "0.5879225", "0.58666736", "0.58587056", "0.5850899", "0.5834173", "0.5821299", "0.5803716", "0.5792066", "0.5764819", "0.5752733", "0.5699436", "0.5695784", "0.56952816", "0.56856585", "0.5680852", "0.5669633", "0.5663326", "0.56562734", "0.56444734", "0.5638393", "0.5636746", "0.56354856", "0.5630636", "0.56077206", "0.56034476", "0.5596101", "0.5588476", "0.5587766", "0.55843776", "0.556711", "0.55629605", "0.55614275", "0.55587924", "0.554831", "0.55466616", "0.55253434", "0.55194265", "0.55192447", "0.55136544", "0.55134094", "0.55045164", "0.5498055", "0.5483161", "0.5478954", "0.5477954", "0.54747313", "0.54731715", "0.5473138", "0.54724437", "0.5451676", "0.54463243", "0.5440356", "0.54262847", "0.54247755", "0.5420359", "0.54186815", "0.5412296", "0.5407762", "0.5404943", "0.54014504", "0.5397124", "0.53887874", "0.53854424", "0.53829783", "0.5379595", "0.53762686", "0.5366708", "0.53602004", "0.5355227", "0.53544605", "0.5342106", "0.5337635", "0.53331196", "0.5329973", "0.532737", "0.5316287", "0.5315565", "0.53069204", "0.53066796", "0.53052264", "0.5288928", "0.52888703", "0.5282614", "0.52818793", "0.52725524", "0.5270567", "0.5264987", "0.5259685", "0.5259685", "0.5259504", "0.5251628" ]
0.0
-1
Simple time series forecaster class
def __init__(self, cfg=None):
    if cfg is None:
        cfg = (1, 1, "persist")
    self.cfg = cfg
    self.shift, self.offset, self.avg_type = cfg
    self.forecast = self.average_forecast
    self.averager = None
    if self.avg_type == "mean":
        self.averager = np.mean
    elif self.avg_type == "median":
        self.averager = np.median
    elif self.avg_type == "persist":
        self.forecast = self.persistence_forecast
    elif self.avg_type == "drift":
        self.forecast = self.drift_forecast
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def model():\n return TimeSeriesMultiReg()", "def _build_forecast_series(self,\n points_preds: np.ndarray) -> TimeSeries:\n\n time_index = self._generate_new_dates(len(points_preds))\n\n return TimeSeries.from_times_and_values(time_index, points_preds, freq=self.training_series.freq())", "def __init__(self, ts_df, time_format=\"%Y-%m-%d %H:%M:%S\", freq='D',\n fill_method='ffill',\n n_test=0, n_val=0,\n hyper_params=None,\n test='adf',\n trend=None,\n seasonal=False,\n seasonal_periods=1,\n **kwds):\n self._ts_df_cols = ['ds', 'y']\n\n self.ts_df = ts_df\n self.time_format = time_format\n self.freq = freq\n self.fill_method = fill_method.lower()\n self.n_test = int(n_test)\n self.n_val = int(n_val)\n self.transform = None\n self._boxcox_lmbda = None\n\n self._mode = ''\n\n self._train_dt = None\n self._test_dt = None\n self._val_dt = None\n\n self.model_fit = None\n self.fittedvalues = None\n self.residuals = None\n self.rmse = 0\n self._gs = tsa.GridSearchClass()\n self.hyper_params = hyper_params\n self.best_model = dict()\n\n \"\"\"\n self.rmse_test = 0\n self.rmse_val = 0\n \"\"\"\n\n self.upper_whisker_res = None\n self.lower_conf_int = None\n self.upper_conf_int = None\n\n self.forecast = None\n self.residuals_forecast = None\n\n self._res_decomp = None\n self._arr_seasonal = None\n self._arr_trend = None\n self._arr_baseline = None\n\n self._test = test\n self._trend = trend\n if self._trend is not None:\n self._trend = self._trend.lower()\n self._seasonal = seasonal\n if isinstance(self._seasonal, str):\n self._seasonal = self._seasonal.lower()\n self._seasonal_periods = seasonal_periods\n\n self._uvts_cls_logger = Logger('uvts_cls')\n\n UVariateTimeSeriesClass.assertions(self)\n # work with ts_df\n self.ts_df = self.ts_df.reset_index()\n self.ts_df.columns = self._ts_df_cols\n self.ts_df['y'] = self.ts_df['y'].apply(np.float64, errors='coerce')\n self.ts_df.set_index('ds', inplace=True)\n self._uvts_cls_logger.info(\n \"Received time series data of range: \" + str(min(self.ts_df.index)) + ' - ' + str(\n max(self.ts_df.index)) + \" and shape: \" + str(self.ts_df.shape))\n\n if not isinstance(self.ts_df.index, pd.DatetimeIndex):\n self._uvts_cls_logger.warning(\"Time conversion required...\")\n self.ts_df = self.ts_df.reset_index()\n try:\n self.ts_df['ds'] = self.ts_df['ds'].apply(\n lambda x: datetime.datetime.strptime(\n str(x).translate({ord('T'): ' ', ord('Z'): None})[:-1],\n self.time_format))\n except ValueError as e:\n self._uvts_cls_logger.warning(\"Zulu time conversion not successful: {}\".format(e))\n self._uvts_cls_logger.warning(\"Will try without assuming zulu time...\")\n try:\n self.ts_df['ds'] = self.ts_df['ds'].apply(\n lambda x: datetime.datetime.strptime(str(x), self.time_format))\n except ValueError as e:\n self._uvts_cls_logger.info(\"Time conversion not successful. 
Check your time_format: {}\".format(e))\n sys.exit(\"STOP\")\n else:\n self._uvts_cls_logger.info(\"Time conversion successful!\")\n else:\n self._uvts_cls_logger.info(\"Time conversion successful!\")\n # set index\n self.ts_df.set_index('ds', inplace=True)\n #\n self.ts_df.index = pd.to_datetime(self.ts_df.index)\n self.ts_df.sort_index(inplace=True)\n # resample\n self.ts_resample()\n UVariateTimeSeriesClass.assertions(self, post=True)\n #\n if self.n_val > len(self.ts_df) - self.n_test:\n self.n_val = len(self.ts_df) - self.n_test\n\n if self.n_test == 0 and self.n_val == 0:\n self._mode = 'forecast'\n elif self.n_test > 0:\n self._mode = 'test'\n elif self.n_test == 0 and self.n_val > 0:\n self._mode = 'validate'\n \n # delegate just for good programming style here\n super(UVariateTimeSeriesClass, self).__init__(**kwds)", "def timeseries_dataframe(self):\n return", "def __process__(self, data: dict, metadata: dict, format: str):\n result = []\n metadata = MetadataModel(\n symbol=metadata[\"2. Symbol\"],\n last_refreshed=datetime.strptime(metadata[\"3. Last Refreshed\"], format),\n timezone=metadata[\"6. Time Zone\"] if \"6. Time Zone\" in metadata else metadata[\"5. Time Zone\"]\n )\n\n for key in data:\n result.append(TimeSerieModel(\n timestamp=datetime.strptime(key, format),\n open=float(data[key][\"1. open\"]),\n high=float(data[key][\"2. high\"]),\n low=float(data[key][\"3. low\"]),\n close=float(data[key][\"4. close\"]),\n volume=int(data[key][\"5. volume\"])\n ))\n\n ts = TimeSeriesModel(metadata=metadata, series_data=result)\n\n if self.datatype == \"class\":\n return ts\n elif self.datatype == \"pandas\":\n try:\n import pandas as pd\n return pd.DataFrame(ts.get_list())\n except ModuleNotFoundError:\n raise ModuleNotFoundError(\"Please make sure pandas is installed.\")", "def test_get_drift_timeseries(self):\n # NB inputs need to be to be lists\n ref1 = 1.0\n ref2 = 2.0\n data = {'target': {'reflectance': [ref1,]},\n 'reference': {'reflectance': [ref2,]}\n }\n doublet = ([0, 0], )\n result = librad_drift.RadiometricDrift.get_drift_timeseries(data, doublet)\n self.assertEquals(result, [ref1/ref2, ]) # Output will be a list", "def ts_fit(series: TimeSeries) -> TimeSeries:\n pass", "def forecast(self) -> TSDataset:\n future = self.ts.make_future(self.horizon)\n predictions = self.model.forecast(future)\n return predictions", "def __init__(\n self, epochs, mags, errs=[], ra='none', dec='none',\n source_id='none', time_unit='day', classname='unknown',\n band='unknown', features_to_use=[]):\n self.time_unit = time_unit\n self.id = str(source_id)\n self.classname = classname\n self.start = epochs[0]\n self.end = epochs[-1]\n self.total_time = self.end - self.start\n self.epochs = epochs\n self.n_epochs = len(epochs)\n self.errs = errs\n self.mags = mags\n self.avg_mag = np.average(mags)\n self.ra = ra\n self.dec = dec\n self.band = band\n self.avgt = round((self.total_time) / (float(len(epochs))), 3)\n self.cads = []\n\n self.double_to_single_step = []\n self.all_times = []\n\n if len(errs) > 0:\n self.avg_err = np.average(errs)\n self.med_err = np.median(errs)\n self.std_err = np.std(errs)\n else:\n self.avg_err = None\n self.med_err = None\n self.std_err = None\n\n for i in range(len(epochs)):\n\n # all the deltaTs (time to next obs)\n try:\n self.cads.append(epochs[i + 1] - epochs[i])\n except IndexError:\n pass\n\n # ratio of time to obs after next to time to next obs\n try:\n self.double_to_single_step.append(\n (epochs[i + 2] - epochs[i]) / (epochs[i + 2] - epochs[i + 1]))\n 
except IndexError:\n pass\n except ZeroDivisionError:\n pass\n\n # all possible deltaTs ()\n for j in range(1, len(epochs)):\n try:\n self.all_times.append(epochs[i + j] - epochs[i])\n except IndexError:\n pass\n\n self.all_times_std = np.std(self.all_times)\n self.all_times_med = np.median(self.all_times)\n self.all_times_avg = np.average(self.all_times)\n\n hist, bins = np.histogram(self.all_times, bins=50)\n nhist, bins = np.histogram(self.all_times, bins=50, normed=True)\n self.all_times_hist = hist\n self.all_times_bins = bins\n self.all_times_hist_peak_val = np.max(hist)\n self.all_times_hist_peak_bin = np.where(\n hist == self.all_times_hist_peak_val)[0][0]\n self.all_times_hist_normed = nhist\n self.all_times_bins_normed = bins / np.max(self.all_times)\n self.all_times_nhist_peak_val = np.max(nhist)\n\n peaks = [] # elements are lists: [peak, index]\n for peak in heapq.nlargest(10, nhist):\n index = np.where(nhist == peak)[0][0]\n try:\n if nhist[index - 1] < peak and nhist[index + 1] < peak:\n peaks.append([peak, index])\n elif nhist[index - 1] == peak:\n if nhist[index - 2] < peak:\n peaks.append([peak, index])\n elif nhist[index + 1] == peak:\n if nhist[index + 2] < peak:\n peaks.append([peak, index])\n except IndexError:\n # peak is first or last entry\n peaks.append([peak, index])\n\n peaks = sorted(peaks, key=lambda x: x[1])\n\n self.all_times_nhist_peaks = peaks[:4]\n self.all_times_nhist_numpeaks = len(peaks)\n if len(peaks) > 0:\n self.all_times_nhist_peak1_bin = peaks[0][1]\n else:\n self.all_times_nhist_peak1_bin = None\n (self.all_times_nhist_peak_1_to_2, self.all_times_nhist_peak_1_to_3,\n self.all_times_nhist_peak_2_to_3, self.all_times_nhist_peak_1_to_4,\n self.all_times_nhist_peak_2_to_4,\n self.all_times_nhist_peak_3_to_4) = [None, None, None, None, None, None]\n (self.all_times_nhist_peak4_bin, self.all_times_nhist_peak3_bin,\n self.all_times_nhist_peak2_bin) = [None, None, None]\n if len(peaks) >= 2:\n self.all_times_nhist_peak_1_to_2 = peaks[0][0] / peaks[1][0]\n self.all_times_nhist_peak2_bin = peaks[1][1]\n if len(peaks) >= 3:\n self.all_times_nhist_peak_2_to_3 = peaks[1][0] / peaks[2][0]\n self.all_times_nhist_peak_1_to_3 = peaks[0][0] / peaks[2][0]\n self.all_times_nhist_peak3_bin = peaks[2][1]\n if len(peaks) >= 4:\n self.all_times_nhist_peak_1_to_4 = peaks[\n 0][0] / peaks[3][0]\n self.all_times_nhist_peak_2_to_4 = peaks[\n 1][0] / peaks[3][0]\n self.all_times_nhist_peak_3_to_4 = peaks[\n 2][0] / peaks[3][0]\n self.all_times_nhist_peak4_bin = peaks[3][1]\n\n self.avg_double_to_single_step = np.average(self.double_to_single_step)\n self.med_double_to_single_step = np.median(self.double_to_single_step)\n self.std_double_to_single_step = np.std(self.double_to_single_step)\n\n self.cads_std = np.std(self.cads)\n self.cads_avg = np.average(self.cads)\n self.cads_med = np.median(self.cads)\n\n self.cad_probs = {}\n for time in [1, 10, 20, 30, 40, 50, 100, 500, 1000, 5000, 10000, 50000,\n 100000, 500000, 1000000, 5000000, 10000000]:\n if self.time_unit == 'day':\n self.cad_probs[time] = stats.percentileofscore(\n self.cads, float(time) / (24.0 * 60.0)) / 100.0\n elif self.time_unit == 'hour':\n self.cad_probs[time] = stats.percentileofscore(\n self.cads, float(time)) / 100.0\n\n self.cad_probs_1 = self.cad_probs[1]\n self.cad_probs_10 = self.cad_probs[10]\n self.cad_probs_20 = self.cad_probs[20]\n self.cad_probs_30 = self.cad_probs[30]\n self.cad_probs_40 = self.cad_probs[40]\n self.cad_probs_50 = self.cad_probs[50]\n self.cad_probs_100 = self.cad_probs[100]\n 
self.cad_probs_500 = self.cad_probs[500]\n self.cad_probs_1000 = self.cad_probs[1000]\n self.cad_probs_5000 = self.cad_probs[5000]\n self.cad_probs_10000 = self.cad_probs[10000]\n self.cad_probs_50000 = self.cad_probs[50000]\n self.cad_probs_100000 = self.cad_probs[100000]\n self.cad_probs_500000 = self.cad_probs[500000]\n self.cad_probs_1000000 = self.cad_probs[1000000]\n self.cad_probs_5000000 = self.cad_probs[5000000]\n self.cad_probs_10000000 = self.cad_probs[10000000]", "def __init__(\n self,\n regressor: Any,\n sp: int = 1,\n deseasonal_model: str = \"additive\",\n degree: int = 1,\n window_length: int = 10,\n fe_target_rr: Optional[list] = None,\n ):\n self.regressor = regressor\n self.sp = sp\n self.deseasonal_model = deseasonal_model\n self.degree = degree\n self.window_length = window_length\n\n if fe_target_rr is None:\n # All target lags as features.\n # NOTE: Previously, this forecaster class used the `window_length` argument\n # in make_reduction. Now we have moved to using the `transformers` argument.\n # The order of columns matter for some models like tree based models\n # Hence we start with the furthest away lag and end with the most recent lag.\n # This behavior matches the behavior of the `window_length`` argument in\n # make_reduction which is used in this forecaster class.\n kwargs = {\n \"lag_feature\": {\"lag\": list(np.arange(self.window_length, 0, -1))}\n }\n self.fe_target_rr = [WindowSummarizer(**kwargs, n_jobs=1)]\n else:\n self.fe_target_rr = fe_target_rr\n\n super(BaseCdsDtForecaster, self).__init__()", "def fit_timeseries(xdates, ydata):\n\n pass", "def _local_ts(self, *data):\n arr = self.function(*data)\n if self.var.func_input_dtype == 'numpy':\n arr = xr.DataArray(arr, coords=self.coords)\n arr.name = self.name\n return arr", "def test_time_series_from_file():\r\n\r\n TR = 1.35\r\n ts_ff = io.time_series_from_file\r\n\r\n #File names:\r\n fmri_file1 = os.path.join(data_path,'fmri1.nii.gz')\r\n fmri_file2 = os.path.join(data_path,'fmri2.nii.gz')\r\n\r\n #Spatial coordinates into the volumes:\r\n coords1 = np.array([[5,5,5,5],[5,5,5,5],[1,2,3,4]])\r\n coords2 = np.array([[6,6,6,6],[6,6,6,6],[3,4,5,6]])\r\n\r\n #No averaging, no normalization:\r\n t1 = ts_ff([fmri_file1,fmri_file2],[coords1,coords2],TR)\r\n\r\n npt.assert_equal(t1[0].shape,(4,80)) # 4 coordinates, 80 time-points\r\n\r\n t2 = ts_ff([fmri_file1,fmri_file2],[coords1,coords2],TR,average=True)\r\n\r\n npt.assert_equal(t2[0].shape,(80,)) # collapse coordinates,80 time-points\r\n\r\n t3 = ts_ff(fmri_file1,coords1,TR,normalize='zscore')\r\n\r\n #The mean of each channel should be almost equal to 0:\r\n npt.assert_almost_equal(t3.data[0].mean(),0)\r\n #And the standard deviation should be almost equal to 1:\r\n npt.assert_almost_equal(t3.data[0].std(),1)\r\n\r\n t4 = ts_ff(fmri_file1,coords1,TR,normalize='percent')\r\n\r\n #In this case, the average is almost equal to 0, but no constraint on the\r\n #std:\r\n npt.assert_almost_equal(t4.data[0].mean(),0)\r\n\r\n #Make sure that we didn't mess up the sampling interval:\r\n npt.assert_equal(t4.sampling_interval,nitime.TimeArray(1.35))\r\n\r\n # Test the default behavior:\r\n data = io.load(fmri_file1).get_data()\r\n t5 = ts_ff(fmri_file1)\r\n npt.assert_equal(t5.shape, data.shape)\r\n npt.assert_equal(t5.sampling_interval, ts.TimeArray(1, time_unit='s'))\r\n\r\n # Test initializing TR with a TimeArray:\r\n t6= ts_ff(fmri_file1, TR=ts.TimeArray(1350, time_unit='ms'))\r\n npt.assert_equal(t4.sampling_interval, t6.sampling_interval)\r\n\r\n # Check the 
concatenation dimensions:\r\n t7 = ts_ff([fmri_file1, fmri_file2])\r\n npt.assert_equal([t7.shape[:3], t7.shape[-1]], [data.shape[:3], data.shape[-1]*2])\r\n\r\n t8 = ts_ff([fmri_file1, fmri_file2], average=True)\r\n npt.assert_equal(t8.shape[0], data.shape[-1]*2)\r\n\r\n t9 = ts_ff([fmri_file1, fmri_file2], average=True, normalize='zscore')\r\n npt.assert_almost_equal(t9.data.mean(), 0)", "def run(self, input_time_series=None, num_iter=None, record=False,\n output=False):\n pass", "def __init__(self):\n super().__init__()\n self.dynamic = True # from base class, indicates time-dependence is handled internally\n self.numBins = None # integer number of bins to use in creating the duration curve. TODO default?\n self.targets = None # list of strings, variables to apply postprocessor to", "def __ror__(self, other):\n return self._dunder_concat(\n other=other,\n base_class=BaseForecaster,\n composite_class=MultiplexForecaster,\n attr_name=\"forecasters\",\n concat_order=\"right\",\n )", "def __iter__(self):\n return self._timeseriesData.__iter__()", "def __init__(self, pandas_dataframe, dates_column, target_column, regressors=None, train_test_split=0.66, seed=7,\n look_back=1, look_forward=1, interval=0):\n data = pd.DataFrame(index=pandas_dataframe[dates_column].values, data=pandas_dataframe[target_column].values)\n # Calculate the training set size\n train_size = int(len(data)*train_test_split)\n # Scale the data pre-train/test split\n scaler = MinMaxScaler(feature_range=(0, 1))\n self.scaler = scaler\n data = scaler.fit_transform(data)\n # Get the time series as stationary (for the given interval, if 0 don't make it a series of 0)\n if interval > 0:\n data = difference(data, interval)\n # Map the series to a supervised problem (values for days 1-n with regressors for these days to predict days\n # n + 1 ... 
n + k\n x, y = timeseries_to_supervised(data, look_back=look_back, look_forward=look_forward)\n # Split train and test\n self.x_train, self.y_train = x[:train_size], y[:train_size]\n self.x_test, self.y_test = x[train_size:], y[train_size:]\n # Use regressors if required\n if regressors is not None:\n self.x_train, self.x_test = add_regressors(self.x_train, self.x_test, regressors, pandas_dataframe,\n dates_column, look_forward, look_back)\n # Set last attributes\n self.seed = seed\n self.look_back = look_back\n self.look_forward = look_forward\n self.regressors = regressors", "def forward(self, x, dt, do_normalization=True):\n raise NotImplementedError", "def create_forecast_dataset(self):\n pass", "def _get_raw_data(self, idx=0):\n # Get the time step then make a data frame\n raise NotImplementedError('Code me up!')\n #data = self._data[???]\n return data", "def simple_time_series(full_df, test_period, display_graphs=True):\n df = full_df.copy()\n df = df.filter([\"Canteen\"])\n\n train = df.iloc[:-test_period]\n test = df.iloc[-test_period:]\n\n resulting_prediction, predictions = prediction(train, test)\n\n if display_graphs is True:\n plt.figure(figsize=(14, 7))\n plt.plot(train)\n plt.plot(resulting_prediction)\n plt.legend([\"Real values\", \"Prediction\"], loc=\"best\")\n plt.xlabel(\"Date\")\n plt.ylabel(\"Number of people\")\n\n print(\n \"The mean absolute error (MAE) for the Simple Time Series model is {0:.0f} people\".format(\n find_MAE(test, predictions)\n )\n )", "def time_series(self, length):\n maker = r.Recomposer(self._components, self.bias)\n return maker.time_series(length)", "def __init__(self, dataarray, only_tri=False, silence_level=0):\n\n # only_tri will calculate the upper triangle excluding the diagonal\n # only. This assumes stationarity on the time series\n self.only_tri = only_tri\n\n # Set silence level\n self.silence_level = silence_level\n\n # Flatten observable anomaly array along lon/lat dimension to allow\n # for more convinient indexing and transpose the whole array as this\n # is faster in loops\n if numpy.ndim(dataarray) == 4:\n (self.total_time, n_lev, n_lat, n_lon) = dataarray.shape\n self.N = n_lev * n_lat * n_lon\n self.dataarray = dataarray.reshape(-1, self.N).T.copy()\n if numpy.ndim(dataarray) == 3:\n (self.total_time, n_lat, n_lon) = dataarray.shape\n self.N = n_lat * n_lon\n self.dataarray = dataarray.reshape(-1, self.N).T.copy()\n\n elif numpy.ndim(dataarray) == 2:\n (self.total_time, self.N) = dataarray.shape\n self.dataarray = dataarray.T.copy()\n\n else:\n print(\"irregular array shape...\")\n self.dataarray = dataarray.T.copy()\n\n # factorials below 10 in a list for permutation patterns\n self.factorial = \\\n numpy.array([1, 1, 2, 6, 24, 120, 720, 5040, 40320, 362880])\n self.patternized = False\n self.has_fft = False\n self.originalFFT = None\n\n # lag_mode dict\n self.lag_modi = {\"all\": 0, \"sum\": 1, \"max\": 2}", "def _assess(self):\n # get eruptions\n with open(os.sep.join(getfile(currentframe()).split(os.sep)[:-2]+['data','eruptive_periods.txt']),'r') as fp:\n self.tes = [datetimeify(ln.rstrip()) for ln in fp.readlines()]\n # check if data file exists\n self.exists = os.path.isfile(self.file)\n if not self.exists:\n t0 = datetime(2011,1,1)\n t1 = datetime(2011,1,2)\n self.update(t0,t1)\n # check date of latest data in file\n self.df = pd.read_csv(self.file, index_col=0, parse_dates=[0,], infer_datetime_format=True)\n self.ti = self.df.index[0]\n self.tf = self.df.index[-1]", "def _get_timeseries_class():\n global 
_timeseries_class\n if not _timeseries_class:\n from energyquantified.data import Timeseries\n _timeseries_class = Timeseries\n return _timeseries_class", "def forward(self, x, dt, do_normalization=True):\n return x", "def calculate_timebase_features(self, X: pd.DataFrame) -> pd.DataFrame:\n X = self._add_lagged_features(X, [1, 3, 7, 14, 21, 365])\n\n X = self._add_rolling(X, 'mean', [5, 50])\n X = self._add_rolling(X, 'min', [5, 50])\n X = self._add_rolling(X, 'max', [5, 50])\n\n return X", "def analytic(self):\r\n data = self.input.data\r\n sampling_rate = self.input.sampling_rate\r\n #If you have scipy with the fixed scipy.signal.hilbert (r6205 and\r\n #later)\r\n if scipy.__version__ >= '0.9':\r\n hilbert = signal.hilbert\r\n else:\r\n hilbert = tsu.hilbert_from_new_scipy\r\n\r\n return ts.TimeSeries(data=hilbert(data),\r\n sampling_rate=sampling_rate)", "def feature_engineer_ts(self, month=12):\n st_data_dt = self.get_st_data_dt()\n end_data_dt = self.get_end_data_dt()\n date_list = pd.date_range(*(pd.to_datetime([st_data_dt, end_data_dt]) + pd.offsets.MonthEnd()), freq='M').to_list()\n population = self.get_population()\n is_raw_partition = self.get_is_raw_partition()\n# Lag 2 months\n all_data = []\n# join past is_raw columns\n for d in date_list:\n \n population_partition = population[population['ft_data_dt'] == d] \n old_date = d - relativedelta(months=month)\n y = old_date.year\n m = old_date.month\n day = calendar.monthrange(y, m)[1]\n old_date = date(y, m, day)\n old_date = max(old_date, st_data_dt)\n date_list_join = pd.date_range(*(pd.to_datetime([old_date, d]) + pd.offsets.MonthEnd()), freq='M').to_list()\n date_list_join.reverse()\n for index, date_join in enumerate(date_list_join):\n if date_join.strftime(\"%Y-%m-%d\") not in is_raw_partition.keys():\n continue\n \n tmp_is_raw_partition = is_raw_partition[date_join.strftime(\"%Y-%m-%d\")]\n \n rename_col = [c for c in list(tmp_is_raw_partition.columns) if c not in ['idd', 'ft_data_dt']]\n new_col = [c+'_'+str(index+1) for c in rename_col]\n name_dict = dict(list(zip(rename_col, new_col)))\n tmp_is_raw_partition = tmp_is_raw_partition.rename(columns = name_dict)\n population_partition = population_partition.merge(tmp_is_raw_partition.drop(columns=['ft_data_dt']), on=['idd'], how='left')\n all_data.append(population_partition)\n ts_df = pd.concat(all_data)\n threshold_null = len(ts_df.columns) - 4\n ts_df = ts_df[ts_df.isnull().sum(axis=1) < threshold_null]\n \n def sum_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_sum_'+str(duration)+'mth'\n tmp_df = df[col_list].sum(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def mean_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_avg_'+str(duration)+'mth'\n tmp_df = df[col_list].mean(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def std_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_std_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def med_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_med_'+str(duration)+'mth'\n tmp_df = df[col_list].std(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def min_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_min_'+str(duration)+'mth'\n tmp_df = df[col_list].min(axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def max_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_max_'+str(duration)+'mth'\n tmp_df = df[col_list].max(axis = 1).to_frame(name=ft_name)\n return 
tmp_df\n \n def q1_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q1_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.25, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def q3_ts(self, df, col_list, feature, duration):\n ft_name = feature+ '_q3_'+str(duration)+'mth'\n tmp_df = df[col_list].quantile(q=0.75, axis = 1).to_frame(name=ft_name)\n return tmp_df\n \n def last_ts(self, df, feature):\n ft_name = feature+ '_last'\n tmp_df = df[feature+'_'+str(1)].to_frame(name=ft_name)\n return tmp_df\n \n ts_duration = [1, 3, 6, 9, 12]\n feature_list = self.get_is_raw_col()\n df = ts_df[['idd', 'ft_data_dt']]\n# Time Series Features\n for duration in ts_duration:\n for col in feature_list:\n col_list = [col+'_'+str(i) for i in range(1, duration+1)]\n df = pd.concat([df\\\n , sum_ts(self, ts_df, col_list, col, duration)\\\n , mean_ts(self, ts_df, col_list, col, duration)\\\n , med_ts(self, ts_df, col_list, col, duration)\\\n , q1_ts(self, ts_df, col_list, col, duration)\\\n , q3_ts(self, ts_df, col_list, col, duration)\\\n , min_ts(self, ts_df, col_list, col, duration)\\\n , max_ts(self, ts_df, col_list, col, duration)]\n , axis=1)\n self.set_all_data(df)", "def during(self, e):\r\n\r\n if not isinstance(e, Epochs):\r\n raise ValueError('e has to be of Epochs type')\r\n\r\n if e.data.ndim == 0:\r\n return TimeSeries(data=self.data[..., self.time.slice_during(e)],\r\n time_unit=self.time_unit, t0=e.offset,\r\n sampling_rate=self.sampling_rate)\r\n else:\r\n # TODO: make this a more efficient implementation, naive first pass\r\n if (e.duration != e.duration[0]).any():\r\n raise ValueError(\"All epochs must have the same duration\")\r\n\r\n data = np.array([self.data[..., self.time.slice_during(ep)]\r\n for ep in e])\r\n\r\n return TimeSeries(data=data,\r\n time_unit=self.time_unit, t0=e.offset,\r\n sampling_rate=self.sampling_rate)", "def dataTimeSeries(timesteps,df,predictors,target,dropnan,out=2,dropVars=True): \r\n \r\n series = series_to_supervised(df[predictors+[target]].copy(),timesteps,out,dropnan=dropnan)\r\n \r\n if dropnan==False:\r\n series.replace(pd.np.nan,0,inplace=True)\r\n \r\n # Dropping other variables:\r\n if dropVars:\r\n index = list(np.arange(series.shape[1]-2,\r\n series.shape[1]-len(predictors)-2,\r\n -1))\r\n \r\n labels = [item for idx,item in enumerate(series.columns) \r\n if idx in index]\r\n \r\n #print(\"Eliminando variáveis: {}\".format(labels))\r\n series.drop(labels,axis=1,inplace=True) \r\n \r\n return series", "def to_timeseries(self, dataset_name, light=False):\n timeseries = tokio.timeseries.TimeSeries()\n timeseries.dataset_name = dataset_name\n\n try:\n dataset = self[dataset_name]\n except KeyError:\n # can't attach because dataset doesn't exist; pass this back to caller so it can init\n return None\n\n timeseries.dataset = dataset if light else dataset[:, :]\n\n # load and decode version of dataset and file schema\n timeseries.global_version = self['/'].attrs.get('version')\n timeseries.version = self.get_version(dataset_name)\n if isinstance(timeseries.version, bytes):\n timeseries.version = timeseries.version.decode()\n\n # copy columns into memory\n columns = self.get_columns(dataset_name)\n timeseries.set_columns(columns)\n\n # copy metadata into memory\n for key, value in dataset.attrs.items():\n if isinstance(value, bytes):\n timeseries.dataset_metadata[key] = value.decode()\n else:\n timeseries.dataset_metadata[key] = value\n for key, value in dataset.parent.attrs.items():\n if isinstance(value, bytes):\n 
timeseries.group_metadata[key] = value.decode()\n else:\n timeseries.group_metadata[key] = value\n\n timeseries.timestamp_key = get_timestamps_key(self, dataset_name)\n timeseries.timestamps = self[timeseries.timestamp_key]\n timeseries.timestamps = timeseries.timestamps if light else timeseries.timestamps[:]\n\n timeseries.timestep = timeseries.timestamps[1] - timeseries.timestamps[0]\n return timeseries", "def get_perf(self) :\n self.train()\n self.df_true = self.df_true[self.length:]\n self.accuracy , self.recall, self.specificity, self.profit, self.min , self.max = get_accuracy_LSTM(self.df_test, self.df_true,self.model, self.length)", "def __init__(self):\n self.last_obs = -1.0\n self.last_timestamp = -1.0\n self._fitted = False", "def transform(self, y=None):\n forecast_dates = self.X[[\"dt_time\", \"month_day\"]]\n weather_avg = pd.read_csv(\"../data/weather_averages.csv\")\n weather_fcst = weather_avg[\n [\"DATE\", \"DLY-TMAX-NORMAL\", \"DLY-PRCP-50PCTL\", \"DLY-SNOW-50PCTL\"]\n ]\n weather_fcst[\"DATE\"] = pd.to_datetime(\n weather_fcst[\"DATE\"].astype(\"str\"), format=\"%Y%m%d\", errors=\"ignore\"\n )\n weather_fcst[\"month_day\"] = weather_fcst[\"DATE\"].dt.strftime(\"%m/%d\")\n weather_fcst = weather_fcst[\n [\"month_day\", \"DLY-PRCP-50PCTL\", \"DLY-TMAX-NORMAL\", \"DLY-SNOW-50PCTL\"]\n ].rename(\n columns={\n \"DLY-PRCP-50PCTL\": \"precip\",\n \"DLY-TMAX-NORMAL\": \"temp_max\",\n \"DLY-SNOW-50PCTL\": \"snow\",\n }\n )\n weather_fcst[\"snow\"] = 0.0\n weather_fcst = forecast_dates.join(\n weather_fcst.set_index(\"month_day\"), on=\"month_day\"\n )\n near_term_weather = seattle_weather_fcst()\n\n for i in range(len(near_term_weather)):\n weather_fcst[\"temp_max\"][\n weather_fcst[\"dt_time\"] == near_term_weather[\"date\"][i]\n ] = near_term_weather[\"temp_max\"][i]\n if near_term_weather[\"precip_bool\"][i] == 0:\n weather_fcst[\"precip\"][\n weather_fcst[\"dt_time\"] == near_term_weather[\"date\"][0]\n ] = 0\n\n self.X[[\"precip\", \"temp_max\", \"snow\"]] = weather_fcst[\n [\"precip\", \"temp_max\", \"snow\"]\n ]\n return self.X.drop(\n columns=[\n \"dt_time\",\n \"year\",\n \"month\",\n \"day\",\n \"day_of_week\",\n \"month_day\",\n \"month_weekday\",\n \"spec_day\",\n ]\n )", "def __init__(self, data, ts):\n self.data = data\n self.ts = ts", "def to_timeseries(self) -> TimeseriesDataset:\n return TimeseriesDataset(self.data, provenance=self.provenance)", "def datetime_features(\n s: pd.Series, result: Optional[pd.DataFrame] = None\n) -> pd.DataFrame:\n result = date_features(s, result)\n return time_features(s, result)", "def update(self, y_true: list[Number], y_pred: list[Number]) -> ForecastingMetric:", "def __init__(self, t, data):\n self.t = t\n self.data = data\n self.find_rising_edges()\n self.find_falling_edges()\n self.find_high_periods()\n self.find_rising_edge_periods()\n self.find_falling_edge_periods()", "def DataTrimmer(datatuple):\r\n TickerName = datatuple[1]\r\n\r\n df = pd.DataFrame(datatuple[0]).transpose()\r\n df.index = pd.DatetimeIndex(df.index)\r\n df = df[df.index >= datetime(2020,1,1)] # used 1/1/2020 because first Covid19 case in the US happened on 1/20/2020\r\n df = df.sort_index()\r\n df = df.rename(columns={\"4. 
close\":TickerName})\r\n\r\n outputseries = df[TickerName].astype(float)\r\n\r\n return outputseries", "def _load_data(self, ti, tf):\n # return pre loaded\n try:\n if ti == self.ti_prev and tf == self.tf_prev:\n return self.fM, self.ys\n except AttributeError:\n pass\n\n # read from CSV file\n try:\n t = pd.to_datetime(pd.read_csv(self.featfile, index_col=0, parse_dates=['time'], usecols=['time'], infer_datetime_format=True).index.values)\n if (t[0] <= ti) and (t[-1] >= tf):\n self.ti_prev = ti\n self.tf_prev = tf\n fM,ys = self._extract_features(ti,tf)\n self.fM = fM\n self.ys = ys\n return fM,ys\n except FileNotFoundError:\n pass\n\n # range checking\n if tf > self.data.tf:\n raise ValueError(\"Model end date '{:s}' beyond data range '{:s}'\".format(tf, self.data.tf))\n if ti < self.data.ti:\n raise ValueError(\"Model start date '{:s}' predates data range '{:s}'\".format(ti, self.data.ti))\n \n # divide training period into years\n ts = [datetime(*[yr, 1, 1, 0, 0, 0]) for yr in list(range(ti.year+1, tf.year+1))]\n if ti - self.dtw < self.data.ti:\n ti = self.data.ti + self.dtw\n ts.insert(0,ti)\n ts.append(tf)\n\n for t0,t1 in zip(ts[:-1], ts[1:]):\n print('feature extraction {:s} to {:s}'.format(t0.strftime('%Y-%m-%d'), t1.strftime('%Y-%m-%d')))\n fM,ys = self._extract_features(ti,t1)\n\n self.ti_prev = ti\n self.tf_prev = tf\n self.fM = fM\n self.ys = ys\n return fM, ys", "def data_from_dataframe(self, dataframe):\n self.dataframe = dataframe.drop_duplicates()\n #Convert numerical values into float type\n self.dataframe.apply(pandas.to_numeric, errors='ignore')\n #Convert timestamps into regular dates\n time_range = [datetime.datetime.fromtimestamp(time) for time in list(self.dataframe['time'])]\n beg = time_range[0]\n end = time_range[len(time_range)-1]\n #Attribute begining and ending dates\n self.beg = beg\n self.end = end", "def _tseries_from_nifti_helper(coords, data, TR, filter, normalize, average):\r\n if coords is not None:\r\n out_data = np.asarray(data[coords[0], coords[1], coords[2]])\r\n else:\r\n out_data = data\r\n\r\n tseries = ts.TimeSeries(out_data, sampling_interval=TR)\r\n\r\n if filter is not None:\r\n if filter['method'] not in ('boxcar', 'fourier', 'fir', 'iir'):\r\n e_s = \"Filter method %s is not recognized\" % filter['method']\r\n raise ValueError(e_s)\r\n else:\r\n #Construct the key-word arguments to FilterAnalyzer:\r\n kwargs = dict(lb=filter.get('lb', 0),\r\n ub=filter.get('ub', None),\r\n boxcar_iterations=filter.get('boxcar_iterations', 2),\r\n filt_order=filter.get('filt_order', 64),\r\n gpass=filter.get('gpass', 1),\r\n gstop=filter.get('gstop', 60),\r\n iir_ftype=filter.get('iir_ftype', 'ellip'),\r\n fir_win=filter.get('fir_win', 'hamming'))\r\n\r\n F = tsa.FilterAnalyzer(tseries, **kwargs)\r\n\r\n if filter['method'] == 'boxcar':\r\n tseries = F.filtered_boxcar\r\n elif filter['method'] == 'fourier':\r\n tseries = F.filtered_fourier\r\n elif filter['method'] == 'fir':\r\n tseries = F.fir\r\n elif filter['method'] == 'iir':\r\n tseries = F.iir\r\n\r\n if normalize == 'percent':\r\n tseries = tsa.NormalizationAnalyzer(tseries).percent_change\r\n elif normalize == 'zscore':\r\n tseries = tsa.NormalizationAnalyzer(tseries).z_score\r\n\r\n if average:\r\n if coords is None:\r\n tseries.data = np.mean(np.reshape(tseries.data,\r\n (np.array(tseries.shape[:-1]).prod(),\r\n tseries.shape[-1])),0)\r\n else:\r\n tseries.data = np.mean(tseries.data, 0)\r\n\r\n return tseries", "def test_TimeSeries_repr():", "def during(self, e):\r\n\r\n if not isinstance(e, 
Epochs):\r\n raise ValueError('e has to be of Epochs type')\r\n\r\n if e.data.ndim > 0:\r\n ## TODO: Implement slicing with 1-d Epochs array,\r\n ## resulting in (ragged/jagged) 2-d TimeArray\r\n raise NotImplementedError('e has to be a scalar Epoch')\r\n\r\n return self[self.slice_during(e)]", "def to_work_series(self, data: pd.Series) -> pd.Series:\n ...", "def time_series(t, f=0.02):\n T = t.size\n # Seasonal component and time-varying trend component\n ys = np.sin(2 * np.pi * f * t) * 0.6 + np.sin(1 / 5 * 2 * np.pi * f * t) * 0.2\n # Amplitude modulation component\n amp_mod = 0.5 * np.sin(1 / 6 * 2 * np.pi * f * t) + 0.8\n ys *= amp_mod\n ys = np.reshape(ys, (T,1))\n return ys", "def TimeFrame(*args, **kwargs):\n underride(kwargs, dtype=float)\n return pd.DataFrame(*args, **kwargs)", "def transform(self, y=None):\n\n df = self.X.copy()\n num_days = (\n int(\n np.timedelta64((max(df[\"date\"]) - min(df[\"date\"])), \"D\")\n / np.timedelta64(1, \"D\")\n )\n + 1\n )\n start = pd.to_datetime(min(df[\"date\"]))\n dates = [(start + np.timedelta64(i, \"D\")) for i in range(num_days)]\n\n seq = pd.DataFrame({\"dt_time\": dates, \"day_seq\": np.arange(num_days)})\n seq[\"date\"] = seq[\"dt_time\"].dt.date\n\n df1 = df.join(seq.set_index(\"date\"), on=\"date\")\n\n df1[\"year\"] = df1[\"dt_time\"].dt.year\n df1[\"month\"] = df1[\"dt_time\"].dt.month\n df1[\"day\"] = df1[\"dt_time\"].dt.day\n df1[\"day_of_week\"] = df1[\"dt_time\"].dt.weekday\n df1[\"month_day\"] = df1[\"dt_time\"].dt.strftime(\"%m/%d\")\n df1[\"month_weekday\"] = df1[\"dt_time\"].dt.strftime(\"%b_%a\")\n df1[\"month\"] = df1[\"dt_time\"].dt.strftime(\"%m/%d\")\n return df1", "def time_stats(df):", "def test_model_time_series(self):\n job = DarshanIngestedJob(label=\"job\", file_details={})\n job.time_start = 0\n\n # With no file data, we should end up with empty time series\n series = job.model_time_series()\n self.assertEqual(len(series), 0)\n\n # Otherwise, there should be time-series created with the correct totals\n\n file1 = DarshanIngestedJobFile(name=\"file1\")\n file2 = DarshanIngestedJobFile(name=\"file2\")\n\n file1.bytes_read = 1024 * 99\n file2.bytes_read = 1024 * 100\n file1.read_time_start = 11.2\n file1.read_time_end = 13.2\n file2.read_time_start = 13.5\n file2.read_time_end = 15.0\n\n file1.bytes_written = 1024 * 101\n file2.bytes_written = 1024 * 102\n file1.write_time_start = 19.2\n file1.write_time_end = 21.3\n file2.write_time_start = 32\n file2.write_time_end = 35\n\n job.file_details = {\"file1\": file1, \"file2\": file2}\n\n series = job.model_time_series()\n self.assertIn('kb_read', series)\n self.assertIn('kb_write', series)\n reads = series['kb_read']\n writes = series['kb_write']\n\n self.assertEqual(reads.sum, 199)\n self.assertEqual(writes.sum, 203)\n self.assertEqual(len(reads.xvalues), 2)\n self.assertEqual(len(writes.xvalues), 2)\n self.assertEqual(len(reads.yvalues), 2)\n self.assertEqual(len(writes.yvalues), 2)\n self.assertEqual(set(reads.xvalues), {11.2, 13.5})\n self.assertEqual(set(writes.xvalues), {19.2, 32})\n self.assertEqual(set(reads.yvalues), {99.0, 100.0})\n self.assertEqual(set(writes.yvalues), {101, 102})", "def series_to_supervised(data, n_in=1, n_out=1, dropnan=True, stride=None, dates=False, leaks=True):\n df = pd.DataFrame(data)\n \n time = None\n if 'date' in df.columns:\n time = 'date'\n elif 'time' in df.columns:\n time = 'time'\n if time != None:\n df = df.drop([time], axis=1)\n \n if 'leak' in df.columns:\n df = df.drop(['leak'], axis=1) \n n_vars = df.shape[1]\n 
times_column = list()\n if dates and time != None:\n times_column = data[time]\n del data\n \n cols, names, pivots = list(), list(), list()\n \n # input sequence (t-n, ... t-1)\n for i in range(n_in, 0, -1):\n cols.append(df.shift(i))\n names += [('var%d(t-%d)' % (j+1, i)) for j in range(n_vars)]\n\t# forecast sequence (t, t+1, ... t+n)\n for i in range(0, n_out):\n cols.append(df.shift(-i))\n if i == 0:\n names += [('var%d(t)' % (j+1)) for j in range(n_vars)]\n else:\n names += [('var%d(t+%d)' % (j+1, i)) for j in range(n_vars)]\n\t# put it all together\n agg = pd.concat(cols, axis=1)\n \n agg.columns = names\n\n #stride - delete windows\n if stride != None:\n indexes_to_drop = list()\n for i in range(stride, agg.shape[0], stride):\n print(\"index\", i)\n pivots += [i]\n \n onset = 0\n offset = pivots[0]\n for i in range(0, len(pivots)):\n print(\"onset\", onset)\n print(\"offset\", offset)\n to_drop = [ x for x in range(onset,offset)]\n indexes_to_drop += to_drop\n try:\n onset = pivots[i] + 1\n offset = pivots[i+1]\n \n except IndexError:\n onset = pivots[i] + 1\n offset = agg.shape[0]\n to_drop = [ x for x in range(onset,offset)]\n indexes_to_drop += to_drop\n \n \n \n print(\"indexes_to_drop\", indexes_to_drop)\n \n agg.drop(df.index[indexes_to_drop], inplace=True)\n \"\"\"\n if dates and time!=None:\n agg[time] = times_column\n \"\"\" \n # drop rows with NaN values \n if dropnan:\n agg.dropna(inplace=True)\n \n\n return agg", "def transform(self, y=None):\n num_days = (\n int(\n np.timedelta64(\n pd.to_datetime(self.end_date) - pd.to_datetime(self.start_date), \"D\"\n )\n / np.timedelta64(1, \"D\")\n )\n + 1\n )\n dates = [\n (pd.to_datetime(self.start_date) + np.timedelta64(i, \"D\"))\n for i in range(num_days)\n ]\n start_seq = int(\n (\n np.timedelta64(\n pd.to_datetime(self.start_date) - pd.to_datetime(self.model_end[0]),\n \"D\",\n )\n + self.model_end[1]\n )\n / np.timedelta64(1, \"D\")\n )\n df = pd.DataFrame(\n {\"dt_time\": dates, \"day_seq\": np.arange(start_seq, start_seq + num_days)}\n )\n df[\"date\"] = df[\"dt_time\"].dt.date\n df[\"year\"] = df[\"dt_time\"].dt.year\n df[\"month\"] = df[\"dt_time\"].dt.month\n df[\"day\"] = df[\"dt_time\"].dt.day\n df[\"day_of_week\"] = df[\"dt_time\"].dt.weekday\n df[\"month_day\"] = df[\"dt_time\"].dt.strftime(\"%m/%d\")\n df[\"month_weekday\"] = df[\"dt_time\"].dt.strftime(\"%b_%a\")\n return df", "def TimeSeries(self, header):\n data = self.DictData()\n time_series = [ (row[ \"Date\" ], float(row[ header ]) )for row in data ]\n return time_series", "def __init__(self, dataset_name, device):\r\n\r\n dataFrame = pandas.read_csv(dataset_name)\r\n\r\n Y = dataFrame.values[:,1:]\r\n Yt = Y.transpose()\r\n\r\n #create the input time series for the model, with one unit of delay, is no model parameter, no grad needed\r\n #yInput = Variable(torch.Tensor(Yt[:, :-1]).type(dtype), requires_grad = False).to(device)\r\n self.yInput = torch.tensor(Yt[:, :-1], dtype=torch.float, device=device, requires_grad=False)\r\n \r\n # create the target or ground truth data\r\n #yTarget = Variable(torch.Tensor(Yt[:, 1:]).type(dtype), requires_grad = False).to(device)\r\n self.yTarget = torch.tensor(Yt[:, 1:], dtype=torch.float, device=device, requires_grad=False)\r\n \r\n # Normalizes values\r\n self.yInput = self.yInput / torch.max(self.yInput)\r\n self.yTarget = self.yTarget / torch.max(self.yTarget)", "async def _forecast_single(\n self,\n model: Prophet\n ) -> pd.DataFrame:\n future = model.make_future_dataframe(self._periods, 'H', False)\n return 
model.predict(future)", "def data_preprocessing(dataset):\r\n df = pd.read_csv(dataset)\r\n df.head()\r\n df.describe()\r\n df.isnull().sum()\r\n df= df.drop(['instant'], axis=1)\r\n df['dteday'] = pd.to_datetime(df['dteday'].apply(str) + ' ' + df['hr'].apply(str) + ':00:00')\r\n return df", "def evaluate_timeseries(timeseries, window_size):\n filter_length = 5\n nb_filter = 4\n timeseries = np.atleast_2d(timeseries)\n if timeseries.shape[0] == 1:\n timeseries = timeseries.T # Convert 1D vectors to 2D column vectors\n\n nb_samples, nb_series = timeseries.shape\n print('\\n\\nTimeseries ({} samples by {} series):\\n'.format(nb_samples, nb_series), timeseries)\n model = make_timeseries_regressor(window_size=window_size, filter_length=filter_length, nb_input_series=nb_series, nb_outputs=nb_series, nb_filter=nb_filter)\n print('\\n\\nModel with input size {}, output size {}, {} conv filters of length {}'.format(model.input_shape, model.output_shape, nb_filter, filter_length))\n model.summary()\n\n X, y, q = make_timeseries_instances(timeseries, window_size)\n print('\\n\\nInput features:', X, '\\n\\nOutput labels:', y, '\\n\\nQuery vector:', q, sep='\\n')\n test_size = int(0.01 * nb_samples) # In real life you'd want to use 0.2 - 0.5\n X_train, X_test, y_train, y_test = X[:-test_size], X[-test_size:], y[:-test_size], y[-test_size:]\n model.fit(X_train, y_train, nb_epoch=25, batch_size=2, validation_data=(X_test, y_test))\n\n pred = model.predict(X_test)\n print('\\n\\nactual', 'predicted', sep='\\t')\n for actual, predicted in zip(y_test, pred.squeeze()):\n print(actual.squeeze(), predicted, sep='\\t')\n print('next', model.predict(q).squeeze(), sep='\\t')", "def convert_learning_data(timesteps, xaxis='timesteps'):\n if xaxis == 'timesteps':\n x_values = np.cumsum(timesteps.l.values)\n y_values = timesteps.r.values\n elif xaxis == 'episodes':\n x_values = np.arange(len(timesteps))\n y_values = timesteps.r.values\n elif xaxis == 'walltime_hrs':\n x_values = timesteps.t.values / 3600.\n y_values = timesteps.r.values\n else:\n raise NotImplementedError\n return pd.Series(y_values, index=x_values)", "def __pos__(self):\n ts = self._fsm.get(self._id)\n return SMTimeSeries(ts._time, ts._value, self._fsm)", "def detrend(x):\n\n t = x['t']\n f = x['f']\n t0 = np.mean(x['t'])\n time_since_transit = t - t0\n\n # select out just the continuum points\n continuum = x['continuum']==1\n\n pfit = np.polyfit(\n time_since_transit[continuum], f[continuum], poly_degree\n )\n\n fldt = f.copy()\n fldt -= np.polyval(pfit,time_since_transit)\n return fldt", "def _create_historic_forecasts(\n data, time_dt, frt_dt, standard_grid_metadata=\"uk_ens\", number_of_days=5, **kwargs\n):\n historic_forecasts = iris.cube.CubeList([])\n for day in range(number_of_days):\n new_frt_dt = frt_dt + datetime.timedelta(days=day)\n new_time_dt = time_dt + datetime.timedelta(days=day)\n historic_forecasts.append(\n set_up_variable_cube(\n data - 2 + 0.2 * day,\n time=new_time_dt,\n frt=new_frt_dt,\n standard_grid_metadata=standard_grid_metadata,\n **kwargs,\n )\n )\n return historic_forecasts", "def reprocessSeries(self, tiltseriesdata):\n\t\treturn None", "def FE_create_time_series_features(dft, ts_column, ts_adds_in=[]):\r\n dtf = copy.deepcopy(dft)\r\n reset_index = False\r\n try:\r\n # ts_column = None assumes that that index is the time series index\r\n reset_index = False\r\n if ts_column is None:\r\n reset_index = True\r\n ts_column = dtf.index.name\r\n dtf = dtf.reset_index()\r\n\r\n ### In some extreme cases, date time vars 
are not processed yet and hence we must fill missing values here!\r\n null_nums = dtf[ts_column].isnull().sum()\r\n if null_nums > 0:\r\n # missing_flag = True\r\n new_missing_col = ts_column + '_Missing_Flag'\r\n dtf[new_missing_col] = 0\r\n dtf.loc[dtf[ts_column].isnull(),new_missing_col]=1\r\n dtf[ts_column].fillna(method='ffill', inplace=True)\r\n print(' adding %s column due to missing values in data' %new_missing_col)\r\n if dtf[dtf[ts_column].isnull()].shape[0] > 0:\r\n dtf[ts_column].fillna(method='bfill', inplace=True)\r\n\r\n if dtf[ts_column].dtype == float:\r\n dtf[ts_column] = dtf[ts_column].astype(int)\r\n\r\n ### if we have already found that it was a date time var, then leave it as it is. Thats good enough!\r\n items = dtf[ts_column].apply(str).apply(len).values\r\n #### In some extreme cases,\r\n if all(items[0] == item for item in items):\r\n if items[0] == 4:\r\n ### If it is just a year variable alone, you should leave it as just a year!\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column],format='%Y')\r\n ts_adds = []\r\n else:\r\n ### if it is not a year alone, then convert it into a date time variable\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n else:\r\n dtf[ts_column] = pd.to_datetime(dtf[ts_column], infer_datetime_format=True)\r\n ### this is where you create the time series features #####\r\n dtf, ts_adds = _create_ts_features(df=dtf, tscol=ts_column)\r\n if not ts_adds_in:\r\n ts_adds_copy = dtf[ts_adds].select_dtypes(include='number').columns.tolist()\r\n ### drop those columns where all rows are same i.e. zero variance ####\r\n for col in ts_adds_copy:\r\n if dtf[col].std() == 0:\r\n dtf.drop(col, axis=1, inplace=True)\r\n print(' dropping column due to zero variance in %s column' %col)\r\n ts_adds.remove(col)\r\n else:\r\n rem_cols = left_subtract(dtf.columns.tolist(), ts_adds_in)\r\n dtf = dtf[rem_cols+ts_adds_in]\r\n\r\n # If you had reset the index earlier, set it back before returning\r\n # to make it consistent with the dataframe that was sent as input\r\n if reset_index:\r\n dtf = dtf.set_index(ts_column)\r\n elif ts_column in dtf.columns:\r\n dtf.drop(ts_column, axis=1, inplace=True)\r\n else:\r\n pass\r\n except Exception as e:\r\n print(e)\r\n print('Error in Processing %s column for date time features. Continuing...' 
%ts_column)\r\n return dtf, ts_adds", "def transform(self, resampled_xray, n_burn_in, n_lookahead, skf_is): \n # Set all temps on world map as features\n #valid_range = range(n_burn_in, temperatures_xray['time'].shape[0] - n_lookahead)\n #time_steps, lats, lons = temperatures_xray['tas'].values.shape\n #X = temperatures_xray['tas'].values.reshape((time_steps,lats*lons))\n #X = X[valid_range,:]\n\n tas = select_box(resampled_xray['tas']) \n\n valid_range = range(n_burn_in, resampled_xray['time'].shape[0] - n_lookahead)\n #enso = get_enso_mean(temperatures_xray['tas'])\n # reshape the vector into a table years as rows, months as columns\n #enso_matrix = enso.values.reshape((-1,12))\n\n theShape = tas.shape\n n_time,n_lat,n_long = theShape[0],theShape[1],theShape[2] \n #print n_time,n_lat,n_long \n enso_matrix = tas.values.reshape(-1,12,n_lat,n_long)\n\n count_matrix = np.ones(enso_matrix.shape)\n # compute cumulative means of columns (remember that you can only use\n # the past at each time point) and reshape it into a vector\n enso_monthly_mean = (enso_matrix.cumsum(axis=0) / count_matrix.cumsum(axis=0)).reshape(-1,n_lat,n_long)#.ravel()\n # roll it backwards (6 months) so it corresponds to the month of the target\n\n enso_anomaly = tas - enso_monthly_mean\n\n enso_anomaly_rolled = np.roll(enso_anomaly, n_lookahead - 12,axis = 0)\n # select valid range\n enso_anomaly_rolled_valid = enso_anomaly_rolled[valid_range,:,:]\n # reshape it into a matrix of a single column\n X = enso_anomaly_rolled_valid.reshape(-1,n_lat*n_long)\n\n return X", "def at(self, time_slices):\n\n if self.base is not None:\n return self.base.at(time_slices)\n\n if isinstance(time_slices, TimeSlice):\n time_slices = [time_slices]\n\n # join the time slice values\n timed_data = pd.DataFrame(columns=self.data.columns)\n\n # make the new data\n for slice_t in time_slices:\n slice_index = (slice_t.time <= self.data.index) & (\n self.data.index < slice_t.time + slice_t.duration\n )\n timed_data.loc[slice_t.time] = self.aggregate(\n self.data[slice_index], axis=0\n )\n\n # return the new feature object\n return Feature(\n data=timed_data,\n aggregate=self.aggregate,\n base=self,\n time_slices=time_slices,\n )", "def transform(self, y=None):\n _events = []\n for event in self.event_dict.keys():\n for day in self.event_dict[event]:\n _events.append([dt.datetime.strptime(day, \"%m/%d/%Y\"), event])\n\n _events = pd.DataFrame(_events, columns=[\"date\", \"local_event\"])\n _events[\"date\"] = _events[\"date\"].dt.date\n return pd.get_dummies(_events.set_index(\"date\")).reset_index()", "def realtime(self):", "def transform_series(obj):\n vals = obj.values\n return transform_array(vals)", "def fake_data(sample_rate=512,psd_segment_length=60,nsegs=16):\n epoch = 1153742417.0\n ts_data = numpy.random.normal(0,1,sample_rate*psd_segment_length*nsegs)\n ts_data = types.TimeSeries(ts_data,delta_t=1.0/sample_rate,epoch=epoch)\n return ts_data", "def to_timeseries(benchmark_data, x_label='Episode', y_label='Average Episode Reward',\n target=rewards_by_episode, cut_x=1e12, smooth=0):\n data_experiments, data_times, data_values = [], [], []\n\n for experiment_id, experiment_data in enumerate(benchmark_data):\n extended_results = experiment_data.extended_results()\n\n if smooth > 0:\n extended_results['rewards'] = np.array(pd.Series(extended_results['rewards']).ewm(span=smooth).mean())\n\n x, y = target(cut_x=cut_x, **extended_results)\n\n data_times.extend(x)\n data_values.extend(y)\n data_experiments.extend([experiment_id] * len(x))\n\n 
return pd.DataFrame({'experiment': data_experiments, x_label: data_times, y_label: data_values})", "def example3():\n arrive_time=example2() # Get packets arrive time using example1\n time_series.plot_time_series(arrive_time) # Plot time series using packets arrive time", "def __init__(self, dtdata, freq, nrows=1):\n \n assert isinstance(dtdata, (list, tuple))\n assert isinstance(dtdata[0], datetime.datetime)\n assert freq in ['YEARLY', 'MONTHLY', 'WEEKLY', \n 'DAILY', 'HOURLY', 'MINUTELY', \n\t\t 'SECONDLY']\n\n self.data_orig = dtdata\n self.data_dt = split_nrows(dtdata[:], nrows)\n self.data_numeric = split_nrows(dates.date2num(dtdata[:]), nrows)\n self.freq = freq\n self.nrows = nrows", "def __add__ ( self, other, resample_opts=None ):\n result = ObservationStorage (datadir=self.datadir, \\\n resample_opts=resample_opts )\n if self.date[0] > other.date[0]:\n start_date = other.date[0]\n else:\n start_date = self.date[0]\n if self.date[-1] > other.date[-1]:\n end_date = other.date[-1]\n else:\n end_date = self.date[-1]\n \n delta = datetime.timedelta ( days=1 )\n this_date = start_date.date()\n end_date = end_date.date() + delta\n \n this_obs_dates = [ x.date() for x in self.date ]\n other_obs_dates = [ x.date() for x in other.date ]\n \n date = [] ; vza = [] ; vaa = [] ; sza = [] ; saa = []\n emulator = [] ; mask = [] ; data_pntr = [] ; spectral = []\n sensor = []\n \n while this_date < end_date:\n if this_date in this_obs_dates:\n iloc = this_obs_dates.index ( this_date )\n date.append ( self.date[iloc] )\n emulator.append ( self.emulator[iloc] )\n vza.append ( self.vza[iloc] )\n sza.append ( self.sza[iloc] )\n vaa.append ( self.vaa[iloc] )\n saa.append ( self.saa[iloc] )\n spectral.append ( self.spectral )\n mask.append ( ( self.get_mask, [iloc] ) )\n sensor.append ( self.sensor )\n \n data_pntr.append ( self._data_pntr[iloc] )\n if this_date in other_obs_dates:\n iloc = other_obs_dates.index ( this_date )\n date.append ( other.date[iloc] )\n emulator.append ( other.emulator[iloc] )\n vza.append ( other.vza[iloc] )\n sza.append ( other.sza[iloc] )\n vaa.append ( other.vaa[iloc] )\n saa.append ( other.saa[iloc] )\n spectral.append ( other.spectral )\n mask.append ( ( other.get_mask, [iloc] ) )\n sensor.append ( other.sensor )\n data_pntr.append ( other._data_pntr[iloc] )\n this_date += delta\n result.vza = vza\n result.vaa = vaa\n result.sza = sza \n result.saa = saa \n result.date = date\n result.spectral = spectral\n result.masks = mask\n result.sensor = sensor\n result.emulator = emulator\n result._data_pntr = data_pntr\n return result", "def retrieve_data_timeseries(hfile, setname):\n dset = hfile[setname]\n sample_rate = dset.attrs[\"SamplingRate(Hz)\"]\n gps_epoch = construct_utc_from_metadata(dset.attrs[\"Date\"], dset.attrs[\"t0\"])\n data = retrieve_channel_data(hfile, setname)\n ts_data = TimeSeries(data, sample_rate=sample_rate, epoch=gps_epoch)\n return ts_data", "def makeTimeSeriesData(self,cluster,server,items):\n start = 0\n end = len(items)\n step = 1\n values = []\n for key,value in items.iteritems():\n values.append(value)\n \n name = cluster+\",\"+server+\",alert\"\n series = TimeSeries(name, start, end, step, values)\n #for key,value in items:\n return series", "def ar1_moving_average_time_series(series, length=1):\n\n # just in case the index isn't already datetime type\n series.index = pd.to_datetime(series.index)\n\n ar1 = []\n ar1_se = []\n index = []\n\n for i in range(len(series) - length ):\n #print(series[i:(length + i)])\n param, se = 
get_AR1_parameter_estimate(series[i:(length + i)])\n ar1.append(param)\n ar1_se.append(se)\n index.append(series.index[length + i])\n\n ar1_name = series.name+\"_ar1\"\n ar1_se_name = series.name+\"_ar1_se\"\n\n ar1_df = pd.DataFrame()\n ar1_df[ar1_name] = pd.Series(ar1)\n ar1_df[ar1_se_name] = pd.Series(ar1_se)\n ar1_df.index = index\n\n return ar1_df", "def ts_resample(self):\n try:\n ts_freq = pd.DataFrame(\n index=pd.date_range(self.ts_df.index[0], self.ts_df.index[len(self.ts_df) - 1], freq=self.freq),\n columns=['dummy'])\n except ValueError:\n self._uvts_cls_logger.exception(\"Exception occurred, possibly incompatible frequency!\")\n sys.exit(\"STOP\")\n\n if self.fill_method == 'ffill':\n self.ts_df = ts_freq.join(self.ts_df).drop(['dummy'], axis=1)\n self.ts_df.y = self.ts_df.y.fillna(method='ffill')\n # if np.isnan ( self.ts_df.y ).any ():\n # self.ts_df.y = self.ts_df.y.fillna ( method='bfill' )\n else: # interp\n xp = np.linspace(0, self.ts_df.size, self.ts_df.size, endpoint=False)\n fp = self.ts_df['y']\n # join\n self.ts_df = ts_freq.join(self.ts_df).drop(['dummy'], axis=1)\n # pick new points\n x = np.linspace(0, ts_freq.size, ts_freq.size, endpoint=False)\n x = x[self.ts_df['y'].isna()]\n print(x.size)\n print(x)\n\n # put the values\n self.ts_df.y[self.ts_df['y'].isna()] = np.interp(x, xp, fp)\n\n if np.isnan(self.ts_df.y).any():\n self._uvts_cls_logger.warning(\"Some NaN found, something went wrong, check the data!\")\n sys.exit(\"STOP\")\n\n self._uvts_cls_logger.info(\"Time series resampled at frequency: \" + str(self.ts_df.index.freq) +\n \". New shape of the data: \" + str(self.ts_df.shape))\n self._uvts_cls_logger.info(\"Using time series data of range: \" + str(min(self.ts_df.index)) + ' - ' + str(\n max(self.ts_df.index)) + \" and shape: \" + str(self.ts_df.shape))\n\n return self", "def _initial_conversion(X: Any) -> TimeSeriesInstances:\n if isinstance(X, np.ndarray) and X.ndim == 2:\n X = X.reshape(X.shape[0], 1, X.shape[1])\n return X", "def rainfall_series(self):\n\n # assign local temporal variables\n datatype = 'strds'\n increment = str(self.rain_interval)+\" minutes\"\n raster = 'raster'\n rain_excess = 'rain_excess'\n net_difference = 'net_difference'\n #iterations = sum(1 for row in precip)\n\n # create a raster space time dataset\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.elevation_timeseries,\n title=self.elevation_title,\n description=self.elevation_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.depth_timeseries,\n title=self.depth_title,\n description=self.depth_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.erdep_timeseries,\n title=self.erdep_title,\n description=self.erdep_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.flux_timeseries,\n title=self.flux_title,\n description=self.flux_description,\n overwrite=True)\n gscript.run_command(\n 't.create',\n type=datatype,\n temporaltype=self.temporaltype,\n output=self.difference_timeseries,\n title=self.difference_title,\n description=self.difference_description,\n overwrite=True)\n\n # register the initial digital elevation model\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=self.elevation,\n start=self.start,\n increment=increment,\n 
flags='i',\n overwrite=True)\n\n # create evolution object\n evol = Evolution(\n elevation=self.elevation,\n precipitation=self.precipitation,\n start=self.start,\n rain_intensity=self.rain_intensity,\n rain_interval=self.rain_interval,\n walkers=self.walkers,\n runoff=self.runoff,\n mannings=self.mannings,\n detachment=self.detachment,\n transport=self.transport,\n shearstress=self.shearstress,\n density=self.density,\n mass=self.mass,\n grav_diffusion=self.grav_diffusion,\n erdepmin=self.erdepmin,\n erdepmax=self.erdepmax,\n k_factor=self.k_factor,\n c_factor=self.c_factor,\n m=self.m,\n n=self.n,\n threads=self.threads,\n fill_depressions=self.fill_depressions)\n\n # open txt file with precipitation data\n with open(evol.precipitation) as csvfile:\n\n # check for header\n has_header = csv.Sniffer().has_header(csvfile.read(1024))\n\n # rewind\n csvfile.seek(0)\n\n # skip header\n if has_header:\n next(csvfile)\n\n # parse time and precipitation\n precip = csv.reader(csvfile, delimiter=',', skipinitialspace=True)\n\n # initial run\n initial = next(precip)\n evol.start = initial[0]\n evol.rain_intensity = 'rain_intensity'\n # compute rainfall intensity (mm/hr)\n # from rainfall observation (mm)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity}\"\n \"={rain_observation}\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_intensity=evol.rain_intensity,\n rain_observation=float(initial[1]),\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # run the landscape evolution model for each rainfall record\n for row in precip:\n\n # update the elevation\n evol.elevation=evolved_elevation\n\n # update time\n evol.start=row[0]\n\n # compute rainfall intensity (mm/hr)\n # from rainfall observation (mm)\n rain_intensity = 'rain_intensity'\n 
gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity}\"\n \"={rain_observation}\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_intensity=rain_intensity,\n rain_observation=float(row[1]),\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # derive excess water (mm/hr) from rainfall rate (mm/hr)\n # plus the depth (m) per rainfall interval (min)\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_excess}\"\n \"={rain_intensity}\"\n \"+{depth}\"\n \"/1000.\"\n \"/{rain_interval}\"\n \"*60.\".format(\n rain_excess=rain_excess,\n rain_intensity=rain_intensity,\n depth=depth,\n rain_interval=self.rain_interval),\n overwrite=True)\n\n # update excess rainfall\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{rain_intensity} = {rain_excess}\".format(\n rain_intensity='rain_intensity',\n rain_excess=rain_excess),\n overwrite=True)\n evol.rain_intensity = rain_intensity\n\n # determine mode and run model\n if self.mode == \"simwe_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.erosion_deposition()\n # remove relative timestamps\n # from r.sim.water and r.sim.sediment\n gscript.run_command(\n 'r.timestamp',\n map=depth,\n date='none')\n gscript.run_command(\n 'r.timestamp',\n map=erosion_deposition,\n date='none')\n\n elif self.mode == \"usped_mode\":\n (evolved_elevation, time, depth, erosion_deposition,\n difference) = evol.usped()\n\n elif self.mode == \"rusle_mode\":\n (evolved_elevation, time, depth, sediment_flux,\n difference) = evol.rusle()\n\n else:\n raise RuntimeError(\n '{mode} mode does not exist').format(mode=self.mode)\n\n # register the evolved maps\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.elevation_timeseries,\n maps=evolved_elevation,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.depth_timeseries,\n maps=depth,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.erdep_timeseries,\n maps=erosion_deposition,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n except (NameError, CalledModuleError):\n pass\n try:\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.flux_timeseries,\n maps=sediment_flux,\n start=evol.start,\n increment=increment,\n flags='i', overwrite=True)\n except (NameError, CalledModuleError):\n pass\n gscript.run_command(\n 't.register',\n type=raster,\n input=self.difference_timeseries,\n maps=difference,\n start=evol.start,\n increment=increment,\n flags='i',\n overwrite=True)\n\n # remove temporary maps\n gscript.run_command(\n 'g.remove',\n type='raster',\n name=['rain_excess'],\n flags='f')\n\n # compute net elevation change\n gscript.run_command(\n 'r.mapcalc',\n expression=\"{net_difference}\"\n \"= {evolved_elevation}-{elevation}\".format(\n net_difference=net_difference,\n elevation=self.elevation,\n evolved_elevation=evol.elevation),\n overwrite=True)\n gscript.write_command(\n 'r.colors',\n map=net_difference,\n rules='-',\n stdin=difference_colors)", "def with_time(self):\n if self.time_slices is None:\n raise FeatureError(\"Feature has no time reference.\")\n\n for i, datum in enumerate(self.data[self.name]):\n yield (self.time_slices[i], datum)", "def history(self, t_minus=0):\n data = self.ohlcv_df[self.ohlcv_df.index <= utc_to_epoch(\n self.prior_time)]\n return OHLCVData(data[-t_minus:])", "def to_real_series(self, data: 
pd.Series) -> pd.Series:\n ...", "def __init__(self, miser, fromdt, todt):\r\n self.miser = miser\r\n print self.summary(fromdt, todt)", "def make_time_features(ts, index=None, epoch=None, epoch_span=None):\n # input validation\n try:\n if len(ts) == 1:\n _singleton = True\n elif len(ts) > 1:\n _singleton = False\n elif len(ts) < 1:\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n except TypeError:\n return make_time_features([ts], index=index, epoch=epoch, epoch_span=epoch_span)\n\n if not isinstance(ts, pd.DatetimeIndex):\n ts = pd.Series(0, index=ts).index\n if not isinstance(ts, pd.DatetimeIndex):\n raise ValueError(\"must pass non-empty iterable of timestamps\")\n\n if index is None:\n index = pd.RangeIndex(len(ts))\n if epoch is None:\n epoch = min(ts)\n if epoch_span is None:\n epoch_span = float((end - epoch).total_seconds())\n\n time_features = {}\n start = min(ts)\n end = max(ts)\n\n # Major US holidays\n NewYearsDay = pd.tseries.holiday.Holiday('New Years Day', month=1, day=1)\n MemorialDay = pd.tseries.holiday.Holiday('Memorial Day', month=6, day=1, offset=pd.DateOffset(weekday=MO(-1)))\n IndependenceDay = pd.tseries.holiday.Holiday('Independence Day', month=7, day=4)\n LaborDay = pd.tseries.holiday.Holiday('Labor Day', month=9, day=1, offset=pd.DateOffset(weekday=MO(1)))\n ThanksgivingDay = pd.tseries.holiday.Holiday('Thanksgiving Day', month=11, day=1, offset=pd.DateOffset(weekday=TH(4)))\n ChristmasDay = pd.tseries.holiday.Holiday('Christmas Day', month=12, day=25)\n holidays = \\\n NewYearsDay.dates(start.date(), end.date()).tolist() +\\\n MemorialDay.dates(start.date(), end.date()).tolist() +\\\n IndependenceDay.dates(start.date(), end.date()).tolist() +\\\n LaborDay.dates(start.date(), end.date()).tolist() +\\\n ThanksgivingDay.dates(start.date(), end.date()).tolist() +\\\n ChristmasDay.dates(start.date(), end.date()).tolist()\n holidays = set([h.date() for h in holidays])\n\n # projections onto unit circle\n time_features['day_cos'] = np.cos((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['day_sin'] = np.sin((ts.hour * 3600 + ts.minute * 60 + ts.second) * 2 * np.pi / 86400.)\n time_features['week_cos'] = np.cos(ts.dayofweek * 2 * np.pi / 7.)\n time_features['week_sin'] = np.sin(ts.dayofweek * 2 * np.pi / 7.)\n time_features['year_cos'] = np.cos(ts.dayofyear * 2 * np.pi / 365.)\n time_features['year_sin'] = np.sin(ts.dayofyear * 2 * np.pi / 365.)\n # linear march through time\n time_features['epoch'] = (ts - epoch).total_seconds() / epoch_span\n # workday indicator\n time_features['workday'] = [int(weekday < 5 and date not in holidays) for weekday, date in zip(ts.weekday, ts.date)]\n\n if _singleton:\n return {k: v[0] for k, v in time_features.iteritems()}\n else:\n return pd.DataFrame(time_features, index=index)", "def __init__(self, ts, ys):\n super(ForwardEulerOutput, self).__init__(np.min(ts), np.max(ts))\n self.interp = interp1d(ts, ys, kind='linear', copy=True)", "def get_trend_pred(united_samples, look_back):\n\n features = united_samples[:, :1].astype(str)\n labels = united_samples[:, -1:]\n\n # move all dates a day behind\n delta = -1\n generator = (change_date(date[0], delta_days=delta) for date in features)\n new_dates = np.fromiter(generator, features.dtype)\n\n # selecting samples after April 2020 when the COVID-19 became global\n BOOL_COND_ARRAY = [(int(date[0:4]) >= 2020 and int(date[5:7]) >= 4)\n for date in new_dates]\n new_dates = new_dates[BOOL_COND_ARRAY]\n new_dates = new_dates.reshape(-1, 1)\n\n 
labels = labels[BOOL_COND_ARRAY]\n\n # converting date to numerical source:\n generator = (dt.toordinal(dt.strptime(date[0], DATE_FORMAT)) for date in\n new_dates)\n numerical_dates = np.fromiter(generator, features.dtype)\n numerical_dates = numerical_dates.reshape(-1, 1).astype(float)\n\n # change degree of polynomial features\n poly_features = PolynomialFeatures(degree=4)\n features_transformed = poly_features.fit_transform(numerical_dates)\n\n # model\n linreg_model = LinearRegression()\n linreg_model.fit(features_transformed, labels)\n\n # trend\n trend_labels = linreg_model.predict(\n poly_features.fit_transform(numerical_dates))\n\n ahead = trend_labels[-1:, 0]\n behind = trend_labels[-look_back-1: -look_back, 0]\n\n if ahead > behind:\n return 'upward'\n elif ahead < behind:\n return 'downward'\n else:\n return 'not_changed'", "def create_trainset(\n self,\n s=\"2016-01-01\",\n e=\"2018-12-31\",\n freq=\"D\",\n ) -> DateTensors:\n self.date_series = DatasetDateSeries(\n start=s, end=e, wsz=self.wsz, to_onehot=True\n ) # wsz same as W\n\n # ti window data to tensor\n ti = Tsr(self.date_series.ti_win)\n\n # tc window data to tensor\n N, W, Dtc = len(ti), self.date_series.wsz, 3\n tc = torch.randint(0, 2, (1, 1, Dtc)).repeat(N, W, 1) # shape: (N, W, Dtc)\n\n # kn window data to tensor\n kn = Tsr(self.date_series.kn_win)\n\n # create target data as `tg` (target)\n tg = self.dct_curve[self.model_type](ti).repeat(1, 1, self.Dout)\n\n ti, tc, kn, tg = self.to_device(ti, tc, kn, tg)\n trainset = DateTensors(\n ti=ti, tc=tc, kn=kn, tg=tg, device=self.device\n ) # ti/tc/kn.shape: (N, W, Dout), tg.shape = (N, 1, Dout)\n self.trainset = trainset\n return trainset", "def __call__(self, ts):\r\n # initialize state machine parameters\r\n self.t1, self.tmax, self.Lmax, self.events = (None, None, None, [])\r\n # create indoor time series\r\n self.tsIndoor = createIndoorTimeseries(ts=ts, insulation=self.insulation)\r\n # initialize the event detector parameters\r\n self.indicators = self.tsIndoor.basicIndicators()\r\n if isinstance(self.Lbeta, str):\r\n self.Lalpha = self.indicators[{'LEQ': 'LAeq', 'L50': 'LA50', 'L90': 'LA90'}[self.Lbeta]] + self.E\r\n else:\r\n self.Lalpha = self.Lbeta + self.E\r\n if self.Lomega == None:\r\n self.Lomega = self.Lalpha\r\n # generate a list of event candidates\r\n candidates = []\r\n for t, level in self.tsIndoor:\r\n result = self.step(t=t, level=level)\r\n if result != None:\r\n candidates.append(result)\r\n result = self.stop(t=t)\r\n if result != None:\r\n candidates.append(result)\r\n # filter the event candidate list for time gaps, and merge events if necessary\r\n for candidate in candidates:\r\n if len(self.events) == 0:\r\n # first event, so no problem with time gap\r\n self.events.append(candidate)\r\n else:\r\n if candidate.t1 - self.events[-1].t2 < self.minTauG:\r\n self.events[-1].merge(candidate)\r\n else:\r\n self.events.append(candidate)\r\n # filter the event candidate list for event durations\r\n self.events = [event for event in self.events if event.duration() >= self.minTauE]\r\n self.events = [event for event in self.events if event.duration() <= self.maxTauE]\r\n return self", "def make_time_series(rics: list, fields: list, start_date: str, end_date: str, name: str) -> None:\n df = get_time_series(rics, fields, start_date, end_date)\n df.dropna(inplace=True)\n to_csv(df, name, time_series=True)", "def forecast(\n input_values: np.ndarray, future_dates: List, model: Sequential, scaler\n) -> pd.DataFrame:\n if scaler:\n future_values = 
scaler.inverse_transform(\n model.predict(input_values.reshape(1, -1, 1)).reshape(-1, 1)\n )\n else:\n future_values = model.predict(input_values.reshape(1, -1, 1)).reshape(-1, 1)\n\n df_future = pd.DataFrame(\n future_values, index=future_dates, columns=[\"Predicted Price\"]\n )\n return df_future", "def new_data(self, base: str, first: pd.Timestamp, last: pd.Timestamp):\n ohlcv = self.predictor.ohlcv.get_data(base, first, last)\n fdf = self.predictor.features.get_data(base, first, last)\n tdf = self.predictor.targets.get_data(base, first, last)\n if (tdf is None) or tdf.empty:\n return None\n pred = self.predict_batch(base, fdf)\n pdf = pd.DataFrame(data=pred, index=fdf.index, columns=self.keys())\n pdf = pd.concat([ohlcv.close, tdf.target, pdf], axis=1, join=\"inner\")\n return self.check_timerange(pdf, first, last)", "def to_drawdown_series(self):\n self.tsdf = drawdown_series(self.tsdf)\n self.tsdf.columns = pd.MultiIndex.from_product([[self.label], ['Drawdowns']])\n self.tsdf.index = pd.to_datetime(self.tsdf.index)\n return self", "def createIndoorTimeseries(ts, insulation):\r\n result = ts.copy()\r\n result.correct(-INSULATIONS[insulation])\r\n result.addLevel(BACKGROUND)\r\n return result", "def dctrend(f):\r\n \r\n fdc=sps.detrend(f)\r\n \r\n return fdc", "def _load_data(self, datasource):\n import pandas as pd\n if not isinstance(datasource, pd.DataFrame):\n raise TypeError('DfFeature must loaded from pd.DataFrame')\n self.data = datasource\n self.data['thetime']=self.data['thetime'].apply(lambda x:try_to_parse_date(x))", "def __getitem__(self, index):\n return self._timeseriesData[index]", "def __copy__(self):\n ts = TimeSeries.from_twodim_list(self._timeseriesData)\n\n ts._normalizationLevel = self._normalizationLevel\n ts._normalized = self._normalized\n ts._sorted = self._sorted\n ts._predefinedSorted = self._predefinedSorted\n ts._predefinedNormalized = self._predefinedNormalized\n ts._timestampFormat = self._timestampFormat\n\n return ts", "def __call__(self, data, year):\n return self.transition(data, year)" ]
[ "0.6148477", "0.59344846", "0.5870157", "0.58571", "0.5753403", "0.57533115", "0.57117873", "0.5679154", "0.56493205", "0.56239253", "0.56217587", "0.55675584", "0.5516488", "0.55088216", "0.5477455", "0.54756147", "0.5470915", "0.54526985", "0.54525316", "0.5447155", "0.54222167", "0.54164165", "0.5388522", "0.5377369", "0.5375938", "0.53735673", "0.5350218", "0.53456026", "0.5335474", "0.5313338", "0.530193", "0.5278955", "0.52755743", "0.526424", "0.52587736", "0.52577066", "0.5257252", "0.5242785", "0.5241675", "0.52228004", "0.5219991", "0.52120876", "0.5204708", "0.52009225", "0.5190392", "0.51873434", "0.5182266", "0.5181049", "0.51767373", "0.5169306", "0.51611835", "0.5152486", "0.5150719", "0.51461214", "0.5143004", "0.5136214", "0.5130504", "0.51288134", "0.5119151", "0.5115226", "0.51142377", "0.51123124", "0.5106078", "0.51018894", "0.5099261", "0.5097228", "0.5082716", "0.50818104", "0.50746787", "0.50726074", "0.5068462", "0.5064695", "0.50636643", "0.5057622", "0.50532216", "0.5046659", "0.5044966", "0.5042722", "0.50400215", "0.50398475", "0.5036518", "0.5035411", "0.5032984", "0.50311035", "0.5010319", "0.5010133", "0.50080615", "0.50074005", "0.50072193", "0.50045687", "0.500454", "0.50034297", "0.49975654", "0.49969694", "0.4995357", "0.49931306", "0.4985001", "0.49817428", "0.4981032", "0.4979626", "0.49768174" ]
0.0
-1
return info about recently modified events
def __init__(self, eventRegistry, maxCount = 500, mandatoryLang = None, mandatoryLocation = True, returnInfo = ReturnInfo()): QueryParamsBase.__init__(self) assert maxCount <= 500, "Maximum number of events returned per call is 500" self._er = eventRegistry self._setVal("addEvents", True) self._setVal("addArticles", False) self._setVal("recentActivityEventsMaxEventCount", maxCount) self._setVal("recentActivityEventsMandatoryLocation", mandatoryLocation) # return only events that have at least a story in the specified language if mandatoryLang != None: self._setVal("recentActivityEventsMandatoryLang", mandatoryLang) self._update(returnInfo.getParams("recentActivityEvents"))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getChanges():", "def modified_stats(self):\n return self._counts_per_month('last_modified')", "def updated_on(self):\n return self.get_time(\"updated_on\")", "def getLastModifiedTime(self): #$NON-NLS-1$\r", "def _get_changes_metadata(document):\n return ((el.get(author_attrib),\n datetime.datetime.strptime(el.get(date_attrib), date_format))\n for el in _get_comments(document))", "def modified(self):\n return self.properties.get(\"Modified\", datetime.min)", "def svn_info_t_last_changed_date_get(svn_info_t_self): # real signature unknown; restored from __doc__\n pass", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def modified(self):\n return self._modified", "def findCreatedModifiedTimes(zippedFile):\n lines = linesFromZippedFile(zippedFile, 'META')\n meta = json.loads(lines[0])\n return meta['created'], meta['modified']", "def modified(self):\n return self.__modified", "def modified(self):\n return self.__modified", "def recently_modified(request):\n pages = models.Page.all().order('modified').fetch(10)\n return utility.respond(request, 'admin/recently_modified', {'pages': pages})", "def modified_timestamp(self) -> str:\n return pulumi.get(self, \"modified_timestamp\")", "def modified_timestamp(self) -> str:\n return pulumi.get(self, \"modified_timestamp\")", "def last_modified_dts(self):\n return self._last_modified_dts", "def date_modified(self):\n return self._date_modified", "def last_changed(self):\n return self._last_changed", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def time_modified(self) -> str:\n return pulumi.get(self, \"time_modified\")", "def getUpdates(self):\n # execute the query\n ret = self._er.execQuery(self)\n\n if ret and ret.get(\"recentActivity\") and ret[\"recentActivity\"].get(\"events\"):\n # return the updated information\n return ret[\"recentActivity\"][\"events\"]\n # or empty\n return {}", "def last_update(self):\r\n request = http.Request('GET', '/metadata/last_update.json')\r\n return request, parsers.parse_json", "def get_events() -> list[Event]:\n g.ledger.changed()\n return [e for e in g.filtered.entries if isinstance(e, Event)]", "def mtime(self):\r\n return self.info().mtime", "def file_events(self):\n return self._file_events", "def getListModifiedDates(self):\n return _libsbml.ModelHistory_getListModifiedDates(self)", "def last_modified(self):\n return os.path.getmtime(self.filename)", "def get_changed(self):\n ret = []\n def list_callback(status, path):\n ret.append( (status, path) )\n self._walk_tree(ChangedEditor, pass_root=1, callback=list_callback)\n return ret", "def modified(self):\r\n\t\treturn self.last_modified > self.last_processed", "def GetChanges(self):\n return self._changes", "def modified(self):\n\t\treturn self.last_modified > self.last_processed", "def last_file_updated(self):\n query = '*.xml'\n keymap_files = glob.glob(query)\n\n sorted_files = sorted(keymap_files, key=self.mtime, reverse=1)\n last_modified_file = sorted_files[0]\n second_last_modified_file = sorted_files[1]\n\n t1 = self.mtime(last_modified_file)\n t2 = self.mtime(second_last_modified_file)\n\n logger.debug('Last modified time: {0}'.format(t1))\n logger.debug('Second Last modified time: {0}'.format(t2))\n\n last_modified_time = self.mtime(last_modified_file)\n last_access_time = self.atime(last_modified_file)\n\n if sys.platform == \"win32\":\n logger.info('Detected Windows environment')\n # 
self.regenerate_osx(last_access_time, last_modified_time)\n elif sys.platform == 'darwin':\n logger.info('Detected OSX environment')\n # self.regenerate_windows(last_access_time, last_modified_time)\n else:\n logger.error('Unhandled platform: {0}'.format(sys.platform))\n pass", "def was_modified(self):\n return self.modified", "def get_region_updated_time(self):", "def get_modified_time(self, name):\n raise NotImplementedError(\n \"subclasses of Storage must provide a get_modified_time() method\"\n )", "def metadata_updated_on(item):\n ts = item['updated_at']\n ts = str_to_datetime(ts)\n\n return ts.timestamp()", "def modification_time(self) -> str:\n return pulumi.get(self, \"modification_time\")", "def updated_on(self):\n return self._updated_on", "def get_monitored_changes(self) -> List:\n pass", "def last_modified():\n return \"Last modified: %s\" % time.ctime(os.path.getmtime(FILE_NAME))", "def Updated(self):\n return self._get_attr('Updated')", "def time_updated(self):\n return self._time_updated", "def get_modified_time(self, name):\n return self.cache.get(name).time", "def GetChangesSample():\n client = CreateClient()\n changes = client.GetChanges()\n for change in changes.entry:\n print change.title.text, change.changestamp.value", "def last_modified_at(self):\n return self.viztrail.last_modified_at", "def get_events(self):\n return self.events", "def svn_info_t_last_changed_rev_get(svn_info_t_self): # real signature unknown; restored from __doc__\n pass", "def date_modified():\n return render_template(\"date_modified.html\", date_modified=last_modified())", "def time_last_modified(self):\n return self.properties.get(\"TimeLastModified\", None)", "def modified(self) -> datetime.datetime:\n timestamp = os.path.getmtime(self._manifest_path)\n\n return datetime.datetime.fromtimestamp(timestamp)", "def get_modified_time(self, name):\n full_path = self.path(name)\n return self.__volume.getmtime(full_path)", "def event_stats(self):\n pass", "def last_modified(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_modified\")", "def field_changes(self):\n return self._field_changes", "def lastmod(self, obj):\n return obj.modified", "def getModifiedDate(self, *args):\n return _libsbml.ModelHistory_getModifiedDate(self, *args)", "def was_modified(self):\n\n return self.__modified", "def todo(self):\n # sort events with eventid using datetime string\n pass", "def last_modified_at(self) -> str:\n return pulumi.get(self, \"last_modified_at\")", "def last_modified_at(self) -> str:\n return pulumi.get(self, \"last_modified_at\")", "def get_last_modified_date(self):\n\t\treturn call_sdk_function('PrlFsEntry_GetLastModifiedDate', self.handle)", "def get_mtime(self):\n if settings.DEBUG:\n return os.path.getmtime(self.get_path())\n return staticfiles_storage.modified_time(self.get_name())", "def last_modified_time(self) -> str:\n return pulumi.get(self, \"last_modified_time\")", "def last_modified_time(self) -> str:\n return pulumi.get(self, \"last_modified_time\")", "def last_edited(self):\n return self._last_edited", "def last_modified_time(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"last_modified_time\")", "def get_last_events(self):\n\n events = self._last_events\n self._last_events = list()\n return events", "def get_curr_events(self):\n today = datetime.date.today()\n return self.s.query(Event).filter(Event.time > today).all()", "def events(self):\n return self.current_events", "def get_events(self):\n raise NotImplementedError", "def get_events(self):\n raise 
NotImplementedError", "def last_modified(self) -> str:\n\t\tif not self._closed:\n\t\t\ttimestamp = self.ds.last_modified()\n\t\t\treturn timestamp\n\t\treturn None", "def get_changed_columns(self):\r\n return [k for k,v in self._values.items() if v.changed]", "def last_change(self) -> Tuple[float, float]:\n return self.last_change_file_time, self.last_change_cursor_time", "def getLastMod(page):\n return page.info().get(\"Last-Modified\")", "def modification_timestamp(self):\n return parse_windows_timestamp(self.unpack_qword(0xC))", "def changes(self) -> dict:\n return self.config['changes']", "def _update_modified_since(self, timestamp):\n pass", "def modified_object(obj, event):\n now = datetime.now(tz=_zone)\n obj.modification_date = now", "def last_updated(self):\n return self._last_updated", "def get_inbound_statement_details_last_modified_date(self):\n return self.get_text_from_element(self.inbound_statements_details_last_modified_date_locator, False)", "def updated(self):\n xutimes = self.xutimes()\n\n # If there are 2 xutimes, updated is the first. Otherwise, there is no\n # updated date, just published.\n\n return (\n dt.fromtimestamp(int(xutimes[0]))\n if len(xutimes) == 2 else None\n )", "def event_log(self):\n pass", "def get_modified_time(fname):\n return os.stat(fname).st_mtime", "def get_mtime(self):\n storage = getattr(self._file, \"storage\", None)\n if storage:\n return storage.modified_time(self._file.name)\n return super(FileAsset, self).get_mtime()", "def is_modified(self):\n return self._tag == 'modified'", "def svn_info_t_last_changed_author_get(svn_info_t_self): # real signature unknown; restored from __doc__\n return \"\"", "async def recentchanges(self, ctx, limit=50):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n logger.info('Wiki.recentchanges: ' + str(limit), extra={'invoker': ctx.message.author.name})\r\n twenties, limit = divmod(limit, 20)\r\n async with ctx.channel.typing():\r\n result = ['']\r\n changes = []\r\n start = 'now'\r\n for i in [20 for j in range(twenties)] + [limit]:\r\n resp = await self.req({\r\n 'action': 'query',\r\n 'list': 'recentchanges',\r\n 'rcprop': 'user|timestamp|comment|title|sizes|flags',\r\n 'rctype': 'edit|new',\r\n 'rclimit': i,\r\n 'rcstart': start\r\n })\r\n changes.extend(resp['query']['recentchanges'])\r\n start = resp['query']['recentchanges'][-1]['timestamp']\r\n i = 0\r\n for ch in changes:\r\n change = '\\n'\r\n change += ch['timestamp']\r\n change += ': '\r\n change += ch['title']\r\n change += '; '\r\n sizechange = ch['newlen'] - ch['oldlen']\r\n if sizechange <= -500 or sizechange >= 500:\r\n change += '**'\r\n change += '('\r\n if sizechange <= 0:\r\n change += str(sizechange)\r\n if sizechange > 0:\r\n change += '+' + str(sizechange)\r\n change += ')'\r\n if sizechange <= -500 or sizechange >= 500:\r\n change += '**'\r\n change += ' . . 
'\r\n change += ch['user']\r\n change += ' _('\r\n change += ch['comment'].replace('*', '\\\\*').replace('_', '\\\\_').replace('`', '\\\\`')\r\n change += ')_'\r\n result[i] += change\r\n if len(result[i]) > 2000:\r\n result.append('')\r\n result[i], result[i+1] = result[i].rsplit('\\n', 1)\r\n i += 1\r\n for r in result:\r\n await ctx.send(r)", "def lastEventTime(self):\n return self._lastEventTime", "def events(self):\n return self._events", "def contribution_timestamp(self):\n return self.run_query(f\"({self.r}/contributor[id = 5558]/ancestor::page)[12]/revision/timestamp/text()\")", "def get_time_since_modified(fname):\n return time.time() - get_modified_time(fname)", "def with_last_update(self):\n return self.annotate(last_update=Coalesce(F('modified'), F('created')))", "def _GetUpdateTime(filename):\n stat_info = os.stat(filename)\n return (stat_info.st_atime, stat_info.st_mtime)", "def get_mtime(self):\n return os.path.getmtime(self.get_path())", "def last_modified(self) -> str:\n\t\tif self.name == \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/matrix\"].attrs:\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/matrix\"].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/matrix\"].attrs[\"last_modified\"]\n\n\t\tif self.name != \"\":\n\t\t\tif \"last_modified\" in self.ds._file[\"/layers/\" + self.name].attrs:\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\t\t\telif self.ds._file.mode == 'r+':\n\t\t\t\tself.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"] = timestamp()\n\t\t\t\tself.ds._file.flush()\n\t\t\t\treturn self.ds._file[\"/layers/\" + self.name].attrs[\"last_modified\"]\n\n\t\treturn timestamp()", "def modified(self) -> datetime:\n # TODO: Should this be overridden for LocalDirectoryAsset?\n return datetime.fromtimestamp(self.filepath.stat().st_mtime).astimezone()", "def get_history(self):\n msg_ids = self._records.keys()\n # Remove any that do not have a submitted timestamp.\n # This is extremely unlikely to happen,\n # but it seems to come up in some tests on VMs.\n msg_ids = [m for m in msg_ids if self._records[m]['submitted'] is not None]\n return sorted(msg_ids, key=lambda m: self._records[m]['submitted'])", "def _get_files_timestamps(self, working_dir: Union[str, os.PathLike]):\n return {f: os.path.getmtime(os.path.join(working_dir, f)) for f in os.listdir(working_dir)}" ]
[ "0.6769352", "0.64247614", "0.6342947", "0.63398314", "0.6318615", "0.63153297", "0.6308564", "0.6271734", "0.6271734", "0.6271734", "0.6271734", "0.6255355", "0.62016386", "0.62016386", "0.61900157", "0.6164317", "0.6164317", "0.61549234", "0.61546636", "0.6148851", "0.60952514", "0.60952514", "0.60939866", "0.60639334", "0.60437703", "0.60183966", "0.6006256", "0.6006034", "0.60040325", "0.599398", "0.5982819", "0.5977457", "0.59543866", "0.5938133", "0.59317875", "0.5923093", "0.5915204", "0.5907904", "0.58933336", "0.5891793", "0.5884934", "0.5846704", "0.5839137", "0.5832511", "0.58181536", "0.5816808", "0.58166665", "0.57950073", "0.5789407", "0.57885486", "0.57848406", "0.577774", "0.57731074", "0.5754854", "0.57466257", "0.573807", "0.57360655", "0.5731356", "0.5710868", "0.5703153", "0.56972516", "0.56972516", "0.56819236", "0.5679411", "0.56777185", "0.56777185", "0.56718713", "0.5666634", "0.5666151", "0.5665893", "0.5665421", "0.5639706", "0.5639706", "0.5636274", "0.5633605", "0.5625074", "0.56188464", "0.55973357", "0.5588004", "0.55833435", "0.55831814", "0.55798125", "0.5576939", "0.5576422", "0.55176044", "0.55090684", "0.55017596", "0.5499643", "0.54935616", "0.54746425", "0.5474278", "0.54730123", "0.54709756", "0.54699516", "0.5469876", "0.54649884", "0.5460339", "0.5451546", "0.54349065", "0.54123396", "0.54110336" ]
0.0
-1
Get the latest new or updated events from Event Registry
def getUpdates(self):
        # execute the query
        ret = self._er.execQuery(self)

        if ret and ret.get("recentActivity") and ret["recentActivity"].get("events"):
            # return the updated information
            return ret["recentActivity"]["events"]
        # or empty
        return {}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_events() -> list[Event]:\n g.ledger.changed()\n return [e for e in g.filtered.entries if isinstance(e, Event)]", "def get_events():\n url = app.config['EVENTS_ENDPOINT']\n response = requests.get(url, params={})\n if response.status_code == 200:\n return parse_events(response.json())\n raise RuntimeError('Error in retrieving events.')", "def get_events(self):\n return self.events", "def get_events(self):\n ret = []\n while True:\n event = self.event.get_event(wait=1, full=True)\n if event is None:\n return ret\n ret.append(event)", "def get_events(self):\n raise NotImplementedError", "def get_events(self):\n raise NotImplementedError", "def get_last_events(self):\n\n events = self._last_events\n self._last_events = list()\n return events", "def _get_persistent_events(self) -> Dict[uuid.UUID, CronEvent]:\n if not self.storage.contains(StateHandler.EVENTS_ENTRY):\n self.storage.put(StateHandler.EVENTS_ENTRY, {})\n return self.storage.get(StateHandler.EVENTS_ENTRY)", "def get_events(self):\n self._events = []\n self.ircobj.process_once(timeout=0.1)\n return self._events", "def _get_events(self):\n self.cache = []\n\n # Test if we have event table\n with datascope.closing(datascope.dbopen(self.db, 'r')) as db:\n dbtable = db.lookup(table='event')\n if dbtable.query(datascope.dbTABLE_PRESENT):\n steps = ['dbopen event']\n steps.extend(['dbjoin origin'])\n steps.extend(['dbsubset origin.orid != NULL'])\n steps.extend(['dbsubset origin.orid == prefor'])\n fields = ['evid']\n else:\n steps = ['dbopen origin']\n steps.extend(['dbsubset orid != NULL'])\n fields = []\n\n fields.extend(['orid','time','lat','lon','depth','auth','nass',\n 'ndef','review'])\n\n for v in extract_from_db(self.db, steps, fields, self.db_subset):\n if not 'evid' in v:\n v['evid'] = v['orid']\n\n self.logging.debug( \"Events(): new event #%s\" % v['evid'] )\n\n v['allmags'] = []\n v['magnitude'] = '-'\n v['maglddate'] = 0\n v['srname'] = '-'\n v['grname'] = '-'\n v['time'] = parse_sta_time(v['time'])\n v['strtime'] = readable_time(v['time'], self.timeformat, self.timezone)\n\n try:\n v['srname'] = stock.srname(v['lat'],v['lon'])\n except Exception,e:\n warninig('Problems with srname for orid %s: %s' % (v['orid'],\n v['lat'],v['lon'],e) )\n\n try:\n v['grname'] = stock.grname(v['lat'],v['lon'])\n except Exception,e:\n warninig('Problems with grname for orid %s: %s' % (v['orid'],\n v['lat'], v['lon'],e) )\n\n orid = v['orid']\n if orid in self.mags:\n for o in self.mags[orid]:\n v['allmags'].append(self.mags[orid][o])\n if self.mags[orid][o]['lddate'] > v['maglddate']:\n v['magnitude'] = self.mags[orid][o]['strmag']\n v['maglddate'] = self.mags[orid][o]['lddate']\n\n\n self.cache.append( v )", "def read_events():\n service = setup_google_calendar()\n dict = {}\n # Call the Calendar API\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n print('Getting the upcoming 10 events')\n events_result = service.events().list(calendarId='primary', timeMin=now,\n maxResults=10, singleEvents=True,\n orderBy='startTime').execute()\n events = events_result.get('items', [])\n\n if not events:\n print('No upcoming events found.')\n i = 0\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n print(start, event['summary'])\n dict[i] = (start, event['summary'])\n i += 1\n return dict", "def get_game_events(self):\n\t\tcontents = self.archive.read_file('replay.game.events')\n\t\treturn self.protocol.decode_replay_game_events(contents)", "def get_allpack_events(self):\n 
return self.comp('packmanager').get_allpack_events()", "def events(self):\n return self.current_events", "def ajaxevent():\n return common.get_latest_event()", "def get_event(self):\n return self.keys.events.get()", "def get_event_list(self):\n pass", "def events(self):\r\n return resources.Events(self)", "def get_events(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n\r\n query = \"\"\"\r\n SELECT DISTINCT E.eid, E1.ename, E1.description,\r\n E.category, E1.start_date, E1.end_date, E1.num_cap,\r\n E1.num_attending, L.lname, L.address_1, E.tag, L.lat, L.lon\r\n FROM {}.EventTags AS E, {}.UserTags AS U, {}.Events as E1, {}.Locations as L\r\n WHERE U.username='{}' AND\r\n E.tag = U.tag AND\r\n E1.eid = E.eid AND\r\n E1.lid = L.lid AND\r\n E1.start_date >= {}\r\n ORDER by E1.start_date\r\n \"\"\".format(\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n self.user.username,\r\n str(datetime.date.today())\r\n )\r\n\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n database.close()\r\n\r\n return [i for i in data]", "def updateEvents(self):\n # Update calendar data\n d_start = datetime.datetime.today()\n d_end = d_start + datetime.timedelta(self.delta_days)\n results = self.cal_cal.date_search(d_start, d_end)\n\n # Flush the events dict\n self.events = []\n # Add each events\n for event in results:\n # Format the title of the event\n str_title = event.instance.vevent.summary.value\n if len(str_title) > 20:\n str_title = str_title[:17] + \"...\"\n # Format the date of the event\n vdate = event.instance.vevent.dtstart.value\n d = datetime.datetime.strptime(\n vdate.strftime(\"%d %m %Y\"), \"%d %m %Y\")\n str_date = \"%s %d %s\" % (\n self.days_french[d.weekday()],\n d.day,\n self.months_french[d.month -1])\n # Format the date gap\n gap = 1 + (d - d_start).days\n # Save the event\n self.events.append((str_title, str_date, gap))", "def get_all_debug_events() -> Event:\n return Event.objects.filter(level__contains=\"debug\")", "def get_events(self):\n return self.s.query(Event).all()", "def events(self):\n return self.search(comp_class=Event)", "def available_events(self):\n return self.target.read_value(self.available_events_file).splitlines()", "async def events(self) -> Iterable[Event]:", "def parse_events(events_dict):\n return events_dict['events']", "async def get_events(self) -> list[Event]:\n log.debug(\"Discovering events in branding repository.\")\n\n try:\n event_directories = await self.fetch_directory(\"events\", types=(\"dir\",)) # Skip files.\n except Exception:\n log.exception(\"Failed to fetch 'events' directory.\")\n return []\n\n instances: list[Event] = []\n\n for event_directory in event_directories.values():\n log.trace(f\"Attempting to construct event from directory: '{event_directory.path}'.\")\n try:\n instance = await self.construct_event(event_directory)\n except Exception as exc:\n log.warning(f\"Could not construct event '{event_directory.path}'.\", exc_info=exc)\n else:\n instances.append(instance)\n\n return instances", "def events(self):\r\n return ev.Events(self)", "def events(self):\r\n return ev.Events(self)", "def events(self):\n return self._events", "def getAll(self, event_name):\n raw_events = self._callEventGetAll(self._id, event_name)\n return [snippet_event.from_dict(msg) for msg in raw_events]", "async def _get_events_from_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = self._get_events_from_local_cache(\n events, update_metrics=update_metrics\n 
)\n\n missing_event_ids = (e for e in events if e not in event_map)\n event_map.update(\n await self._get_events_from_external_cache(\n events=missing_event_ids,\n update_metrics=update_metrics,\n )\n )\n\n return event_map", "def events(self):\r\n return e.Events(self)", "def get_curr_events(self):\n today = datetime.date.today()\n return self.s.query(Event).filter(Event.time > today).all()", "def get_events():\n # reads the session\n session = request.args.get('session', type=str)\n process = request.args.get('process', default='receipt', type=str)\n\n dictio = {}\n\n if check_session_validity(session):\n user = get_user_from_session(session)\n if lh.check_user_log_visibility(user, process):\n caseid = request.args.get('caseid', type=str)\n events = lh.get_handler_for_process_and_session(process, session).get_events(caseid)\n i = 0\n while i < len(events):\n keys = list(events[i].keys())\n for key in keys:\n if str(events[i][key]).lower() == \"nan\" or str(events[i][key]).lower() == \"nat\":\n del events[i][key]\n i = i + 1\n dictio = {\"events\": events}\n ret = jsonify(dictio)\n return ret", "def get(self, *args):\n return _libsbml.ListOfEvents_get(self, *args)", "def get_latest_events(self, batch_size=1):\n\n # get latest event from the dispatcher\n queue_empty_reached, latest_dispatcher_events = \\\n self.dispatcher.get_events_batch(batch_size=batch_size)\n\n if queue_empty_reached:\n logger.debug(\"Empty queue reached!\")\n\n if latest_dispatcher_events:\n info = \"New events arrived [Total so far: {}]\".format(self._total_n_processed_events)\n logger.info(info)\n\n for ev in latest_dispatcher_events:\n logger.info(str(ev))\n\n # update internal list of events as appropriate\n self.update_events(latest_dispatcher_events)\n\n # update total n of processed events so far..\n self._total_n_processed_events += len(latest_dispatcher_events)\n\n # update the list of newly arrived events\n self.latest_events.extend(latest_dispatcher_events)\n\n # return the newly arrived events and empty the internal list\n all_latest_events = copy.deepcopy(self.latest_events)\n self.latest_events = []\n\n return all_latest_events", "def load_updated_events_list(self):\n self._event_index_list, self._event_id_list = \\\n zip(*self.gdc.updated_events_indices_and_ids)\n self.populate_event_list_from_index_list()", "def collect_new_events(self) -> list:\n self.logger.debug('Collecting new events...')\n events = self.build_events()\n if not events:\n self.logger.debug('No new events.')\n for event in events:\n self.logger.info('A new event has been detected: {}'.format(event))\n self._buffer_buisy_mutex.acquire()\n self._events_buffer.append(event)\n self._buffer_buisy_mutex.release()", "def events(self) -> object:\n return self._events", "def get_registry(self):\n response = {}\n delete_keys = []\n for heart_beat in self._registry:\n key = heart_beat.tag\n response[key] = {\n 'url': str(heart_beat)\n }\n last_seen = self._registry[heart_beat]\n now = time.time()\n ttl = (now - last_seen - self.age)\n if ttl > Registry.TTL_DELETE_ENTRY:\n # response[key]['status'] = Registry.LABELS[Registry.TTL_DELETE_ENTRY]\n delete_keys.append(heart_beat) # once done with this loop remove expired entries.\n elif ttl > Registry.TTL_OFFLINE:\n response[key]['status'] = Registry.LABELS[Registry.TTL_OFFLINE]\n elif ttl > Registry.TTL_WARNING:\n response[key]['status'] = Registry.LABELS[Registry.TTL_WARNING]\n else:\n response[key]['status'] = Registry.LABELS[Registry.TTL_ALIVE]\n\n for heart_beat in delete_keys:\n del 
self._registry[heart_beat]\n return response", "def view_events():\n result = get_events_helper(Event)\n return jsonify(result[0]), result[1]", "def getEvent(self):\n year, month, day = self.date\n event = Event()\n event.add(\"summary\", \"%s release\" % (self.dict[\"name\"]))\n event.add(\"uid\", \"http://www.freebase.com/view/guid/%s\" % (self.dict['guid'][1:]))\n event.add(\"dtstart\", \"%04d%02d%02d\" % (year,month,day), encode=0)\n return event", "def get_events(self):\n #Returne the capture events\n raise NotImplementedError", "def get_events(self):\n #Returne the capture events\n raise NotImplementedError", "def _iter_events(self) -> Generator:\n response = self.client.call()\n events: list = response.json()\n\n if not events:\n return []\n\n while True:\n yield events\n last = events.pop()\n self.client.set_next_run_filter(last['@timestamp'])\n response = self.client.call()\n events = response.json()\n try:\n events.pop(0)\n assert events\n except (IndexError, AssertionError):\n LOG('empty list, breaking')\n break", "def load_new_events_list(self):\n self._event_index_list = self.gdc.new_events_indices\n self.populate_event_list_from_index_list()", "def events(self):\n return self.properties.get('events', EventCollection(self.context, ResourcePath(\"events\", self.resource_path)))", "def __calender_events(self):\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('calendar', 'v3', http=http)\n\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n pt=\"Getting the upcoming latest events\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pt)\n eventsResult = service.events().list(\n calendarId='primary', timeMin=now, maxResults=1, singleEvents=True,\n orderBy='startTime').execute()\n events = eventsResult.get('items', [])\n\n if not events:\n pq=\"No upcoming events found.\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pq)\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n #start1=''.join(start)\n summary=event['summary']\n print start,summary\n requests.get(\"http://localhost:8080/statement?text=\"+start+\" \"+summary)", "def _get_all_event_history(device_event_file_path, event_labels, timeout=10.0):\n result = []\n timedout = False\n\n file_exists, remaining_timeout = _wait_for_event_file(device_event_file_path,\n timeout)\n if not file_exists:\n timedout = True\n return result, timedout\n\n timeout_str = \"{:f}\".format(remaining_timeout)\n\n if event_labels is None:\n tac_cmd = [\"timeout\", timeout_str, \"tac\", device_event_file_path]\n out = \"\"\n try:\n out = subprocess.check_output(tac_cmd).decode(\"utf-8\", \"replace\")\n except subprocess.CalledProcessError as err:\n if err.returncode == 124:\n timedout = True\n json_events = out.splitlines()\n else:\n grep_cmd = [\"timeout\", timeout_str, \"grep\", \"-w\"]\n for event_label in event_labels:\n if event_label:\n grep_cmd.append(\"-e\")\n grep_cmd.append(event_label)\n grep_cmd.append(device_event_file_path)\n grep_proc = subprocess.Popen(grep_cmd, stdout=subprocess.PIPE)\n out, _ = grep_proc.communicate()\n if grep_proc.returncode == 124:\n timedout = True\n json_events = out.splitlines()\n json_events.reverse()\n\n return _get_events_from_json_output(json_events, event_labels), timedout", "def event_list(self):\n return self._event_list", "def events(self) -> Dict[EventCall, Set[Node]]:\n return 
self._events", "def events(self):\n return EventsTable(self.rpc, self.name)", "async def _get_events_from_external_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = {}\n\n for event_id in events:\n ret = await self._get_event_cache.get_external(\n (event_id,), None, update_metrics=update_metrics\n )\n if ret:\n event_map[event_id] = ret\n\n return event_map", "def fetch_events(self):\n while 1:\n try:\n self.events_local.append(self._q.get(False))\n except queue.Empty:\n break", "def pull_event(self):\n self._buffer_buisy_mutex.acquire()\n event = None\n if self._events_buffer:\n event = self._events_buffer.pop(0)\n self._dilivered_events_stack.push(event.hash)\n self._buffer_buisy_mutex.release()\n if event:\n self.logger.info('Pulling new event: {}'.format(event))\n return event", "def test_get_events(self):\n events = gracedb.events()\n for event in events:\n self.assertTrue('graceid' in event)\n break", "def scrape_events(path, urls):\n seen_ids = set()\n result = []\n for url in urls:\n # Get all of the Network requests being sent out\n print(f'Processing {url}')\n driver.get(url)\n browser_log = driver.get_log('performance') \n events = [process_browser_log_entry(entry) for entry in browser_log]\n results = []\n # Find the Network request that sends a GET request to EventBrite API\n for event in events:\n if event['method'] == 'Network.responseReceived':\n # print(event)\n if 'event_ids' in event['params']['response']['url']:\n results.append(event)\n # Get the GET request URL\n get_url = \"\"\n # TODO: Sometimes returning 0 or more than 1... I'm not sure why :(\n if len(results) >= 1:\n get_url = results[0]['params']['response']['url']\n # Get the GET request response JSON\n json_response = get_request(get_url)\n event_list = json_response['events']\n # Find unique events in the response JSON \n unique_event_list = []\n for event in event_list:\n if event['id'] not in seen_ids:\n seen_ids.add(event['id'])\n unique_event_list.append(event)\n parsed_events = parse_event_page(unique_event_list)\n result.extend(parsed_events)\n else:\n print(results)\n print('yikes something went wrong')\n\n driver.close()\n return result\n # save_events(path, result)", "def slurp_events(self):\n while self.has_event():\n self.get_event()", "def get_sample_events(self): \n return self.sample_events[:]", "def eventList(self):\n return self._eventList", "def events(self) -> Sequence[Tuple[str, Sequence[Union[np.ndarray, bytes]]]]:\n return self._env.events()", "def _get_events_from_local_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = {}\n\n for event_id in events:\n # First check if it's in the event cache\n ret = self._get_event_cache.get_local(\n (event_id,), None, update_metrics=update_metrics\n )\n if ret:\n event_map[event_id] = ret\n continue\n\n # Otherwise check if we still have the event in memory.\n event = self._event_ref.get(event_id)\n if event:\n # Reconstruct an event cache entry\n\n cache_entry = EventCacheEntry(\n event=event,\n # We don't cache weakrefs to redacted events, so we know\n # this is None.\n redacted_event=None,\n )\n event_map[event_id] = cache_entry\n\n # We add the entry back into the cache as we want to keep\n # recently queried events in the cache.\n self._get_event_cache.set_local((event_id,), cache_entry)\n\n return event_map", "def get_events(self):\n\n url = '/v2.4/'+self.page_id+'/events'\n data = self.graph.request(url)\n\n while 
'next' in data['paging'].keys():\n print data['paging']['next']\n data = self.graph.request(url, args={\n 'limit' : 100,\n 'after' : data['paging']['cursors']['after']\n })\n\n return data", "def list_events(self, name):\n return self._get_events(name)", "def get():\n return jsonify({'events': 'Events API'}), 200", "def getEvent(self, timeout=None):\n socks = self.poller.poll(timeout)\n if not socks:\n return\n msg = socks[0][0].recv()\n d = self.mh.unserialize(msg)\n e = Event.fromDict(d)\n if self.store:\n _id = self.store.addEvent(e)\n e.id = _id\n return e", "def get_my_last_event(self):\r\n return self._handler.get_my_last_event()", "def nextEvents(self):\n # sort based on timestamp\n self.event_q.sort(key=lambda evt: evt.timestamp)\n\n # there may be events with same timestamp\n events = []\n earliest_ts = self.event_q[0].timestamp\n while len(self.event_q) > 0:\n if self.event_q[0].timestamp == earliest_ts:\n evt = self.event_q.pop(0)\n events.append(evt)\n else:\n break\n return events", "def get(self, request, group):\n event = group.get_latest_event()\n\n try:\n return client.get('/events/{}/'.format(event.id), request.user, request.auth)\n except client.ApiError as e:\n return Response(e.body, status=e.status)", "def get_events(self):\r\n return QtSql.QSqlQuery('''SELECT DISTINCT Event FROM presentations''')", "def get_events():\n\n all_calendar_events = {}\n\n # Suppress warning in logs\n # https://github.com/googleapis/google-api-python-client/issues/299\n service = build('calendar', 'v3', credentials=google_auth.creds, cache_discovery=False)\n\n now = datetime.datetime.utcnow().today().isoformat() + 'Z' # 'Z' indicates UTC time\n\n for calendar_name, calendar_id in config.GOOGLE_CALENDARS.items():\n all_events = []\n events_result = service.events().list(calendarId=calendar_id, timeMin=now,\n maxResults=10, singleEvents=True, orderBy='startTime').execute()\n events = events_result.get('items', [])\n if not events:\n all_events.append(['Ei tulevia tapahtumia'])\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))[:10]\n all_events.append([start, event[\"summary\"], event[\"htmlLink\"]])\n all_calendar_events[calendar_name] = all_events\n\n return all_calendar_events", "def get_all_events(cls):\n try:\n events = list(events_coll.find())\n events_list = []\n if events is not None:\n for event in events:\n one_event = cls(**event)\n events_list.append(one_event)\n return events_list\n except Exception as e:\n print(e)", "def get_events_batch() -> PayloadDictList:\n ...", "def update(self, events):\n events = events", "def get_all(self, q=None):\r\n q = q or []\r\n event_filter = _event_query_to_event_filter(q)\r\n return [Event(message_id=event.message_id,\r\n event_type=event.event_type,\r\n generated=event.generated,\r\n traits=event.traits)\r\n for event in\r\n pecan.request.storage_conn.get_events(event_filter)]", "def get_events(self, limit=10, query=None):\n\n conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)\n request = self.OPENFDA_API_EVENT + \"?limit=\" + str(limit)\n if query is not None:\n request += \"&\" + query\n conn.request(\"GET\", request)\n events_search = conn.getresponse()\n raw_data = events_search.read()\n events_str = raw_data.decode(\"utf8\")\n events = json.loads(events_str)\n events = events['results']\n\n return events", "def apigw_event():\n with open(\"events/event.json\") as json_file:\n return json.load(json_file)", "def events(self):\r\n return Events(self)", "def events(self):\r\n return 
Events(self)", "def events(self):\r\n return Events(self)", "def get_log_events(client, log_group):\n\n\tresp = client.filter_log_events(logGroupName=log_group, limit=10000)\n\treturn resp['events']", "def update_events(self, new_events):\n\n for new_event in new_events:\n self.__events.setdefault(new_event.type, []).append(new_event)", "def last_update(self):\r\n request = http.Request('GET', '/metadata/last_update.json')\r\n return request, parsers.parse_json", "def get_event(self, event_id):\n mask = \"\"\"mask[\n acknowledgedFlag,\n attachments,\n impactedResources,\n statusCode,\n updates,\n notificationOccurrenceEventType]\n \"\"\"\n return self.client.call('Notification_Occurrence_Event', 'getObject', id=event_id, mask=mask)", "def getListOfEvents(self):\n return self.model.getListOfEvents()", "def get_event(self):\r\n return self.events[0]", "def get_all(self):\r\n return list(pecan.request.storage_conn.get_event_types())", "def _default_events_fetcher(self):\n raise NotImplementedError", "def _default_events_fetcher(self):\n raise NotImplementedError", "def _callEventGetAll(self, callback_id, event_name):\n return self._event_client.eventGetAll(callback_id, event_name)", "async def watch_events(\n mock_kubernetes: MockKubernetesApi,\n namespace: str,\n *,\n resource_version: Optional[str] = None,\n) -> list[CoreV1Event]:\n method = mock_kubernetes.list_namespaced_event\n watch_args = {\n \"namespace\": namespace,\n \"timeout_seconds\": 10, # Just in case, so tests don't hang\n }\n if resource_version:\n watch_args[\"resource_version\"] = resource_version\n async with Watch().stream(method, **watch_args) as stream:\n seen = []\n async for event in stream:\n assert event[\"type\"] == \"ADDED\"\n obj = event[\"raw_object\"]\n seen.append(obj)\n if \"Done\" in obj.get(\"message\"):\n return seen\n return seen", "def fusion_api_get_events(self, uri=None, param='', api=None, headers=None):\n return self.event.get(uri=uri, api=api, headers=headers, param=param)", "def get(self, request, event_id):\n try:\n event = Event.objects.get(id=event_id)\n except Event.DoesNotExist:\n raise ResourceDoesNotExist\n\n self.check_object_permissions(request, event.group)\n\n Event.objects.bind_nodes([event], 'data')\n\n # HACK(dcramer): work around lack of unique sorting on datetime\n base_qs = Event.objects.filter(\n group_id=event.group_id,\n ).exclude(id=event.id)\n\n # First, we collect 5 leading/trailing events\n next_events = sorted(\n base_qs.filter(\n datetime__gte=event.datetime,\n ).order_by('datetime')[0:5],\n key=EVENT_ORDERING_KEY,\n )\n prev_events = sorted(\n base_qs.filter(\n datetime__lte=event.datetime,\n ).order_by('-datetime')[0:5],\n key=EVENT_ORDERING_KEY,\n reverse=True,\n )\n\n # Now, try and find the real next event.\n # \"next\" means:\n # * If identical timestamps, greater of the ids\n # * else greater of the timestamps\n next_event = None\n for e in next_events:\n if e.datetime == event.datetime and e.id > event.id:\n next_event = e\n break\n\n if e.datetime > event.datetime:\n next_event = e\n break\n\n # Last, pick the previous event\n # \"previous\" means:\n # * If identical timestamps, lesser of the ids\n # * else lesser of the timestamps\n prev_event = None\n for e in prev_events:\n if e.datetime == event.datetime and e.id < event.id:\n prev_event = e\n break\n\n if e.datetime < event.datetime:\n prev_event = e\n break\n\n try:\n user_report = UserReport.objects.get(\n event_id=event.event_id,\n project=event.project,\n )\n except UserReport.DoesNotExist:\n user_report 
= None\n\n data = serialize(event, request.user)\n data['userReport'] = serialize(user_report, request.user)\n data['release'] = self._get_release_info(request, event)\n\n if next_event:\n data['nextEventID'] = six.text_type(next_event.id)\n else:\n data['nextEventID'] = None\n if prev_event:\n data['previousEventID'] = six.text_type(prev_event.id)\n else:\n data['previousEventID'] = None\n\n return Response(data)", "def getevent(self, name):\n return self.events[name.lower()]", "def get_messages(self):\n\t\tcontents = self.archive.read_file('replay.message.events')\n\t\treturn self.protocol.decode_replay_message_events(contents)", "def extract_all_events(events):\n result = []\n for e in events:\n evt = IpuTraceEvent.FromString(e)\n result += [evt]\n return result", "def parse_events(response):\n\n if not request_was_successful(response):\n print('WARNING: Unsuccessful HTTP response from eventful')\n return []\n\n json = response.json()\n if json.get('events') is None:\n print(\"ERROR: No eventful results on page\")\n return []\n\n # parse the events into a list of Event objects\n # print(json)\n events = []\n events.extend(map(Event, json['events']['event']))\n return events", "def get_events(showcode=False):\n events = []\n import webscripts\n reload(webscripts)\n attributes = dir(webscripts.WebScripts)\n for attr in attributes:\n if attr.startswith(\"evn_\"):\n event = getattr(webscripts.WebScripts, attr)\n # Retrieve event name from method's doc\n evndescr = inspect.getdoc(event)\n event_data = {\"id\": attr, \"name\": evndescr}\n if showcode:\n body = WebEvent.get_event_code(attr)\n if body is not None:\n event_data[\"code\"] = body\n \n events.append(event_data)\n \n return {\"events\": events}", "def main():\n credentials = get_credentials()\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n max = 7\n events = getEvents(credentials, now, max)\n if not events:\n print('No upcoming events found.')\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n print(start, event['summary'])\n #addEvent(credentials)" ]
[ "0.64524996", "0.63650477", "0.6306866", "0.62387884", "0.6169891", "0.6169891", "0.6113987", "0.6093191", "0.60880923", "0.5969866", "0.5965109", "0.59017813", "0.5898376", "0.5892461", "0.58733714", "0.58715504", "0.5857285", "0.5807864", "0.5802426", "0.5778328", "0.5755623", "0.5742102", "0.5733224", "0.5730286", "0.57233346", "0.5723012", "0.5714389", "0.57073987", "0.57073987", "0.57013553", "0.5685535", "0.56748027", "0.56737924", "0.5645865", "0.5641717", "0.5635023", "0.56243783", "0.5615472", "0.5614774", "0.56089854", "0.5590961", "0.5585267", "0.55832416", "0.55826074", "0.55826074", "0.5580982", "0.55737877", "0.55578166", "0.5538377", "0.5530483", "0.5528475", "0.5521254", "0.5518024", "0.55159646", "0.55138385", "0.5511252", "0.5499488", "0.54801947", "0.5468829", "0.54663223", "0.5451981", "0.54342455", "0.54259175", "0.54151905", "0.5408418", "0.5405357", "0.54049057", "0.53941435", "0.538714", "0.5379345", "0.53770185", "0.5373668", "0.5373017", "0.53699064", "0.5368349", "0.5363281", "0.53572154", "0.53522384", "0.5343513", "0.5343513", "0.5343513", "0.5337645", "0.53237903", "0.53145677", "0.5312461", "0.5306804", "0.5300703", "0.5300337", "0.5297272", "0.5297272", "0.5284463", "0.5283035", "0.5277411", "0.5274479", "0.5268237", "0.5261806", "0.5259498", "0.52565163", "0.52539486", "0.52535236" ]
0.5839551
17
return info about recently added articles
def __init__(self, eventRegistry, maxCount = 500, mandatorySourceLocation = False, articleLang = None, returnInfo = ReturnInfo()):
        QueryParamsBase.__init__(self)
        assert maxCount <= 500, "Maximum number of articles returned per call is 500"
        self._er = eventRegistry
        self._setVal("addEvents", False)
        self._setVal("addArticles", True)
        self._setVal("recentActivityArticlesMaxArticleCount", maxCount)
        self._setVal("recentActivityArticlesMandatorySourceLocation", mandatorySourceLocation)
        if articleLang != None:
            self._setVal("recentActivityArticlesLang", articleLang)
        self._update(returnInfo.getParams("recentActivityArticles"))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_recently_articles(cls, num):\n return cls.objects.values('title', 'view_times', 'update_time', 'author')\\\n .filter(status=0).order_by('-update_time')[:num]", "def latest_content(request):\n latest_articles = Article.published_articles()[:5]\n latest_comments = Comment.objects.all().order_by('-pub_date')[:5]\n tags = Tag.objects.annotate(num_articles=Count('article')).order_by(\n '-num_articles')\n contributors = Contributor.objects.annotate(\n num_articles=Count('article')).order_by('-num_articles')\n return {'latest_articles': latest_articles,\n 'latest_comments': latest_comments,\n 'tags': tags,\n 'contributors': contributors,\n }", "def recently(self):\n items = []\n for item in self.p.entries:\n dt = datetime.fromtimestamp(mktime(item.published_parsed))\n delta = datetime.today() - dt\n\n if delta.days > self.days:\n continue\n items.append(item)\n if 'verbose' in self.args and self.args['verbose']:\n print delta.days, dt\n self.items = items\n return items", "def get_articles(self):\n\t\tarticles = Blog.objects.all()\\\n\t\t\t.filter(publication_date__lte=datetime.date.today())\\\n\t\t\t.order_by('publication_date')\n\t\ti = random.randint(0, articles.count()-1)\n\t\treturn articles, articles[i]", "def get_recent_news_items():\n news_item_count = request.args.get('newsItemCount') or 3\n try:\n animal_news = AnimalNews.get_printable_news_items_all_animals(news_item_count)\n return jsonify(message=animal_news), 200\n except Exception as e:\n print(e)\n return jsonify(message='{}'.format(e)), 501", "def recently_modified(request):\n pages = models.Page.all().order('modified').fetch(10)\n return utility.respond(request, 'admin/recently_modified', {'pages': pages})", "def recent(request):\r\n rdict = request.matchdict\r\n params = request.params\r\n\r\n # Make sure we generate a url to feed our rss link.\r\n current_route = request.current_route_url()\r\n\r\n # check for auth related stuff\r\n # are we looking for a specific user\r\n username = rdict.get('username', None)\r\n if username:\r\n username = username.lower()\r\n\r\n # do we have any tags to filter upon\r\n tags = rdict.get('tags', None)\r\n\r\n if isinstance(tags, str):\r\n tags = [tags]\r\n\r\n ret = {\r\n 'username': username,\r\n 'tags': tags,\r\n 'rss_url': current_route.replace('recent', 'rss')\r\n }\r\n\r\n # if we've got url parameters for the page/count then use those to help\r\n # feed the init of the ajax script\r\n ret['count'] = params.get('count') if 'count' in params else RESULTS_MAX\r\n ret['page'] = params.get('page') if 'page' in params else 0\r\n\r\n # Do we have any sorting criteria?\r\n ret['sort'] = params.get('sort') if 'sort' in params else None\r\n\r\n return ret", "def NewArticle(request):\n category_list = Category.objects.all().order_by('created_time')\n tag_list = Tag.objects.all().order_by('created_time')\n GetWebSiteInfo()\n dic = {'category_list':category_list, 'tag_list': tag_list, 'WebSiteInfo': WebSiteInfo}\n return render(request, \"blog/add_article.html\", dic)", "def get_artists_recent_added(session_):\n # artists = session_.query(Artist).order_by(Artist.name.asc()).paginate()\n artists = session_.query(Artist).order_by(Artist.added_at.asc()).all()\n return artists", "def articles(self):\n return self.get_queryset().filter(content_type__model='article').order_by('-articles__published_at')", "def get_queryset(self):\n return Article.objects.filter(pub_date__lte=timezone.now())", "def rt_recently_added():\n channels = api.get_channels()\n if request.method == 'GET':\n return 
html_helper.channel_form(channels)\n\n if request.method == 'POST':\n selected_channels = request.form.getlist('channel')\n eps_requested = request.form.get('episodes')\n pages_requested = request.form.get('pages')\n\n all_eps = []\n\n for channel in selected_channels:\n episodes = api.get_episode_pages(channel,\n channels[channel],\n pages_requested,\n eps_requested)\n\n all_eps.extend(episodes)\n\n episodes_html = []\n all_eps.sort(key=operator.itemgetter(3), reverse=True)\n for e in all_eps:\n episodes_html.append(html_helper.episode_html(e))\n ra_html = html_helper.recently_added_html(episodes_html)\n\n return ra_html", "def GET_front_recent_posts(self, *a, **kw):\r\n # Server side cache is also invalidated when new article is posted\r\n return self.render_cached('recent-promoted', RecentPromotedArticles, g.side_posts_max_age)", "def _fetch_latest_for_tag(self, tag, today):\n result = []\n url = Fetch163.search_link % urllib2.quote(tag.name.encode('utf8'))\n try:\n resp = urllib2.urlopen(url)\n except urllib2.URLError as e:\n urllib_error(e)\n else:\n doc = eval(resp.read())\n if doc and type(doc) is list:\n if today:\n news_today = self._today_filter(doc, delta=2)\n else:\n news_today = doc\n for d in news_today:\n docid = d.get('docid', '')\n #title = u'%s' % d.get('title', '')\n # the d.get('title') is a unicode string represent by\n # python str, so use unicode-escape to decode it.\n title = d.get('title', '')\n #print type(title)\n news_title = self._trans_title(title)\n if docid and title:\n news_exits = News.objects.filter(\n Q(docid=docid) | Q(title=news_title)\n )\n #print docid, news_title, news_exits\n intro, body, c_num, ptime, pic = self._fetch_news(docid)\n if not news_exits:\n print 'new news', news_title, docid\n news = News()\n news.docid = docid\n news.title = news_title\n news.content = body\n news.tag = tag\n news.comment_num = c_num\n news.list_pic = pic\n news.abstract = intro\n news.update_time = ptime\n news.save()\n import time\n time.sleep(2)\n if news:\n result.append(news)\n else:\n print 'update news', news_title\n n = news_exits[0]\n print 'old:', n.comment_num, 'new:', c_num\n n.comment_num = c_num\n n.save()\n else:\n print 'Fetch news for tag: %s, Error' % tag.name\n\n return result", "def get_queryset(self):\n return Article.objects.filter(\n pub_date__lte=timezone.now()\n ).order_by('-pub_date')[:5]", "def get_top_articles(update=False):\n # use caching to avoid running unnecessary DB queries at each page load\n key = 'top_ten'\n articles = memcache.get(key)\n\n logging.warn('MEMCACHE | Wiki articles %s' % str(articles))\n\n if (articles is None) or (len(articles) == 0) or update:\n # necessary artificial delay when a new article has just been persisted to the datastore\n if update:\n time.sleep(2)\n\n articles = db.GqlQuery('SELECT * FROM Article ORDER BY updated DESC LIMIT 10')\n articles = list(articles)\n memcache.set(key, articles)\n\n logging.warn('DATASTORE | Wiki articles count %s' % str(len(articles)))\n return articles", "def topic_recent(request):\n posts = Post.objects.all().order_by(\"-created\")[:3]\n posts = mk_paginator(request, posts, DJANGO_SIMPLE_FORUM_REPLIES_PER_PAGE)\n # topic = Topic.objects.get(pk=topic_id)\n return render_to_response(\"forum/topic_recent.html\", add_csrf(request, posts=posts), context_instance=RequestContext(request))", "def top_news():\n data = get_top_news()\n return jsonify(data)", "def popular_articles():\n query = \"\"\"SELECT articles.title,count(*) AS total_views FROM articles,log WHERE log.path like 
concat('/article/',articles.slug)\n group by articles.title order by total_views desc limit 3\"\"\"\n result = get_data(query)\n print(\" 1. The most popular three articles of all time:\")\n print(\"\")\n for record in result :\n print(' ' + '\\\"' + str(record[0]) + '\\\"' + '-' + ' ' + str(record[1]) + ' '+ 'views')\n print(\" \")", "def new_article(self, article: Article):\n if self.view.current_feed is not None and self.view.current_feed.db_id == article.feed_id:\n if self.view.header().sortIndicatorOrder() == qtc.Qt.AscendingOrder:\n operation = operator.gt\n else:\n operation = operator.lt\n section = [\"title\", \"author\", \"updated\"][self.view.header().sortIndicatorSection()]\n\n i = next((i for (i, v) in enumerate(self.articles) if operation(getattr(article, section), getattr(v, section))), len(self.articles))\n self.beginInsertRows(qtc.QModelIndex(), i, i + 1)\n self.articles.insert(i, article)\n self.endInsertRows()", "def GET(self, *args):\n all_news= self.get_all_news()\n all_news.sort( key=lambda n : n['date'], reverse=True)\n if len(args):\n n_last=int(args[0])\n all_news = all_news[:n_last]\n\n return json.dumps(all_news)", "def recent(self, page=None, per_page=None):\r\n url = '{0}/{1}'.format(self.get_url(), 'recent')\r\n params = base.get_params(('page', 'per_page'), locals())\r\n\r\n return http.Request('GET', url, params), parsers.parse_json", "def articles(self):\n articles = Post.objects.live().descendant_of(self)\n articles = articles.order_by('-date')\n\n return articles", "def author_articles(self):\n return ArticlePage.objects.live().filter(author=self).order_by('-date')", "def show_news_list():\r\n\tnews_list = Page.objects.filter(tags='news').order_by('-created')\r\n\treturn {'news_list': news_list}", "def recent():\n # type: () -> None\n data = locs.recents.retrieve()\n for href in data:\n data = locs.get_json(href)\n add_menu_item(\n play_film,\n data.get(\"item\", {}).get(\"title\").title(),\n {\"href\": href},\n locs.get_art(data),\n locs.get_info(data),\n False)\n xbmcplugin.setPluginCategory(plugin.handle, ku.localize(32005)) # Recent\n xbmcplugin.endOfDirectory(plugin.handle)", "def newsList(request):\n\n news_count = New.objects.count() # Pocet vsech zaznamu novinek\n news_list = New.objects.all().order_by(\"date\") # Sort by date ... 
and only part of list\n # misto vsech zaznamu ziskat jen ty v intervalu start - stop -> API\n\n pictureOfWeek = PhotoOfWeek.objects.last()\n context = {'news_list': news_list, 'news_count': news_count, 'pictureOfWeek': pictureOfWeek}\n return render(request, 'news/newsList.html', context)", "def getLatestStories(self, newest, alreadyReadList):\n\t\turl = \"http://news.ycombinator.com\"\n\t\tif newest == \"newest\":\n\t\t\turl += \"/newest\"\n\t\tsource = self.getSource(url)\n\t\tstories = self.getStories(source, alreadyReadList)\n\t\treturn stories", "def get_news(url):\r\n \r\n # parse RSS feed into list of dictionaries\r\n feed = feedparser.parse(url)\r\n\r\n # no RSS feed articles for url\r\n if len(feed['entries']) == 0:\r\n return []\r\n \r\n # get first ten articles from the RSS feed\r\n news = []\r\n i = 0\r\n while True:\r\n if i == len(feed['entries']) or i > 30:\r\n break\r\n \r\n try:\r\n # get link to article\r\n link = feed[\"entries\"][i][\"link\"]\r\n\r\n # get title of article\r\n title = feed[\"entries\"][i][\"title\"]\r\n \r\n try:\r\n # get raw summary of article\r\n summary_raw = feed[\"entries\"][i][\"summary\"]\r\n \r\n # format summary\r\n summary = \"\"\r\n for c in summary_raw:\r\n if c == \"<\":\r\n summary += \"...\"\r\n break\r\n summary += c\r\n except KeyError as e:\r\n logging.error(\"no summary for RSS feed article: {}\".format(link))\r\n summary = \"read more here...\"\r\n \r\n # get raw date \r\n date_raw = feed[\"entries\"][i][\"published_parsed\"]\r\n \r\n if date_raw is None:\r\n date = feed[\"entries\"][i][\"published\"]\r\n \r\n else:\r\n # format date\r\n year = str(date_raw.tm_year)\r\n months = [\"January\", \"February\", \"March\", \"April\", \"May\", \"June\", \"July\", \"August\", \"September\", \"October\", \"November\", \"December\"]\r\n month = months[date_raw.tm_mon - 1]\r\n day = str(date_raw.tm_mday)\r\n weekdays = [\"Monday\", \"Tuesday\", \"Wednesday\", \"Thursday\", \"Friday\", \"Saturday\", \"Sunday\"]\r\n wday = weekdays[date_raw.tm_wday]\r\n hour = str(date_raw.tm_hour)\r\n hour = \"{:2}\".format(hour).format(' ','0')\r\n min = str(date_raw.tm_min)\r\n min = \"{:2}\".format(min).replace(' ','0')\r\n date = hour + \":\" + min + \" - \" + wday + \" \" + month + \" \" + day + \", \" + year\r\n \r\n # compile entry and append to news list\r\n entry = {\"link\":link, \"title\":title, \"date\":date, \"summary\":summary}\r\n \r\n # sanitize entry\r\n for key in entry:\r\n # apostrophe\r\n entry[key] = entry[key].replace(\"&#39;\", \"'\")\r\n # right single quotation mark\r\n entry[key] = entry[key].replace(\"’\", \"&#8217;\")\r\n # left single quotation mark\r\n entry[key] = entry[key].replace('\"', \"&#8216;\")\r\n # right double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8221;\")\r\n # left double quotation mark\r\n entry[key] = entry[key].replace(\"'\", \"&#8220;\")\r\n # Weird ampersand formatting\r\n entry[key] = entry[key].replace(\"&amp;\", \"&\")\r\n \r\n # prepare entry for sqlite queries\r\n entry[key] = surround(entry[key])\r\n \r\n # add entry to news list\r\n news.append(entry)\r\n \r\n # max 10 entries\r\n if len(news) == 10:\r\n break\r\n i += 1\r\n \r\n except Exception as e:\r\n logging.error(e)\r\n i += 1\r\n pass\r\n \r\n # success\r\n return news", "def articles():\n \n # Parse through RSS feed of Get Rich Slowly\n feed = feedparser.parse(\"http://www.getrichslowly.org/blog/feed/\")\n \n # Get current username\n username = get_user()\n \n return render_template(\"articles.html\", username=username, 
feed=feed)", "def get_latest_content():\n\n latest_content = {}\n latest_content['all'] = ContentItem.objects.all().order_by('updated_at')[:4]\n latest_content['ga'] = ContentItem.objects.filter(tags__name='Geeks Abroad').order_by('updated_at')[:4]\n latest_content['gaming'] = ContentItem.objects.filter(tags__name='Gaming').order_by('updated_at')[:4]\n latest_content['osalt'] = ContentItem.objects.filter(tags__name='OS.Alt').order_by('updated_at')[:4]\n latest_content['sqa'] = ContentItem.objects.filter(tags__name='Squirrel Army').order_by('updated_at')[:4]\n\n return latest_content", "def dashboard_content_article_tag_cloud():\n tag_stats = dict()\n past_30 = offset_time_past(30, str=True)\n articles = mongo.db[app.config['ARTICLES_COLLECTION']]\n results = articles.find({'collected': {'$gt': past_30}}, {'_id': 0})\n for result in results:\n for tag in result.get('tags', list()):\n tag_stats[tag] = tag_stats.get(tag, 0) + 1\n tags_sorted = sorted(tag_stats.items(), key=operator.itemgetter(1),\n reverse=True)[:50]\n data = list()\n for item in tags_sorted:\n data.append({'name': item[0], 'weight': item[1]})\n return jsonify(data)", "def articleList():\n articles = get_news(\n 5, since=news.YESTERDAY.strftime(\"%yyyy-%mm-%dd\"), query=\"covid\"\n )\n title_list = []\n desc_list = []\n url_list = []\n image_list = []\n source_list = []\n for art in articles:\n image_list.append(art.image)\n title_list.append(art.title)\n source_list.append(art.source)\n desc_list.append(art.description)\n url_list.append(art.url)\n socketio.emit(\n ARTICLE,\n {\n \"title\": title_list,\n \"desc\": desc_list,\n \"url\": url_list,\n \"img\": image_list,\n \"sources\": source_list,\n },\n )\n return True", "def get_featured_articles(request):\n try:\n count = 1\n if 'count' in request.POST and int(request.POST['count']):\n count = int(request.POST['count'])\n\n newest_list = []\n for article in Article.objects.order_by('-modified')[:count]:\n newest_list.append(article.dump_to_dict())\n\n popular_list = []\n for article in Article.objects.order_by('-views')[:count]:\n popular_list.append(article.dump_to_dict())\n\n return format_ajax_response(True, \"Featured articles retrieved successfully.\", {'newest': newest_list,'popular': popular_list})\n except Exception as ex:\n logger.error(\"Failed to get_featured_articles: %s\" % ex)\n return format_ajax_response(False, \"There was an error retrieving the featured articles.\")", "def get_news(self):\n if self.api_key_entry.get() == \"\":\n return None\n api = nac(api_key=self.api_key_entry.get())\n now = datetime.datetime.utcnow()\n two_weeks = (now-datetime.timedelta(days=14))\n #today = now.strftime()\n query = \"\"\n for cat in self.sorted_categories():\n query += f\"{cat},\"\n search = api.get_top_headlines(q=query,\n sources=\"bbc-news,the-verge\",\n language=\"en\")\n news = \"\"\n for article in search[\"articles\"]:\n news += f\"{search['articles'][article]['title']}\\n\"\n self.news_box.delete('1.0', tk.END)\n self.news_box.insert('1.0', news)", "def NewsArticles():\n health_articles = get_articles('health')\n education_articles = get_articles('technology')\n return render_template('articles.html',health=health_articles, tech =education_articles)", "def recieve_new_articles(self, articles: list[Article], feed_id: int) -> None:\n pass", "def _get_new_article(pages):\n date = arrow.now().replace(days=-30).format('YYYY-MM-DD')\n pages = [p for p in pages if p.created > date]\n\n skips = [p for p in pages if 'scp' in p.tags and p.rating >= 40]\n tales = [p for p 
in pages if 'tale' in p.tags and p.rating >= 20]\n goi = [p for p in pages if 'goi-format' in p.tags and p.rating >= 20]\n pages = skips + tales + goi\n\n return random.choice(pages) if pages else None", "def newsfeed(request):\n article_list = Article.objects.order_by('published_date')\n context = {'article_list': article_list}\n return render(request, 'sacms/newsfeed.html', context)", "def get_popular_articles():\n db = psycopg2.connect(database=DBNAME)\n c = db.cursor()\n query_popular_articles = \"\"\"\n SELECT art.title, COUNT(lg.id) as views\n FROM articles as art\n JOIN log as lg\n ON art.slug = substring(lg.path,10)\n AND lg.status = '200 OK'\n GROUP BY art.title\n ORDER BY views desc\n LIMIT 3; \"\"\"\n c.execute(query_popular_articles)\n articles = from_db_cursor(c)\n db.close()\n return articles", "def get_top_news_and_the_rest(self):\n queryset = self.news.order_by('-marked', '-publication_date')\n return queryset.first(), queryset[1:]", "def feed_entries(self):\n date_format = \"%Y-%m-%dT%H:%M:%SZ\"\n entries = self.mapper.list_entries(limit=10)\n if entries:\n updated = max([e.updated for e in entries]).strftime(date_format)\n else:\n updated = datetime.utcnow().strftime(date_format)\n return {\"entries\": entries, \"updated\": updated}", "def articles(self):\r\n return articles.Articles(self)", "def execute_news(date, connection, url, logger):\n cursor = connection.cursor()\n if url:\n logger.info(f\"Retrieves news for the selected url ({url}) from database...\")\n cursor.execute('SELECT title, link, full_date, source, description, image, url FROM news WHERE date=:date '\n 'and url=:url', {'date': date, 'url': url})\n else:\n cursor.execute('SELECT title, link, full_date, source, description, image, url FROM news WHERE date=:date',\n {'date': date})\n records = cursor.fetchall()\n articles = []\n for title, link, full_date, source, description, image, url in records:\n articles.append(Article(title, link, full_date, source, description, image))\n return articles", "def articles():\n entries = []\n cur = g.db.execute(\n \"\"\"\n SELECT entries.location FROM categories\n INNER JOIN entries ON\n entries.slug = categories.slug AND\n entries.published = categories.published\n WHERE categories.category='{category}'\n ORDER BY entries.published DESC\n \"\"\".format(category='article'))\n\n for (row,) in cur.fetchall():\n if os.path.exists(row+\".md\"):\n entries.append(file_parser(row+\".md\"))\n return render_template('blog_entries.html', entries=entries)", "def nfldotcomnews(self, irc, msg, args):\n \n url = self._b64decode('aHR0cDovL3MzLmFtYXpvbmF3cy5jb20vbmZsZ2MvYWxsX25ld3NMaXN0Lmpz')\n \n try:\n req = urllib2.Request(url)\n html = (urllib2.urlopen(req)).read()\n except:\n irc.reply(\"Failed to fetch: %s\" % url)\n return\n \n try:\n jsondata = json.loads(html)['content']\n except:\n irc.reply(\"Failed to parse article json from: %s\" % url)\n return\n \n for article in jsondata[0:6]:\n title = article.get('title', None)\n desc = article.get('description', None)\n link = article.get('linkURL', None)\n date = article.get('date_ago', None)\n \n output = \"{0} - {1}\".format(ircutils.bold(title), self._shortenUrl(link))\n irc.reply(output)", "def getUpdates(self):\n # execute the query\n ret = self._er.execQuery(self)\n\n if ret and ret.get(\"recentActivity\") and ret[\"recentActivity\"].get(\"articles\"):\n # return the latest articles\n return ret[\"recentActivity\"][\"articles\"][\"activity\"]\n # or empty\n return []", "def all_news(request):\n\n all_news = 
News.objects.all().order_by(\"-date_added\")\n context = {\n 'news': all_news,\n 'show_without_bag': True\n }\n return render(request, 'news/news.html', context)", "def recent():\n # type: () -> None\n data = iwm.get_recent()\n for item in data:\n details = iwm.get_info(item[\"uri\"])\n add_menu_item(\n play_film,\n details[\"title\"],\n args={\"href\": \"/record/{}\".format(item[\"uri\"].split(\"/\")[-1:][0])},\n info=details[\"info\"],\n art=details[\"art\"],\n directory=False)\n xbmcplugin.setPluginCategory(plugin.handle, ku.localize(32005)) # Recent\n xbmcplugin.endOfDirectory(plugin.handle)", "def get_albums_recent_added(session_):\n artists = session_.query(Album).order_by(Album.added_at.desc()).all()\n return artists", "def _get_article_identifiers(self, feed_id: int) -> Dict[str, datetime]:\n articles = {}\n with self._sqlite_connection:\n for article in self._sqlite_connection.execute('''SELECT identifier, updated FROM articles WHERE feed_id = ?''', [feed_id]):\n articles[article['identifier']] = datetime.fromtimestamp(article['updated'], timezone.utc)\n return articles", "def get_saved_news(user, origin=\"saved\"):\n \n saved_news_feed = []\n \n for entry in user.saved_news:\n article = entry.article\n print(article.article_id)\n article_item = {\n \"title\" : article.title,\n \"image\" : article.image,\n \"description\" : article.description,\n \"content\" : article.content,\n \"pub_date\" : article.pub_date.strftime(\"%m/%d/%Y\"),\n \"news_url\" : article.news_url,\n \"saved_news_id\" : entry.id,\n \"origin\" : origin,\n \"note\" : entry.notes,\n \"article_id\" : article.article_id\n }\n saved_news_feed.append(article_item)\n print(saved_news_feed)\n return saved_news_feed", "def get(self):\n return {\"newest_msg\": newest_msg()}", "def detail(request, article_id):\n try:\n article = Article.objects.get(pk=article_id)\n except Member.DoesNotExist:\n raise Http404(\"Article does not exist\")\n #article_list = Article.objects.order_by('-released_at')[:5]\n login = request.user and request.user.is_authenticated()\n article_list = get_article_list('-released_at',login)[:5]\n auth_form = AuthenticationForm(None, request.POST or None)\n return render(request, 'app/article_detail.html', { \n 'title':'ニュースの詳細',\n 'year':datetime.now().year,\n 'articles':article_list,\n 'blogs':EntryView.get_entry_list('-posted_at',-1, -1 if not login else request.user.pk )[:5],\n 'article': article,\n 'auth_form':auth_form,\n 'current_user':request.user,\n }\n )", "def recent(self, page_num):\n page_count = db.get_page_count()\n if not (0 < page_num <= page_count):\n return web.notfound('No such page.')\n return render.recent(\n page_num=page_num,\n page_count=page_count,\n pastes=db.get_paste_list(page_num))", "def article_list(request):\n try:\n logger.info('Calling the api' + APIURL + '/articles/?format=json&limit=' + str(COUNT))\n response = requests.get(APIURL + '/articles/?format=json&limit=' + str(COUNT))\n parser = json.loads(response.content)\n preview_article = random_article(parser)\n next_read = read_next()\n return render(request, 'article/article_list.html', {'articlelist':parser, 'preview_article': preview_article, 'next_read': next_read})\n except:\n logger.error('Calling the api error in article_list')\n raise Http404(\"Article does not exist\")", "def blog():\n \n articles = mongo.db.articles.find().sort('date',pymongo.DESCENDING)\n return render_template('pages/blog.html',\n title='Blog', \n articles=articles,\n legend='Read the latest articles'\n )", "def popular_authors() :\n query 
= \"\"\"SELECT authors.name,count(*) AS total_views FROM authors,articles,log WHERE log.path like concat ('/article/',articles.slug)\n AND articles.author=authors.id group by authors.name order by total_views desc\"\"\"\n result = get_data(query)\n print(\" 2. The most popular articles authors of all time:\")\n print(\"\")\n for record in result :\n print(' ' +' ' + str(record[0]) + ' -' + ' ' + str(record[1]) + ' ' +'views')\n print(\" \")", "def home(request):\n\n posts = Post.objects.filter(published=True)\n latest = 0\n if posts:\n latest = Post.objects.latest('updated').unix_time()\n\n return render(request, 'posts/home.html', {'posts':posts, 'latest':latest})", "def get_news() -> None:\r\n api_key = get_api_key()\r\n country = 'gb'\r\n url = 'https://newsapi.org/v2/top-headlines?country={}&apiKey={}' \\\r\n .format(country, api_key)\r\n new_news = requests.get(url).json()\r\n with open('news.json', 'r') as news_file:\r\n try:\r\n old_news = json.load(news_file)\r\n except Exception as error:\r\n log_warning(error)\r\n # Checks if the news is new or the same as the news already stored\r\n # in news.json.\r\n for i in range(5):\r\n if old_news['articles'][i] != new_news['articles'][i]:\r\n news_notification = ({'timestamp': \\\r\n time.strftime('%H:%M:%S'),\r\n 'type': 'News',\r\n 'title': new_news \\\r\n ['articles'][i]['title'],\r\n 'description': ''})\r\n news_log = ({'timestamp': time.strftime('%H:%M:%S'),\r\n 'type': 'news',\r\n 'description': 'New news articles' \\\r\n + new_news['articles'][i]['title'],\r\n 'error': ''})\r\n new_notification(news_notification)\r\n log_info(news_log)\r\n # RuntimeError caused when text to speech is already\r\n # currently playing something else.\r\n try:\r\n tts('New news story.' \\\r\n + new_news['articles'][i]['title'])\r\n except RuntimeError:\r\n log_error(RuntimeError)\r\n\r\n with open('news.json', 'w') as news_file:\r\n json.dump(new_news, news_file, indent=2)\r\n # Start the timer to run this function every 60 seconds.\r\n Timer(60, get_news).start()", "def get_hots_articles(cls, num):\n return cls.objects.values('id', 'title', 'view_times', 'update_time', 'author').\\\n filter(status=0).order_by('-view_times'\n )[:num]", "def list_articles():\n\n return template(\"index\", articles=get_articles())", "def news():\n\n # ensure parameters are present\n # geo = request.args.get(\"geo\")\n geo = '95060'\n if not geo:\n raise RuntimeError(\"missing geo\")\n\n # lookup articles and store them as JSON array\n article_list = lookup(geo)\n\n # TODO\n print(article_list)\n news = jsonify(article_list) \n print(news)\n # return render_template(\"index.html\")\n return article_list", "def todo_added(name, description):", "def get_information(article_link):\n\n if \"video\" in article_link or \"/apps/\" in article_link or \"checknews\" in\\\n article_link or not re.search(r\"\\d\\d\\d\\d/\\d\\d/\\d\\d\", article_link):\n return None\n\n else:\n\n date_article = re.search(r\"\\d{4}/\\d{2}/\\d{2}\", article_link)[0]\n date_article = date.datetime.strptime(date_article, \"%Y/%m/%d\")\n\n diff_date = date.datetime.now() - date_article\n\n if diff_date.days > 7:\n return None\n\n else:\n req = requests.get(article_link)\n req.encoding = \"utf-8\"\n data = req.text\n soup = BeautifulSoup(data, \"lxml\")\n\n if soup.find(\n \"div\",\n class_=\"direct-headband\") or article_link != req.url:\n return None\n else:\n balise_title = soup.find(\"h1\")\n balise_title = balise_title.get_text()\n balise_title = re.sub(r\"\\s\\s+\", \"\", balise_title)\n\n newspaper = 
\"Liberation\"\n title = unidecode.unidecode(balise_title)\n\n author = \"\"\n for span in soup.find_all('span'):\n if span.get(\"class\") == ['author']:\n if(span.a):\n author = span.a.string\n if span.get(\"class\") == ['date']:\n if(span.time):\n date_p = date.datetime.strptime(\n span.time.get(\"datetime\"), \"%Y-%m-%dT\" +\n \"%H:%M:%S\").date()\n date_p = date_p.strftime(\"%Y-%m-%d\")\n print(date_p)\n\n content = \"\"\n for div in soup.find_all('div'):\n for p in div.find_all('p'):\n content += p.get_text() + \" \"\n content = re.sub(\"<>\", \"\", content)\n content = unidecode.unidecode(content)\n\n new_article = utils.recovery_article(\n title, newspaper, [author], date_p, content, \" \")\n\n return new_article", "def articles(self):\n return articles.Articles(self)", "def __init__(self, title):\n\n self.__recent = []\n self.__lasturl = ''", "def get_article_info(elem: str) -> ArticleInfo:\n\n headers = {\n 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) '\n 'Chrome/53.0.2785.143 Safari/537.36 '\n }\n\n time.sleep(60)\n print(f\"Collecting info from link {elem}...\")\n\n html_content = requests.get(elem, headers=headers).content\n soup = BeautifulSoup(html_content, 'lxml')\n\n base = 'div.grid-container div.single-post-grid div'\n\n title_query = f'{base} div.post-header div.post-header-container div.post-header-title div.the_title'\n\n title = soup.select_one(title_query).get_text()\n\n text_query = f'{base} div.post-inside div.post-content p'\n text_parts = []\n\n for elem1 in soup.select(text_query):\n temp_text = elem1.get_text().replace(\n '<strong>', ''\n ).replace(\n '</strong>', ''\n )\n\n text_parts.append(temp_text)\n\n full_text = ' '.join(text_parts)\n\n tags_query = f'{base} div.post-inside div.post-content div.tags a'\n tags = []\n\n for elem1 in soup.select(tags_query):\n tags.append(elem1.get_text())\n\n article = ArticleInfo(\n url=elem,\n title=title,\n text=full_text,\n keywords=tags\n )\n\n print(article)\n\n return article", "def latestEntriesRss():\n now = datetime.now()\n latestEntries = session.query(Pokemon).order_by(desc(Pokemon.date_entered))\\\n .limit(20)\n rss = render_template('rss.xml', lastBuildDate=now, entries=latestEntries)\n response = make_response(rss)\n response.headers[\"Content-Type\"] = \"application/xml\"\n return response", "def EditArticle(request, article_id):\n category_list = Category.objects.all().order_by('created_time')\n tag_list = Tag.objects.all().order_by('created_time')\n article = Article.objects.get(id=article_id)\n\n GetWebSiteInfo()\n dic = {'category_list':category_list, 'tag_list': tag_list, 'article': article, 'WebSiteInfo': WebSiteInfo}\n return render(request, \"blog/edit_article.html\", dic)", "def getNewestStories(self):\n source = self.getSource(\"http://news.ycombinator.com/newest\")\n stories = self.getStories(source)\n return stories", "def summary(self, *args, **kwargs):\n article = self.get_object()\n summary_data = self.get_serializer(article).data\n\n keywords = summary_data['keywords']\n related_articles = \\\n Article.objects.filter(Q(keywords__contains=keywords[:1])\n | Q(keywords__contains=keywords[1:2])\n | Q(keywords__contains=keywords[2:3])) \\\n .order_by('-publish_time')[:11] \\\n .values('identifier', 'title', 'images', 'site_name', 'domain', 'publish_time')\n\n related_articles = [related for related in list(related_articles)\n if related['identifier'] != article.identifier]\n\n summary_data['related'] = related_articles\n\n return 
Response(summary_data)", "def get_article(self):\n return self.article", "def historical():\n\n return {\n 'page': 'historical',\n }", "def refresh_added_date(self) -> None:\n self.date_added = datetime.now()", "def articles(self):\r\n return Articles(self)", "def latest_blog_posts(self, request, *args, **kwargs):\n context = self.get_context(request, *args, **kwargs)\n context[\"latest_posts\"] = MyblogDetailPage.objects.live().public()[:1] \n return render(request, \"myblog/latest_posts.html\", context)", "def get_featured_content():\n\n return FeatureHistory.objects.filter(featured=True).order_by('updated_at')[:3]", "async def get_news(q: str = None):\n\treturn aggregate_news(q)", "def news()->str:#return array[news desc,news link]\n event_log(\"retrieve news data....\",\"\")\n c = 0\n location = read_json(\"news_api\")[0]\n main_url = \"https://newsapi.org/v2/top-headlines?country=\"+location+\"&apiKey=\"+read_json(\"news_api\")[1]+\"\"#add a country selection optin via json\n page = requests.get(main_url).json()\n article = page[\"articles\"]\n news_result = []\n for data in article:\n news_result.append([data[\"title\"],str(data[\"url\"]).replace('\"',\" \")])#exctracts the wanted data from api\n if c == 5:#add this to json file so scalibility\n break\n c+=1\n return news_result", "def getPopularArticles():\n db = psycopg2.connect(\"dbname=news\")\n c = db.cursor()\n c.execute(\" select count (*) as views, title from articles \"\n + \"left join \"\n + \"log on concat('/article/', articles.slug) = log.path \"\n + \"group by title order by views desc limit 3\")\n views = c.fetchall()\n db.close()\n return views", "def tips():\n category = list(mongo.db.tips.find().sort(\"tip_date\", -1))\n return render_template(\"tips.html\", category=category)", "def most_viewed_articles():\n query = \"\"\"\n SELECT articles.title, COUNT(*) AS views\n FROM articles\n JOIN log\n ON log.path = '/article/' || articles.slug\n WHERE log.status ='200 OK'\n GROUP BY articles.title ORDER BY views DESC LIMIT 3;\n \"\"\"\n results = psql_connection(query)\n\n print(\"Most viewed articles:\")\n for result in results:\n print '{article} - {count} views'.format(\n article=result[0], count=result[1])", "def get_most_popular_articles():\n\tdb = psycopg2.connect(database=DBNAME)\n\tc = db.cursor()\n\tc.execute(\"select t2.title, count(*) as total from log as t1,articles as t2 where t1.path=concat('/article/',t2.slug) group by t2.title order by total desc limit 3 ;\")\n\tdata = c.fetchall()\n\tdb.close()\n\treturn data", "def GET_side_posts(self, *a, **kw):\r\n # Server side cache is also invalidated when new article is posted\r\n return self.render_cached('side-posts', RecentArticles, g.side_posts_max_age)", "def get_one_news(self): # pylint: disable=no-self-use\n return operations.get_one_news()", "def articleAlreadyStored(self, curr_url):\n\n table = self.dynamodb.Table('Articles-Table') \n\n try:\n response = table.get_item(\n Key={\n 'article-url': curr_url\n }\n )\n except Exception as e:\n log_line = \"Failed to access DynamoDB table: Articles-Table with following exception:\\n\"\n log_line += str(e)\n self.logger.writeToLog(log_line, False)\n log_line = \"Exited prematurely at: \"\n self.logger.writeToLog(log_line, True)\n exit(0)\n else:\n try:\n item = response['Item']\n except:\n return False # no item means article doesn't exist in database\n else:\n self.curr_url_stored_time = datetime.datetime.strptime(item[\"most-recent-update\"], \"%d/%m/%Y, %H:%M:%S\")\n return True", "def recent(perpage = 500):\n 
photos = request(\"flickr.photos.getRecent\", {\n \"per_page\": perpage, \n \"extras\": all_extras})\n for photo in photos.getiterator(\"photo\"):\n yield Photo.fromapi(photo.attrib)", "def recent_comic_titles():\r\n\treturn [comic.title for comic in Comic.objects.all().order_by('-created_on')[0:10]]", "def list(request):\n assert isinstance(request, HttpRequest)\n login = request.user and request.user.is_authenticated()\n article_list = get_article_list('-released_at',login)\n page_no = request.GET.get('page')\n page = _get_page(article_list, page_no, ARTICLER_LIST_PAGE_IN_COUNT )\n auth_form = AuthenticationForm(None, request.POST or None)\n return render(\n request,\n 'app/article_list.html',\n {\n 'title':'ニュース一覧',\n 'year':datetime.now().year,\n 'articles':article_list[:5],\n 'blogs':EntryView.get_entry_list('-posted_at',-1, -1 if not login else request.user.pk )[:5],\n 'contents':range(1,6),\n 'article_list':page.object_list,\n 'auth_form':auth_form,\n 'current_user':request.user,\n 'page' : page,\n 'current_page':request.path #'article_list'\n }\n )", "def main():\n print get_latest_data()", "def get_monitor_details():\n monitor_id = paranoid_clean(request.args.get('id'))\n monitors = mongo.db[app.config['MONITORS_COLLECTION']]\n monitor = monitors.find_one({'hashed': monitor_id}, {'_id': 0})\n if not monitor:\n return jsonify({'success': False, 'error': 'Monitor was not found.'})\n articles = mongo.db[app.config['ARTICLES_COLLECTION']]\n link = monitor['metadata']['rss_link']\n articles = list(articles.find({'feed_source': link}, {'_id': 0}))\n for idx, item in enumerate(articles):\n articles[idx]['title'] = html.unescape(item['title'])\n articles[idx]['date'] = item['collected'][:10]\n articles.sort(key=lambda x: x['collected'], reverse=True)\n return jsonify({'success': True, 'monitor': monitor, 'articles': articles})", "def get_list_articles(burl, max_list, section_header, category):\n ret = ''\n doc_cat = category\n title = ''\n uid = 0\n l_latest_articles = section_header\n \n ret = '<span class=\"sectiont\"><i class=\"fas fa-file-alt\"></i>&nbsp;'+\\\n l_latest_articles +'</span>'\n \n connection = pymysql.connect(host=DB_SRV,\n user=DB_USR,\n password=DB_PWD,\n db=DB_NAME,\n charset='utf8mb4',\n cursorclass=pymysql.cursors.DictCursor)\n cursor = connection.cursor(pymysql.cursors.SSCursor)\n sql = 'SELECT uid, title, '+\\\n '(SELECT ROUND((UNIX_TIMESTAMP() - UNIX_TIMESTAMP(date)) / 60) ) AS elapsed_time '+\\\n 'FROM documents WHERE category LIKE \"%'+ str(doc_cat) +'%\" '+\\\n 'ORDER BY date DESC LIMIT ' + str(max_list)\n cursor.execute(sql)\n res = cursor.fetchall()\n for row in res:\n uid = row[0]\n title = row[1]\n article_date = ''\n if category == 'article':\n article_date = get_elapsed_time(row[2])\n ret = ret +\\\n '<div class=\"col-lg-12 col-md-12 col-sm-12 col-xs-12\" '+\\\n 'style=\"border-top:0.5px; border-top-style: dotted; text-align: left;\">'+\\\n '<i class=\"fas fa-file-alt\"></i> '+\\\n '&nbsp;'+\\\n '<strong>'+\\\n '<a href=\"'+ str(burl) +'doc/?uid='+ str(uid) +'\" target=\"_blank\">'+\\\n '<span style=\"'+\\\n theme_return_this('color:black;', 'color:#00ffff;') +' \">'+\\\n str(article_date) +'</span>&nbsp;'+\\\n str(title) +'</a>'+\\\n '</strong>'+\\\n '</div>'\n cursor.close()\n connection.close()\n return ret", "def news():\n mesosite = iemdb.connect('mesosite', bypass=True)\n mcursor = mesosite.cursor(cursor_factory=psycopg2.extras.DictCursor)\n # Last dailyb delivery\n lastts = mx.DateTime.now() + mx.DateTime.RelativeDateTime(hour=11, days=-1)\n 
mcursor.execute(\"\"\"\n SELECT *, to_char(entered, 'DD Mon HH:MI AM') as nicedate \n from news WHERE entered > '%s' \n ORDER by entered DESC\"\"\" % (\n lastts.strftime(\"%Y-%m-%d %H:%M\"),) )\n\n textfmt = \"\"\"\n +----------------------------------------------\n | Title : %(title)s\n | Date : %(nicedate)s\n | Author: %(author)s\n | URL : %(url)s\n +----------------------------------------------\n\n%(body)s\n\n\"\"\"\n htmlfmt = \"\"\"\n<hr />\n<br /><strong>Title:</strong> <a href=\"http://mesonet.agron.iastate.edu/onsite/news.phtml?id=%(id)s\">%(title)s</a>\n<br /><strong>Date:</strong> %(nicedate)s\n<br /><strong>Author:</strong> %(author)s\n<br /><a href=\"%(url)s\">link</a>\n\n<p>%(body)s\n\n\"\"\"\n txt = \"> News\\n\"\n html = \"<h3>News</h3>\"\n\n for row in mcursor:\n txt += textfmt % row\n html += htmlfmt % row\n if mcursor.rowcount == 0:\n txt += \"\\n No news is good news\\n\\n\"\n html += \"<strong>No news is good news</strong>\"\n\n return txt, html", "def fetch_article_list(self, url):\n print(url)\n\n r = requests.get(url, headers=headers, timeout=10)\n html = r.text\n time.sleep(1)\n\n if r.status_code is not 200:\n print('Server dinied. Status:[%s].'%r.status_code)\n return\n\n # local data test\n #with open('./dataset/sina-blog-list.html', 'r') as f:\n # html = f.read()\n\n #print(html)\n\n soup = BeautifulSoup(html, 'html5lib')\n tags = soup.select('div[class=articleList] > div[class~=articleCell] > p > span[class=atc_title] > a')\n\n for t in tags:\n print('Appened: '+t['href'])\n self.article_urls.append(t['href'])\n\n # Get the url of next blog-list page\n nxpage = soup.select('div[class=SG_page] > ul > li[class=SG_pgnext] > a')\n if len(nxpage) > 0:\n #print ('Next list page: '+nxpage[0]['href'])\n self.fetch_article_list(nxpage[0]['href'])\n else:\n print('Have reached to the botom of blog lists.')\n\n\n # backup lists to local file\n with open(self.path+'/blog-lists.txt', 'w') as f:\n f.write('\\n'.join(self.article_urls))", "def get_top_articles(\n limit: int = 5,\n date: int = int(datetime.now().strftime(\"%Y%m%d\"))\n):\n\n res = articles_db.get_top_articles_mongo(\n articles,\n limit,\n date\n )\n\n return res", "def newsfeed_en(request):\n article_list = Article.objects.order_by('published_date')\n context = {'article_list': article_list}\n return render(request, 'sacms/newsfeed_en.html', context)", "def post_get_recent(requst, limit):\n if requst.method == 'GET':\n recent_posts = Post.objects.order_by('-created_at')[:int(limit)]\n TopicNestedSerializer.Meta.depth = 1\n PostNestedSerializer.Meta.depth = 1\n serializer = PostNestedSerializer(recent_posts, many=True)\n return Response(serializer.data)", "def most_popular_articles():\n print '1. The most popular articles are...'\n return (\"\"\"SELECT articles.title, COUNT(*) as num FROM articles, log\"\"\"\n \"\"\" WHERE SUBSTRING (log.path FROM 10) = articles.slug and\"\"\"\n \"\"\" log.path != '/' Group By articles.title ORDER By num\"\"\"\n \"\"\" DESC LIMIT 3;\"\"\")", "def history():", "def get(self):\n return GlobalNews.retrieve()" ]
[ "0.686497", "0.62624764", "0.6229607", "0.6219513", "0.61116755", "0.60870814", "0.6046967", "0.60119355", "0.5992242", "0.5978869", "0.5909099", "0.58944285", "0.58672863", "0.58509743", "0.5836655", "0.5833346", "0.578833", "0.5772408", "0.5724148", "0.5714202", "0.57078743", "0.5703649", "0.57030153", "0.57008636", "0.56964386", "0.56841564", "0.56690747", "0.5668221", "0.56477165", "0.56293565", "0.56133854", "0.56043607", "0.55806255", "0.5574984", "0.5561733", "0.55591196", "0.55571747", "0.5554493", "0.55539954", "0.55527455", "0.5544132", "0.5532976", "0.5517745", "0.5497737", "0.54958755", "0.5488324", "0.5471742", "0.54505956", "0.5445047", "0.54356897", "0.5425378", "0.54242224", "0.5415464", "0.54013026", "0.53992635", "0.53934306", "0.53903013", "0.5387479", "0.53748846", "0.5372913", "0.53693944", "0.53627986", "0.5358195", "0.5355654", "0.5354886", "0.535413", "0.5351436", "0.5350565", "0.5336574", "0.5335588", "0.5334281", "0.5332345", "0.5328307", "0.5323016", "0.53203046", "0.53161424", "0.53149325", "0.53128177", "0.53001326", "0.52934355", "0.5283163", "0.52804875", "0.52715826", "0.52685237", "0.52672076", "0.52668893", "0.52611834", "0.5260004", "0.5253261", "0.5252669", "0.5239091", "0.5237252", "0.5233133", "0.5232247", "0.52285665", "0.52242553", "0.52205825", "0.52185345", "0.52176577", "0.5217228", "0.5210438" ]
0.0
-1
Get the latest new or updated event articles from Event Registry.
def getUpdates(self): # execute the query ret = self._er.execQuery(self) if ret and ret.get("recentActivity") and ret["recentActivity"].get("articles"): # return the latest articles return ret["recentActivity"]["articles"]["activity"] # or empty return []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_events():\n url = app.config['EVENTS_ENDPOINT']\n response = requests.get(url, params={})\n if response.status_code == 200:\n return parse_events(response.json())\n raise RuntimeError('Error in retrieving events.')", "def _get_persistent_events(self) -> Dict[uuid.UUID, CronEvent]:\n if not self.storage.contains(StateHandler.EVENTS_ENTRY):\n self.storage.put(StateHandler.EVENTS_ENTRY, {})\n return self.storage.get(StateHandler.EVENTS_ENTRY)", "def get_events(self):\n return self.events", "def events(self):\r\n return resources.Events(self)", "def get_last_events(self):\n\n events = self._last_events\n self._last_events = list()\n return events", "def get_events(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n\r\n query = \"\"\"\r\n SELECT DISTINCT E.eid, E1.ename, E1.description,\r\n E.category, E1.start_date, E1.end_date, E1.num_cap,\r\n E1.num_attending, L.lname, L.address_1, E.tag, L.lat, L.lon\r\n FROM {}.EventTags AS E, {}.UserTags AS U, {}.Events as E1, {}.Locations as L\r\n WHERE U.username='{}' AND\r\n E.tag = U.tag AND\r\n E1.eid = E.eid AND\r\n E1.lid = L.lid AND\r\n E1.start_date >= {}\r\n ORDER by E1.start_date\r\n \"\"\".format(\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n self.user.username,\r\n str(datetime.date.today())\r\n )\r\n\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n database.close()\r\n\r\n return [i for i in data]", "def updateEvents(self):\n # Update calendar data\n d_start = datetime.datetime.today()\n d_end = d_start + datetime.timedelta(self.delta_days)\n results = self.cal_cal.date_search(d_start, d_end)\n\n # Flush the events dict\n self.events = []\n # Add each events\n for event in results:\n # Format the title of the event\n str_title = event.instance.vevent.summary.value\n if len(str_title) > 20:\n str_title = str_title[:17] + \"...\"\n # Format the date of the event\n vdate = event.instance.vevent.dtstart.value\n d = datetime.datetime.strptime(\n vdate.strftime(\"%d %m %Y\"), \"%d %m %Y\")\n str_date = \"%s %d %s\" % (\n self.days_french[d.weekday()],\n d.day,\n self.months_french[d.month -1])\n # Format the date gap\n gap = 1 + (d - d_start).days\n # Save the event\n self.events.append((str_title, str_date, gap))", "def get_events(self):\n raise NotImplementedError", "def get_events(self):\n raise NotImplementedError", "def read_events():\n service = setup_google_calendar()\n dict = {}\n # Call the Calendar API\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n print('Getting the upcoming 10 events')\n events_result = service.events().list(calendarId='primary', timeMin=now,\n maxResults=10, singleEvents=True,\n orderBy='startTime').execute()\n events = events_result.get('items', [])\n\n if not events:\n print('No upcoming events found.')\n i = 0\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n print(start, event['summary'])\n dict[i] = (start, event['summary'])\n i += 1\n return dict", "def getEvent(self):\n year, month, day = self.date\n event = Event()\n event.add(\"summary\", \"%s release\" % (self.dict[\"name\"]))\n event.add(\"uid\", \"http://www.freebase.com/view/guid/%s\" % (self.dict['guid'][1:]))\n event.add(\"dtstart\", \"%04d%02d%02d\" % (year,month,day), encode=0)\n return event", "def _get_events(self):\n self.cache = []\n\n # Test if we have event table\n with datascope.closing(datascope.dbopen(self.db, 'r')) as db:\n dbtable = db.lookup(table='event')\n if dbtable.query(datascope.dbTABLE_PRESENT):\n 
steps = ['dbopen event']\n steps.extend(['dbjoin origin'])\n steps.extend(['dbsubset origin.orid != NULL'])\n steps.extend(['dbsubset origin.orid == prefor'])\n fields = ['evid']\n else:\n steps = ['dbopen origin']\n steps.extend(['dbsubset orid != NULL'])\n fields = []\n\n fields.extend(['orid','time','lat','lon','depth','auth','nass',\n 'ndef','review'])\n\n for v in extract_from_db(self.db, steps, fields, self.db_subset):\n if not 'evid' in v:\n v['evid'] = v['orid']\n\n self.logging.debug( \"Events(): new event #%s\" % v['evid'] )\n\n v['allmags'] = []\n v['magnitude'] = '-'\n v['maglddate'] = 0\n v['srname'] = '-'\n v['grname'] = '-'\n v['time'] = parse_sta_time(v['time'])\n v['strtime'] = readable_time(v['time'], self.timeformat, self.timezone)\n\n try:\n v['srname'] = stock.srname(v['lat'],v['lon'])\n except Exception,e:\n warninig('Problems with srname for orid %s: %s' % (v['orid'],\n v['lat'],v['lon'],e) )\n\n try:\n v['grname'] = stock.grname(v['lat'],v['lon'])\n except Exception,e:\n warninig('Problems with grname for orid %s: %s' % (v['orid'],\n v['lat'], v['lon'],e) )\n\n orid = v['orid']\n if orid in self.mags:\n for o in self.mags[orid]:\n v['allmags'].append(self.mags[orid][o])\n if self.mags[orid][o]['lddate'] > v['maglddate']:\n v['magnitude'] = self.mags[orid][o]['strmag']\n v['maglddate'] = self.mags[orid][o]['lddate']\n\n\n self.cache.append( v )", "def get_registry(self):\n response = {}\n delete_keys = []\n for heart_beat in self._registry:\n key = heart_beat.tag\n response[key] = {\n 'url': str(heart_beat)\n }\n last_seen = self._registry[heart_beat]\n now = time.time()\n ttl = (now - last_seen - self.age)\n if ttl > Registry.TTL_DELETE_ENTRY:\n # response[key]['status'] = Registry.LABELS[Registry.TTL_DELETE_ENTRY]\n delete_keys.append(heart_beat) # once done with this loop remove expired entries.\n elif ttl > Registry.TTL_OFFLINE:\n response[key]['status'] = Registry.LABELS[Registry.TTL_OFFLINE]\n elif ttl > Registry.TTL_WARNING:\n response[key]['status'] = Registry.LABELS[Registry.TTL_WARNING]\n else:\n response[key]['status'] = Registry.LABELS[Registry.TTL_ALIVE]\n\n for heart_beat in delete_keys:\n del self._registry[heart_beat]\n return response", "async def get_events(self) -> list[Event]:\n log.debug(\"Discovering events in branding repository.\")\n\n try:\n event_directories = await self.fetch_directory(\"events\", types=(\"dir\",)) # Skip files.\n except Exception:\n log.exception(\"Failed to fetch 'events' directory.\")\n return []\n\n instances: list[Event] = []\n\n for event_directory in event_directories.values():\n log.trace(f\"Attempting to construct event from directory: '{event_directory.path}'.\")\n try:\n instance = await self.construct_event(event_directory)\n except Exception as exc:\n log.warning(f\"Could not construct event '{event_directory.path}'.\", exc_info=exc)\n else:\n instances.append(instance)\n\n return instances", "def get():\n return jsonify({'events': 'Events API'}), 200", "def get_allpack_events(self):\n return self.comp('packmanager').get_allpack_events()", "def get_events(self):\n\n url = '/v2.4/'+self.page_id+'/events'\n data = self.graph.request(url)\n\n while 'next' in data['paging'].keys():\n print data['paging']['next']\n data = self.graph.request(url, args={\n 'limit' : 100,\n 'after' : data['paging']['cursors']['after']\n })\n\n return data", "def get_event_list(self):\n pass", "def events(self):\n return self.current_events", "def get_events(self):\n return self.s.query(Event).all()", "def get_events() -> list[Event]:\n 
g.ledger.changed()\n return [e for e in g.filtered.entries if isinstance(e, Event)]", "def get(self, *args):\n return _libsbml.ListOfEvents_get(self, *args)", "def get_events(self, limit=10, query=None):\n\n conn = http.client.HTTPSConnection(self.OPENFDA_API_URL)\n request = self.OPENFDA_API_EVENT + \"?limit=\" + str(limit)\n if query is not None:\n request += \"&\" + query\n conn.request(\"GET\", request)\n events_search = conn.getresponse()\n raw_data = events_search.read()\n events_str = raw_data.decode(\"utf8\")\n events = json.loads(events_str)\n events = events['results']\n\n return events", "def view_events():\n result = get_events_helper(Event)\n return jsonify(result[0]), result[1]", "def fetch_registry_content(self):\n for registry_name, registry in self.registries.items():\n if not registry.source:\n continue\n registry.get_repositories()", "def events(self):\n return self.properties.get('events', EventCollection(self.context, ResourcePath(\"events\", self.resource_path)))", "def __calender_events(self):\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('calendar', 'v3', http=http)\n\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\n pt=\"Getting the upcoming latest events\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pt)\n eventsResult = service.events().list(\n calendarId='primary', timeMin=now, maxResults=1, singleEvents=True,\n orderBy='startTime').execute()\n events = eventsResult.get('items', [])\n\n if not events:\n pq=\"No upcoming events found.\"\n requests.get(\"http://localhost:8080/statement?text=%s\" % pt)\n self.speech.synthesize_text(pq)\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))\n #start1=''.join(start)\n summary=event['summary']\n print start,summary\n requests.get(\"http://localhost:8080/statement?text=\"+start+\" \"+summary)", "def events(self) -> object:\n return self._events", "def get_event(self):\n return self.keys.events.get()", "def get_all_events(cls):\n try:\n events = list(events_coll.find())\n events_list = []\n if events is not None:\n for event in events:\n one_event = cls(**event)\n events_list.append(one_event)\n return events_list\n except Exception as e:\n print(e)", "def _default_events_fetcher(self):\n raise NotImplementedError", "def _default_events_fetcher(self):\n raise NotImplementedError", "def ajaxevent():\n return common.get_latest_event()", "def get_game_events(self):\n\t\tcontents = self.archive.read_file('replay.game.events')\n\t\treturn self.protocol.decode_replay_game_events(contents)", "def events(self):\n return self.search(comp_class=Event)", "def get_events(self):\n ret = []\n while True:\n event = self.event.get_event(wait=1, full=True)\n if event is None:\n return ret\n ret.append(event)", "def events(self):\r\n return e.Events(self)", "def getUpdates(self):\n # execute the query\n ret = self._er.execQuery(self)\n\n if ret and ret.get(\"recentActivity\") and ret[\"recentActivity\"].get(\"events\"):\n # return the updated information\n return ret[\"recentActivity\"][\"events\"]\n # or empty\n return {}", "def show_events_list():\r\n\tevents_list = Page.objects.filter(tags='events').order_by('-created')\r\n\treturn {'events_list': events_list}", "def events(self):\n return self._events", "def get_current_events():\n resp = requests.get(ICAL_FEED)\n if resp.status_code != 200:\n logger.error('> Error retrieving iCal feed!')\n return None\n\n 
try:\n cal = ics.Calendar(resp.text)\n except Exception as e:\n logger.error('> Error parsing iCal data ({})'.format(e))\n return None\n\n return cal", "def get_events():\n # reads the session\n session = request.args.get('session', type=str)\n process = request.args.get('process', default='receipt', type=str)\n\n dictio = {}\n\n if check_session_validity(session):\n user = get_user_from_session(session)\n if lh.check_user_log_visibility(user, process):\n caseid = request.args.get('caseid', type=str)\n events = lh.get_handler_for_process_and_session(process, session).get_events(caseid)\n i = 0\n while i < len(events):\n keys = list(events[i].keys())\n for key in keys:\n if str(events[i][key]).lower() == \"nan\" or str(events[i][key]).lower() == \"nat\":\n del events[i][key]\n i = i + 1\n dictio = {\"events\": events}\n ret = jsonify(dictio)\n return ret", "def events(self):\r\n return ev.Events(self)", "def events(self):\r\n return ev.Events(self)", "def get_events():\n\n all_calendar_events = {}\n\n # Suppress warning in logs\n # https://github.com/googleapis/google-api-python-client/issues/299\n service = build('calendar', 'v3', credentials=google_auth.creds, cache_discovery=False)\n\n now = datetime.datetime.utcnow().today().isoformat() + 'Z' # 'Z' indicates UTC time\n\n for calendar_name, calendar_id in config.GOOGLE_CALENDARS.items():\n all_events = []\n events_result = service.events().list(calendarId=calendar_id, timeMin=now,\n maxResults=10, singleEvents=True, orderBy='startTime').execute()\n events = events_result.get('items', [])\n if not events:\n all_events.append(['Ei tulevia tapahtumia'])\n for event in events:\n start = event['start'].get('dateTime', event['start'].get('date'))[:10]\n all_events.append([start, event[\"summary\"], event[\"htmlLink\"]])\n all_calendar_events[calendar_name] = all_events\n\n return all_calendar_events", "def get_events(self):\r\n return QtSql.QSqlQuery('''SELECT DISTINCT Event FROM presentations''')", "async def _get_events_from_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = self._get_events_from_local_cache(\n events, update_metrics=update_metrics\n )\n\n missing_event_ids = (e for e in events if e not in event_map)\n event_map.update(\n await self._get_events_from_external_cache(\n events=missing_event_ids,\n update_metrics=update_metrics,\n )\n )\n\n return event_map", "def events(self):\r\n return Events(self)", "def events(self):\r\n return Events(self)", "def events(self):\r\n return Events(self)", "def get(self, request, group):\n event = group.get_latest_event()\n\n try:\n return client.get('/events/{}/'.format(event.id), request.user, request.auth)\n except client.ApiError as e:\n return Response(e.body, status=e.status)", "async def _get_events_from_external_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = {}\n\n for event_id in events:\n ret = await self._get_event_cache.get_external(\n (event_id,), None, update_metrics=update_metrics\n )\n if ret:\n event_map[event_id] = ret\n\n return event_map", "def getListOfEvents(self):\n return self.model.getListOfEvents()", "def load_new_events_list(self):\n self._event_index_list = self.gdc.new_events_indices\n self.populate_event_list_from_index_list()", "def update_events(request):\n events_data = request.data\n events_manager.deserialize_event(events_data)\n # print(events_manager.serialize_events())\n events_manager.apply()\n return JsonResponse({'nodes': 
[]})", "def news_and_events(self):\n return self._get_child_page_of_type(NewsAndEventsPage)", "def events():\n # Compare cache against a new GET request\n temp_cache = EVENTS_CACHED\n # events_new = get_calendar_events_today(CALENDAR_URL)\n events_new = get_calendar_events_limit(CALENDAR_URL, sort=False)\n\n # If not change is detected, tell the browser to keep it's current content.\n if temp_cache is None or compare_events(temp_cache, events_new):\n return \"false\"\n\n # Else, render the partial events template to return to the client.\n return render_template('events_sorted.html', events=sort_events_days(events_new))", "def getLatestStories(self, newest, alreadyReadList):\n\t\turl = \"http://news.ycombinator.com\"\n\t\tif newest == \"newest\":\n\t\t\turl += \"/newest\"\n\t\tsource = self.getSource(url)\n\t\tstories = self.getStories(source, alreadyReadList)\n\t\treturn stories", "def get_calendar_events(calendar_url, params=None):\n return cache_calendar_events(calendar_url, params=params)\n # return CALENDAR_CACHED or cache_calendar(calendar_url)", "def event_list(self):\n return self._event_list", "def fetch_events(self):\n while 1:\n try:\n self.events_local.append(self._q.get(False))\n except queue.Empty:\n break", "def get_events(self):\n self._events = []\n self.ircobj.process_once(timeout=0.1)\n return self._events", "def get_curr_events(self):\n today = datetime.date.today()\n return self.s.query(Event).filter(Event.time > today).all()", "def events(self) -> [redirect, HTMLBody]:\n\t\t# Get all events and split into 2 groups\n\t\teventsl, eventsr = prepare_events(get_events())\n\t\treturn render_template(\"events.jinja2\", eventsl=eventsl, eventsr=eventsr)", "def _get_registry_repodigest(self, context):\n registry_data = context.docker_client.images.get_registry_data(self._name())\n repo_digest = registry_data.attrs['Descriptor']['digest']\n return repo_digest", "def registry(self):\n return self._registry", "def get(self, request):\n return self.serviceHandler.getEvent(request.data)", "def get_events(events_id):\n # Filter events matching events_id and select the first one found\n events = Events.query.filter_by(id=events_id).first()\n # If no events matches album_id, respond HTTP 404\n if events is None:\n abort(404)\n # Serialize the album as a JSON object and return it\n schema = EventsSchema()\n return jsonify(schema.dump(events))", "def scrape_events(path, urls):\n seen_ids = set()\n result = []\n for url in urls:\n # Get all of the Network requests being sent out\n print(f'Processing {url}')\n driver.get(url)\n browser_log = driver.get_log('performance') \n events = [process_browser_log_entry(entry) for entry in browser_log]\n results = []\n # Find the Network request that sends a GET request to EventBrite API\n for event in events:\n if event['method'] == 'Network.responseReceived':\n # print(event)\n if 'event_ids' in event['params']['response']['url']:\n results.append(event)\n # Get the GET request URL\n get_url = \"\"\n # TODO: Sometimes returning 0 or more than 1... 
I'm not sure why :(\n if len(results) >= 1:\n get_url = results[0]['params']['response']['url']\n # Get the GET request response JSON\n json_response = get_request(get_url)\n event_list = json_response['events']\n # Find unique events in the response JSON \n unique_event_list = []\n for event in event_list:\n if event['id'] not in seen_ids:\n seen_ids.add(event['id'])\n unique_event_list.append(event)\n parsed_events = parse_event_page(unique_event_list)\n result.extend(parsed_events)\n else:\n print(results)\n print('yikes something went wrong')\n\n driver.close()\n return result\n # save_events(path, result)", "def get_last_events(self, limit=10):\n\n return self.get_events(limit)", "def getEvent(self, timeout=None):\n socks = self.poller.poll(timeout)\n if not socks:\n return\n msg = socks[0][0].recv()\n d = self.mh.unserialize(msg)\n e = Event.fromDict(d)\n if self.store:\n _id = self.store.addEvent(e)\n e.id = _id\n return e", "def events(self) -> Dict[EventCall, Set[Node]]:\n return self._events", "def get_latest_events(self, batch_size=1):\n\n # get latest event from the dispatcher\n queue_empty_reached, latest_dispatcher_events = \\\n self.dispatcher.get_events_batch(batch_size=batch_size)\n\n if queue_empty_reached:\n logger.debug(\"Empty queue reached!\")\n\n if latest_dispatcher_events:\n info = \"New events arrived [Total so far: {}]\".format(self._total_n_processed_events)\n logger.info(info)\n\n for ev in latest_dispatcher_events:\n logger.info(str(ev))\n\n # update internal list of events as appropriate\n self.update_events(latest_dispatcher_events)\n\n # update total n of processed events so far..\n self._total_n_processed_events += len(latest_dispatcher_events)\n\n # update the list of newly arrived events\n self.latest_events.extend(latest_dispatcher_events)\n\n # return the newly arrived events and empty the internal list\n all_latest_events = copy.deepcopy(self.latest_events)\n self.latest_events = []\n\n return all_latest_events", "def calendar_events(self):\r\n return calendars.CalendarEvents(self)", "def load_updated_events_list(self):\n self._event_index_list, self._event_id_list = \\\n zip(*self.gdc.updated_events_indices_and_ids)\n self.populate_event_list_from_index_list()", "def eventList(self):\n return self._eventList", "def events(self):\n return EventsTable(self.rpc, self.name)", "def available_events(self):\n return self.target.read_value(self.available_events_file).splitlines()", "def registry(self):\n return self.__registry", "async def async_get_events(self, hass, start_datetime, end_datetime):\n events = []\n startdates = {}\n garbages = {}\n calendar_lang = \"en\"\n friendly_name = \"\"\n if SENSOR_PLATFORM not in hass.data[DOMAIN]:\n return events\n #start_date = start_datetime.date()\n #end_date = end_datetime.date()\n for entity in self.entities:\n if entity not in hass.data[DOMAIN][SENSOR_PLATFORM]:\n continue\n attributes = self._hass.states.get(entity).attributes\n for key in attributes:\n x = re.search('^date', key)\n if x is not None:\n idx = key[x.end():]\n startdates[idx] = datetime.strptime(attributes[key].__str__(), \"%Y.%m.%d\").date()\n x = re.search('^garbage', key)\n if x is not None:\n idx = key[x.end():]\n garbages[idx] = attributes[key]\n if key == 'calendar_lang':\n calendar_lang = attributes[key]\n if key == 'friendly_name':\n friendly_name = attributes[key]\n\n i = 0\n while i < len(startdates):\n if startdates[str(i)] is not None:\n end = startdates[str(i)] + timedelta(days=1)\n if calendar_lang in self._translation:\n gtype = 
self._split_and_translate(calendar_lang, garbages[str(i)])\n else:\n gtype = self._split_and_translate(\"en\", garbages[str(i)])\n _LOGGER.debug(\"async_get_events: %s s: %s, e: %s, type: %s\", friendly_name, startdates[str(i)].strftime(\"%Y.%m.%d\"), end.strftime(\"%Y.%m.%d\"),gtype)\n\n event = {\n \"uid\": entity,\n \"summary\": friendly_name + \": \" + gtype,\n \"start\": {\"date\": startdates[str(i)].strftime(\"%Y-%m-%d\")},\n \"end\": {\"date\": end.strftime(\"%Y-%m-%d\")},\n \"allDay\": True,\n }\n events.append(event)\n i += 1\n return events", "def get_events(self):\r\n database = main.connect_to_cloudsql()\r\n cursor = database.cursor()\r\n\r\n result = []\r\n for tag in self.interests:\r\n query = \"\"\"\r\n SELECT DISTINCT E.eid, E1.ename, E1.description,\r\n E.category, E1.start_date, E1.end_date, E1.num_cap,\r\n E1.num_attending, L.lname, L.address_1, E.tag, L.lat, L.lon\r\n FROM {}.EventTags AS E, {}.UserTags AS U, {}.Events as E1, {}.Locations as L\r\n WHERE E.tag = '{}' AND\r\n E1.eid = E.eid AND\r\n E1.lid = L.lid AND\r\n E1.start_date > {}\r\n ORDER by E1.start_date\r\n \"\"\".format(\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n ENV_DB,\r\n tag,\r\n str(datetime.date.today())\r\n )\r\n\r\n cursor.execute(query)\r\n data = cursor.fetchall()\r\n result.extend([i for i in data])\r\n\r\n database.close()\r\n\r\n return result", "def apigw_event():\n with open(\"events/event.json\") as json_file:\n return json.load(json_file)", "def get_events(self):\n #Returne the capture events\n raise NotImplementedError", "def get_events(self):\n #Returne the capture events\n raise NotImplementedError", "def get(self):\n return {\"newest_msg\": newest_msg()}", "def events(self) -> \"EventList\":\n from cognite.client.data_classes import EventList\n\n return self._retrieve_related_resources(EventList, self._cognite_client.events)", "def fusion_api_get_events(self, uri=None, param='', api=None, headers=None):\n return self.event.get(uri=uri, api=api, headers=headers, param=param)", "def get(self, eventId):\n event = EventDao().get_by_id(event_id=eventId)\n event_dict = event.to_dict_view()\n return event_dict", "def ewriters():\n return dict(_ewriters)", "def events(self):\n self.add_events(Event.objects.filter(event_end__gt=timezone.now()).order_by('event_start'))\n self.filename = 'events'", "def events(self):\n self.add_events(Event.objects.filter(event_end__gt=timezone.now()).order_by('event_start'))\n self.filename = 'events'", "def _get_events_from_local_cache(\n self, events: Iterable[str], update_metrics: bool = True\n ) -> Dict[str, EventCacheEntry]:\n event_map = {}\n\n for event_id in events:\n # First check if it's in the event cache\n ret = self._get_event_cache.get_local(\n (event_id,), None, update_metrics=update_metrics\n )\n if ret:\n event_map[event_id] = ret\n continue\n\n # Otherwise check if we still have the event in memory.\n event = self._event_ref.get(event_id)\n if event:\n # Reconstruct an event cache entry\n\n cache_entry = EventCacheEntry(\n event=event,\n # We don't cache weakrefs to redacted events, so we know\n # this is None.\n redacted_event=None,\n )\n event_map[event_id] = cache_entry\n\n # We add the entry back into the cache as we want to keep\n # recently queried events in the cache.\n self._get_event_cache.set_local((event_id,), cache_entry)\n\n return event_map", "def json_events(request):\n if request.method == 'GET':\n ttrss_url = request.GET['feed']\n\n # need xml for this. 
\n university_url = 'http://events.uchicago.edu/widgets/rss.php?key=47866f880d62a4f4517a44381f4a990d&id=48'\n\n n = datetime.datetime.now()\n return JsonResponse(\n {\n 'events': flatten_events(get_events(university_url, ttrss_url, n, n + relativedelta(years=1), False))\n }\n )", "async def events(self) -> Iterable[Event]:", "def showEvents(self, year, month, language, filterIDs=None, negFilterIDs=None):\n try:\n returnEvents = self.newsFeedModel.getEvents(year, month, language, filterIDs, negFilterIDs)\n if language != 'de' and len(returnEvents) == 0:\n returnEvents = self.newsFeedModel.getEvents(year, month, 'de', filterIDs, negFilterIDs)\n returnEventsJSON = self.newsFeedView.toJSONEvents(returnEvents, self.newsFeedModel.getEventCategoriesLastChanged())\n return returnEventsJSON\n except Exception as e:\n print(\"there was a problem while retrieving events\")\n raise e", "def events_by_id(self, repository_id, access_token=None):\n return self._complete_request_by_id(\n repository_id, \"events\", access_token)", "def pull_event(self):\n self._buffer_buisy_mutex.acquire()\n event = None\n if self._events_buffer:\n event = self._events_buffer.pop(0)\n self._dilivered_events_stack.push(event.hash)\n self._buffer_buisy_mutex.release()\n if event:\n self.logger.info('Pulling new event: {}'.format(event))\n return event", "def main():\r\n credentials = get_credentials()\r\n http = credentials.authorize(httplib2.Http())\r\n service = discovery.build('calendar', 'v3', http=http)\r\n\r\n now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time\r\n print('Getting the upcoming 10 events')\r\n eventsResult = service.events().list(\r\n calendarId='primary', timeMin=now, maxResults=10, singleEvents=True,\r\n orderBy='startTime').execute()\r\n events = eventsResult.get('items', [])\r\n\r\n if not events:\r\n print('No upcoming events found.')\r\n for event in events:\r\n start = event['start'].get('dateTime', event['start'].get('date'))\r\n print(start, event['summary'])", "async def get_events(\n self,\n event_ids: Collection[str],\n redact_behaviour: EventRedactBehaviour = EventRedactBehaviour.redact,\n get_prev_content: bool = False,\n allow_rejected: bool = False,\n ) -> Dict[str, EventBase]:\n events = await self.get_events_as_list(\n event_ids,\n redact_behaviour=redact_behaviour,\n get_prev_content=get_prev_content,\n allow_rejected=allow_rejected,\n )\n\n return {e.event_id: e for e in events}", "def get_queryset(self):\n\t\treturn Event.objects.filter(eDate__gte= timezone.now()).order_by('-eDate')" ]
[ "0.6143702", "0.6038302", "0.6023639", "0.58556026", "0.58545154", "0.57816315", "0.5745347", "0.5734352", "0.5734352", "0.5719633", "0.5719397", "0.5698083", "0.5681828", "0.5623633", "0.56198883", "0.5609583", "0.5598598", "0.5597717", "0.5596118", "0.5584513", "0.55138314", "0.55124426", "0.5471624", "0.54603636", "0.5445712", "0.544413", "0.5442349", "0.5434901", "0.54308945", "0.5417472", "0.54167455", "0.54167455", "0.54059047", "0.5391335", "0.5362871", "0.5353374", "0.5345268", "0.5329983", "0.5324729", "0.5321384", "0.53097373", "0.5285628", "0.5282159", "0.5282159", "0.52710164", "0.5269628", "0.5268993", "0.5265617", "0.5265617", "0.5265617", "0.5264769", "0.52258176", "0.52252895", "0.5214819", "0.5187187", "0.5180489", "0.5177421", "0.51763815", "0.51741683", "0.51652986", "0.51626277", "0.5161952", "0.51500094", "0.51481795", "0.5148089", "0.51435727", "0.5128809", "0.5127234", "0.512219", "0.5121548", "0.51062053", "0.5099728", "0.5097862", "0.50966936", "0.508951", "0.5075901", "0.5062793", "0.50598145", "0.50523865", "0.50517786", "0.50437105", "0.5042798", "0.50402164", "0.50402164", "0.50384355", "0.50362", "0.50353456", "0.50283897", "0.50215685", "0.5011363", "0.5011363", "0.5009963", "0.5000775", "0.4976793", "0.49763125", "0.497247", "0.49718782", "0.4971531", "0.49676985", "0.49561617" ]
0.5413742
32
Basic landing page for unauthenticated users
def login(): if current_user.is_authenticated: return redirect(url_for('main.home')) form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(email=form.email.data).first() if user and bcrypt.check_password_hash(user.password, form.password.data): login_user(user, remember=form.remember_me.data) next_page = request.args.get('next') # get next url parameter and after login redirect to requested page flash("Login Successful", 'success') # if there was request for specific page that needs authorization, then that argument assigned in # variable `next_page` keeps that and after login automatically user is redirect to that page return redirect(next_page) if next_page else redirect(url_for('main.home')) else: flash("Login Unsuccessful. Please check email and password", "danger") return redirect(url_for('users.login')) return render_template('login.html', title='Log In', form=form)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def landing():\n if g.user:\n return render_template('landing.html', user=g.user)\n return redirect(url_for('login'))", "def home_page():\n if not g.user:\n flash(\"Please login to view.\", \"warning\")\n return redirect('/login')\n return render_template('index.html')", "def homepage():\n if g.user:\n return redirect(f\"/user/{g.user.id}\")\n else:\n return redirect(\"/landing\")", "def index(self):\n\n # try and pull the user's data\n user = get_active_user_data()\n\n if not user:\n # they are not logged in give them the login form\n return render('/login_form.html')\n\n # they are logged in, pass them to the home page\n redirect('/')", "def index(request):\n try:\n if request.user.is_authenticated:\n return render(request, \"pages/index.html\")\n else:\n return redirect('login')\n\n except:\n return redirect('login')", "def defaultlanding():\n #send user to description page if not logged in\n if not g.user:\n return redirect(url_for('description'))\n #display leaderboard for competition if logged in\n return redirect(url_for('leaderboard'))", "def index():\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n return render_template('index.html')", "def landing_page():\n\n print session\n\n if 'acct' in session:\n acct = get_current_account(session['acct'])\n search = False\n return render_template(\"index.html\", acct=acct, search=search)\n\n else:\n return redirect(\"/signup\")", "def home(request):\n if request.user.is_authenticated:\n return redirect('/start')\n return render(request, 'home/home.html')", "def index(request):\n if request.user.is_authenticated:\n return redirect('/dashboard')\n else:\n context = {'client_id': settings.OPENHUMANS_CLIENT_ID,\n 'oh_proj_page': settings.OH_ACTIVITY_PAGE}\n\n return render(request, 'main/index.html', context=context)", "def index(request):\n\n\tif request.user.is_authenticated:\n\t\treturn HttpResponseRedirect('home')\n\treturn HttpResponseRedirect('login')", "def home(request):\n # if request.user.is_authenticated():\n # return redirect('/fastapp')\n return context()", "def index():\n if auth.user:\n message=\"Welcome: \"\n user=auth.user\n else:\n message=\"Please use login for testing...\"\n user=None\n return dict(message=message, user=user)", "def index():\n if (session_get_int(\"user_id\") is not None):\n return render_template(\"dashboard.html\")\n else:\n return render_template(\"index.html\")", "def landing_page():\n\n return render_template('index.html')", "def get(self):\n if self.user:\n self.render('welcome.html', username = self.user.name)\n else:\n self.redirect('/signup')", "def home():\n if not session.get('logged_in'):\n return redirect(url_for('welcome'))\n return render_template('home.html', filename=\"yarg.jpg\")", "def home(request):\n if request.user.is_authenticated():\n return HttpResponseRedirect('done')\n else:\n return render_to_response('home.html', RequestContext(request))", "def index():\n is_admin = dbhandler.is_admin(current_user())\n return render_template('./welcome.html', username=current_user(), is_admin=is_admin)", "def home(request):\n assert isinstance(request, HttpRequest)\n iscapable =False\n if request.user.username in get_librarians():\n iscapable=True;\n\n return render(\n request,\n 'app/index.html',\n {\n 'title':'Home Page',\n 'iscapable':iscapable,\n 'year':datetime.now().year,\n }\n )", "def landing():\n return render_template('index.html', token=webview.token)", "def landing():\n return render_template('index.html', token=webview.token)", "def home(request):\n if 
request.user.is_authenticated():\n return HttpResponse(\"{0} <a href='/accounts/logout'>exit</a>\".format(request.user))\n else:\n return HttpResponse(\"<a href='/login/vk-oauth2/'>login with VK</a>\")", "def landing_page(request):\n return render(request, 'index.html')", "def index(request):\n context = {'is_logged_in': request.user.is_authenticated}\n return render(request, 'sacms/index.html', context)", "def landing(request):\n hiring=True\n context = RequestContext(request)\n\n \"\"\" if logged in show institution courses if appropriate \n other rules: superuser - sees all\n staff - sees all from institution?\n \"\"\"\n\n if not request.user.is_authenticated():\n course_list = Course.objects.filter(mode='ready', \n institution_only = 0)\n else:\n course_list = Course.objects.filter(Q(mode='ready', \n institution_only = 0) | Q(mode='ready', institution__id__in=request.user.get_profile().institutions.all()))\n \n r = render_to_response(\"landing.html\",\n {'hiring': hiring, \n 'course_list':course_list,\n 'display_login': request.GET.__contains__('login')},\n context_instance=context)\n return r", "def get(self):\n user = self.get_active_user()\n if user:\n self.render_newpage(user=user)\n else:\n self.redirect('/login')", "def show_index():\r\n if 'username' in flask.session:\r\n return flask.redirect(flask.url_for('home')) # Need to fix redirect\r\n\r\n return flask.render_template(\"index.html\")", "def landing():\n return render_template(\"landing.html\")", "def home_view(request):\n if request.authenticated_userid:\n return HTTPFound(location=request.route_url('app_view')) # pragma no cover\n return {} # pragma no cover", "def view_landing_page():\n return render_template(\"index.html\")", "def home(request):\n if 'member_id' not in request.session:\n return redirect(\"/login/\")\n return render(request, 'esihapp/index1.html')", "def home(request):\n #print (\"home\")\n if request.user.is_authenticated():\n return redirect('done')\n return context()", "def homepage( request ):\n if \"email\" in request.session:\n return redirect( '/home' )\n return render_to_response( 'index.html' )", "def home():\n settings = PageSetting.find_settings()\n if not (settings.enabled) and not (authenticated(session)):\n return render_template(\"errors/maintenance.html\")\n else:\n return render_template(\"layout/index.html\", settings=settings)", "def home(request):\n if request.user.is_authenticated():\n domain = request.get_host()\n profile_picture = request.user.default_profile_picture\n full_name = request.user.full_name\n phone_number = request.user.phone_number\n context = {\n 'domain': domain,\n 'profile_picture': profile_picture,\n 'full_name': full_name,\n 'phone_number': phone_number,\n }\n return render(request, 'home.html', context)\n return render(request, 'home.html', {})", "def index():\n if 'name' in session:\n return render_template('home.html')\n return redirect(url_for('log_in'))", "def landing(request):\n return render(request, 'staffing/landing.html')", "def home():\n # Check if user is loggedin\n if 'loggedin' in session:\n response = requests.get(\"http://localhost:8080/api/getcars\")\n print(response)\n cars = json.loads(response.text)\n return render_template('home.html', username=session['username'], cars=cars)\n # users is not loggedin redirect to login page\n return redirect(url_for('site.login'))", "def home():\n session_id = request.args.get('session-id', None)\n user_id = request.args.get('user-id', None)\n if check_authentication(session_id, user_id):\n return 
render_template('home.html', cars_list=get_cars_preview(), news_list=get_news_list(), user=user_id,\n session_id=session_id, authjs=True, preview_length=get_cars_preview().__len__())\n else:\n return render_template('home.html', cars_list=get_cars_preview(), news_list=get_news_list(), authjs=True,\n preview_length=get_cars_preview().__len__(), del_session_cookie=True)", "def home(result=None):\n print(inspect.stack()[1][3])\n\n if not session.get('logged_in') and not result:\n return render_template('login.html')\n else:\n # Based on the user_id passed, print Details, URLS and all.\n # return render_template('dashboard.html', username=result.name, user_id=result.user_type)\n return render_template('webpage/index1.html', username=result.name, user_id=result.user_type)", "def unauthorized():\n flash('You must be logged in to view that page')\n return redirect(url_for('catalog_bp.index'))", "def home():\n\n if not current_user.is_authenticated:\n return redirect(url_for('login'))\n else:\n return redirect(url_for('show_registrations'))", "def home():\n\n # sets the page to load depending on the type of user\n # if none specified the login screen will be displayed\n pageName = ''\n userType = session.get('UserType', None)\n if userType == None:\n pageName = 'anonHome.jade'\n elif userType == 'Seeker':\n pageName = 'indexJob.jade'\n elif userType == 'Manager':\n pageName = 'indexManager.jade'\n\n frogHop = url_for('static', filename='loop frog.gif')\n uName = session.get('UserName', 'Unknown') # load a default value if retrieval fails\n return render_template(\n pageName,\n title='Home',\n name=uName,\n getFrog=frogHop,\n year=datetime.now().year,\n )", "def main():\n if 'username' in session:\n flash(f'Logged in as {session[\"username\"]}')\n else:\n flash('You are not logged in.')\n return render_template(\"main.html\", title=\"Main\")", "def landing_page(request):\n return render(request, 'landing_page/landing.html', {})", "def home():\n return render_template('login.html')", "def landingPage():\n # Query all listings from the database and pass to landing page.\n return render_template(\"landing.html\")", "def root(request):\n\n return render(request, 'users/index.html')", "def index():\r\n\r\n # initializes page title\r\n page_title = 'Home'\r\n\r\n # renders the landing page\r\n return render_template('index.html', page_title=page_title)", "def welcome(self):\n if self.user:\n return self.render('welcome.html')\n self.redirect('/register')", "def root():\n if request.headers['Accept'] == 'application/json':\n return \"Welcome\\n\\n\", 200\n else:\n return redirect(url_for('index'))", "def index(request):\n \n user = get_user(request)\n\n # single auth system?\n if len(ENABLED_AUTH_SYSTEMS) == 1 and not user:\n return HttpResponseRedirect(reverse(AUTH_START, args=[ENABLED_AUTH_SYSTEMS[0]])+ '?return_url=' + request.GET.get('return_url', ''))\n\n #if DEFAULT_AUTH_SYSTEM and not user:\n # return HttpResponseRedirect(reverse(start, args=[DEFAULT_AUTH_SYSTEM])+ '?return_url=' + request.GET.get('return_url', ''))\n \n default_auth_system_obj = None\n if DEFAULT_AUTH_SYSTEM:\n default_auth_system_obj = AUTH_SYSTEMS[DEFAULT_AUTH_SYSTEM]\n\n #form = password.LoginForm()\n\n return render_template(request, 'index', {'return_url' : request.GET.get('return_url', '/'),\n 'enabled_auth_systems' : ENABLED_AUTH_SYSTEMS,\n 'default_auth_system': DEFAULT_AUTH_SYSTEM,\n 'default_auth_system_obj': default_auth_system_obj})", "def welcome_page():\n return redirect(\"/static/welcome.html\")", "def 
singapore():\n if \"username\" in session:\n return render_template(\"singapore.html\")\n return abort(401)", "def get(self):\n user = self.get_active_user()\n if not user:\n self.render(\"login_signupbase.html\",\n login=self.LOGIN_FORM,\n main_heading=self.MAIN_HEADING)\n else:\n self.render(\"redirect_in_8.html\",\n message=\"\"\"You are already signed in! <a href='/logout'>\n Log out</a> before signing in with a new\n account or return to the\n <a href='/'>front page</a>.\"\"\")", "def home():\n # if session.get('username'):\n # return redirect(url_for('categories'))\n # else:\n return render_template('home.html')", "def index():\n aaa.require(fail_redirect='/login')\n return 'Welcome! <a href=\"/admin\">Admin page</a> <a href=\"/logout\">Logout</a>'", "def ShowLogin():\n current_user = helpers.get_current_user()\n if current_user is None:\n return render_template('login.html')\n else:\n return redirect('/')", "def index():\n return render_template('index.html', username=session['username'])", "def index(request):\n if request.user.is_authenticated():\n return redirect('/matrix/')\n else:\n form = AuthenticationForm(request)\n return render(request, 'registration/login.html', {'form': form})", "def view_home(self):\n with self.client.get(\"/home\", catch_response=True) as response:\n for r_hist in response.history:\n if r_hist.status_code > 200 and r_hist.status_code < 400:\n response.failure(\"Not logged on: Got redirect to /login\")", "def home(request):\n return render(request, 'users/dashboard.html')", "def index(self):\n raise cherrypy.HTTPRedirect('/user')", "def index(request):\n user = request.user\n if user.is_authenticated:\n validar_usuario(request.user)\n return redirect('gestion:menu')\n else:\n return render(request,'index.html')", "def home(request):\n\n user = request.authenticated_userid\n return {'user': user}", "def home(request):\n if request.user.is_authenticated:\n return render(request, 'wantedly_app/home.html')\n\n # Execute the below if the user is not authenticated.\n if request.method == 'POST':\n user = authenticate(username=request.POST['username'], password=request.POST['password'])\n\n # If the user exists in the DB,\n if user is not None:\n\n # If the user is active,\n if user.is_active:\n auth_login(request, user, backend='django.contrib.auth.backends.ModelBackend')\n messages.add_message(request, messages.SUCCESS, 'ログインしました!')\n return redirect('home')\n\n # If the user is not active,\n else:\n messages.add_message(request, messages.ERROR, 'ユーザーのアクティベーションがまだ完了していません。')\n\n # If the user does not exists in the DB,\n else:\n messages.add_message(request, messages.ERROR, 'ログインに失敗しました。ユーザーが存在しないかパスワードが間違っています。')\n\n context = {'login_form': LoginForm()}\n return render(request, 'wantedly_app/top.html', context)", "def unauthorized():\n flask.flash('You must be logged in to view that page.')\n return redirect(url_for('auth.sign_in'))", "def test_get_main_page_without_logged_in_user(self):\n response = self.testapp.get('/')\n self.assertEqual(response.status_int, 200)", "def default():\n\treturn render_template(\"login.html\")", "def unauthorized():\n flash(\"You must be logged in to view that page.\")\n return redirect(url_for(\"auth.login_view\"))", "def login():\r\n return render_template(\r\n 'about.html',\r\n title='About',\r\n year=datetime.now().year,\r\n message='Your application description page.'\r\n )", "def get(self):\n if self.logged_in:\n self.render('home.html', {\n 'name': self.current_user.name,\n 'server': self.current_user.server,\n 
'faction': factions.get(self.current_user.faction),\n 'home': True,\n 'page_id': 'home'\n })\n else:\n self.render('home.html', {\n 'servers': servers,\n 'factions': factions,\n 'destination_url': '/settings',\n 'home': True,\n 'page_id': 'home'\n })", "def unauthorized():\n #flash('You must be logged in to view that page.')\n return redirect(url_for('login'))", "async def index(request: Request, user: UserInfo) -> HTTPResponse:\n return redirect('home')", "def home_page():\n return redirect('/users')", "def index():\n return redirect(auth_flow.get_authorization_url())", "def hello_page(request):\n text = \"Welcome to test_project\"\n if not request.user.is_anonymous:\n text = \"Welcome '%s' to test_project\" % request.user.username\n return HttpResponse(text, content_type='text/plain')", "def home(request):\n if request.user.is_authenticated():\n return HttpResponseRedirect('logged-in')\n else:\n home_view = 1\n return render_to_response('content/home.html', {'version': version, \"home_view\":home_view},\n RequestContext(request))", "def welcome(request):\n return render(request, 'code_challenge/welcome.html', {})", "def unauthorized():\n flash('You must be logged in to view that page.', 'warning')\n return redirect(url_for('auth.login'))", "def index():\n # Redirect to dashboard if user is logged in already\n if current_user.is_authenticated:\n return redirect(url_for('dashboard.overview'))\n\n # Try and login with the POST form\n if request.method == 'POST':\n uid = request.form.get('uid')\n password = request.form.get('password')\n remember = request.form.get('remember-me') is not None\n\n # Try and login the user\n user = try_bind(uid, password)\n\n # Login failed\n if user is None:\n # TODO Add more specific error messages\n flash('Invalid username or password. 
Please try again.',\n 'danger')\n return render_template('index.html')\n\n log.debug('Current user: %s', user)\n # Login successful so update database with user info\n server.submissions.insert_user(user)\n # Login the user using flask_login\n login_user(user, remember=remember)\n flash('You have successfully logged in.', 'success')\n # Redirect to dashboard\n return redirect(url_for('dashboard.overview'))\n\n # Show index of the signin\n return render_template('index.html')", "def home():\n return render_template(\n 'index.html',\n title='Automation Center',\n year=datetime.now().year,\n message='Welcome to the Automation Center'\n )", "def test_04_admin_featured_apps_as_anonymous(self):\r\n res = self.app.get('/admin/featured', follow_redirects=True)\r\n assert \"Please sign in to access this page\" in res.data, res.data", "def game():\n\tif \"username\" in session:\n\t\treturn render_template(\"index.html\")\n\telse:\n\t\treturn redirect(url_for(\"default\"))", "def get(self):\n user = self.get_active_user()\n if user:\n self.render(\"redirect_in_8.html\",\n message=\"\"\"You are already signed in!\n <a href='/logout'>Log out<a>\n before creating a new account or return to\n the <a href='/'>front page</a>\"\"\")\n else:\n self.render(\"signup.html\", main_heading=self.MAIN_HEADING)", "def landingpage (request):\n # Define views here\n context = {}\n return render(request, 'landingpage.html', context=context)", "def home(request):\n user = request.user\n context = get_adventure_info()\n context.update(csrf(request))\n return render_to_response('coreapp/home.html',\n context,\n context_instance=RequestContext(request),)", "def landing():\n return app.send_static_file('landing.html')", "def deny_access():\n flash('You must login first.')\n return redirect(url_for('home'))", "def index():\n if session.get('user_id'):\n return redirect('/feed')\n \n return render_template('index.html')", "def index(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect(reverse('surveys-dashboard'))\n\n context = {}\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context=context)", "def authenticate():\n return Response(render_template('index.html', auth=False), 401,\n {'WWW-Authenticate': 'Basic realm=\"Login Required\"'})", "def special(request):\n return HttpResponse(\"You are logged in !\")", "def get(self):\n self.render(\"login.html\")", "def init(request):\n #logout(request)\n return render_to_response('home.html', context_instance=RequestContext(request))", "def welcome(request):\n return dict(\n user=request.user\n )", "def home(request):\n user = request.user\n\n # For administrators.\n if user.username == admin_username:\n return redirect(reverse('manageproblem'))\n\n #problems = Problem.objects.all()\n problems = Problem.objects.filter(visible=True)\n return render(request, 'code_challenge/global_stream.html', {'problems': problems})", "def test_01_front_page(self):\r\n url = '/'\r\n # As Anonymou user\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to anonymous users\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n # As Authenticated user but NOT ADMIN\r\n self.signin()\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to authenticated users\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n self.signout\r\n # As Authenticated 
user but ADMIN\r\n self.signin(email=self.root_addr, password=self.root_password)\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Top users should be shown to admin\"\r\n assert dom.find(id='top_users') is not None, err_msg\r\n self.signout()", "def index(request):\n if _use_new_ui(request):\n return _serve_new_ui(request)\n\n if request.user is None:\n return view_all(request, index_call=True)\n else:\n return mine(request)", "def show_home_page():\n\n login_form = LoginForm()\n # create register form instance to go in modal\n register_form = UserAddForm()\n\n # handle login form validation\n if login_form.validate_on_submit():\n email = login_form.email.data\n password = login_form.password.data\n\n user = User.authenticate(email, password)\n\n # handle use case for a user being returned with valid password entered\n if user and user != 'invalid password':\n do_login(user)\n flash(f'Hello, {user.username}!', 'secondary')\n return render_template('home.html', user=user)\n # handle invalid password entry\n elif user == 'invalid password':\n login_form.password.errors = [\"Incorrect Password.\"]\n return render_template('home_anon.html', login_form=login_form, register_form=register_form)\n # handle user being not found\n else:\n login_form.email.errors = [\n 'Invalid Credentials. Please check email/password and try again']\n return render_template('home_anon.html', login_form=login_form, register_form=register_form)\n if CURRENT_USER_KEY in session:\n user = User.query.get(session[CURRENT_USER_KEY])\n if user:\n return render_template('home.html', user=user, home_active='active')\n\n # redirect to sign in page if no user is logged in\n\n return render_template('home_anon.html', login_form=login_form, register_form=register_form, img_cls='hidden')" ]
[ "0.7869509", "0.766923", "0.7648024", "0.76207626", "0.75354123", "0.7504049", "0.74748087", "0.73743373", "0.72935927", "0.7250666", "0.72364444", "0.7209014", "0.71815675", "0.71794057", "0.7173211", "0.7159091", "0.7151061", "0.71451926", "0.70916605", "0.7072187", "0.706649", "0.706649", "0.7046315", "0.7027231", "0.7008966", "0.70026475", "0.69968307", "0.6991922", "0.69855624", "0.6985048", "0.6962865", "0.695793", "0.6918699", "0.6916778", "0.68960005", "0.68798965", "0.68511736", "0.68416834", "0.68384", "0.68380374", "0.68343765", "0.68194497", "0.680114", "0.67855006", "0.67667717", "0.6741064", "0.6735823", "0.67332286", "0.67247593", "0.66988224", "0.6677414", "0.6661329", "0.66603893", "0.66380256", "0.6621624", "0.66186166", "0.65909123", "0.6590472", "0.6588059", "0.65652144", "0.6563049", "0.6552345", "0.65438044", "0.6543597", "0.65430295", "0.65373164", "0.65206724", "0.6518331", "0.6517565", "0.6512364", "0.650801", "0.6503292", "0.6499924", "0.64945465", "0.64856976", "0.6462863", "0.64597565", "0.6443195", "0.6442344", "0.64270544", "0.6425749", "0.64164764", "0.6404865", "0.6391662", "0.639144", "0.6389662", "0.6383958", "0.6376176", "0.6372974", "0.6351167", "0.63497055", "0.6349427", "0.634177", "0.63411415", "0.63386554", "0.63384104", "0.6338214", "0.63337547", "0.63244337", "0.63222176", "0.63173616" ]
0.0
-1
This function allows the current user to modify their account information. It includes a feature to change the default profile picture that is assigned during registration of a new user. The picture change relies on the save_image() function located in `utils.py`, where the name of the original picture file is processed and then saved. The new_project_form in the render_template() return is intentionally located here to allow rendering of the tasks.new_project_2 function.
def account():\n    form = UpdateAccountForm()\n    new_project_form = ProjectForm()\n    if form.validate_on_submit():\n        if form.picture.data:  # if statement responsible for change of default picture\n            picture_file = save_image(form.picture.data)\n            current_user.img_file = picture_file\n        current_user.user_name = form.user_name.data\n        current_user.email = form.email.data\n        db.session.commit()\n        flash("Changes saved", "success")\n        return redirect(url_for('users.account'))\n    elif request.method == "GET":\n        form.user_name.data = current_user.user_name\n        form.email.data = current_user.email\n    img_file = url_for('static', filename='images/' + current_user.img_file)\n    return render_template('account.html', title="Account", form=form, img_file=img_file, new_project_form=new_project_form)
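The query above refers to a save_image() helper in `utils.py` that is not included in this row. A minimal sketch of what such a helper could look like follows; the function name matches the call in the document, but the random-name scheme, the (125, 125) thumbnail size, and the 'static/images' destination are assumptions for illustration only, not part of the dataset.

```python
# Hypothetical sketch of the save_image() helper referenced above (utils.py).
# The naming scheme, output size, and destination folder are assumptions.
import os
import secrets

from flask import current_app
from PIL import Image


def save_image(form_picture):
    # Give the uploaded file a random name so it cannot collide with existing files.
    random_hex = secrets.token_hex(8)
    _, file_ext = os.path.splitext(form_picture.filename)
    picture_filename = random_hex + file_ext
    picture_path = os.path.join(current_app.root_path, 'static/images', picture_filename)

    # Downscale before saving to keep profile pictures small.
    output_size = (125, 125)
    img = Image.open(form_picture)
    img.thumbnail(output_size)
    img.save(picture_path)

    # The caller stores this name on current_user.img_file.
    return picture_filename
```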
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def account():\n\n form = UpdateUserForm()\n\n if form.validate_on_submit():\n print(form)\n if form.picture.data:\n username = current_user.username\n pic = add_profile_pic(form.picture.data,username)\n current_user.profile_image = pic\n\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('User Account Updated')\n return redirect(url_for('users.account'))\n\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n\n profile_image = url_for('static', filename='profile_pics/' + current_user.profile_image)\n return render_template('account.html', profile_image=profile_image, form=form)", "def change_profile_img(self):\n get_photo = reddit_scrapper()\n get_photo.get_image()\n # Send image to instagram profile picture on the hidden input tag\n profile_pic_button = self.driver.find_elements_by_xpath(\n '//*[@id=\"react-root\"]/section/main/section/div[3]/div[1]/div[2]/form/input')[0].send_keys(os.getcwd() + '/daily_image/daily.jpg')\n\n time.sleep(1)\n save_profile_pic = self.driver.find_elements_by_xpath(\n '//button[contains(text(), \"Save\")]')[0].click()\n time.sleep(1)\n self.driver.get(base_url)", "def edit(request):\n if not request.user.is_authenticated():\n return redirect('/tasks/login/')\n args = {}\n args.update(csrf(request))\n images = ImageUpload.objects.all()\n args['user'] = auth.get_user(request)\n args['images'] = images\n data = Information.objects.get()\n form_information = InformationForm(instance=data)\n if request.method == 'POST' and request.FILES.get('photo') is None:\n form_post = InformationForm(request.POST, instance=data)\n if form_post.is_valid():\n form_post.save()\n return redirect('/')\n else:\n args['form'] = form_post\n elif request.method == 'POST' and request.FILES.get('photo') is not None:\n form_upload = ImageUploadForm(request.POST, request.FILES)\n if form_upload.is_valid():\n new_img = ImageUpload(photo=request.FILES['photo'])\n new_img.save()\n args['form'] = form_information\n args['success'] = 1\n else:\n args['form'] = form_information\n args['form_upload'] = form_upload\n else:\n args['form'] = form_information\n return render_to_response('tasks/edit.html', args, context_instance=RequestContext(request))", "def profile():\n\n if not g.user:\n flash(\"Access unauthorized.\", \"danger\")\n return redirect(\"/\")\n\n form = UserEditForm(obj=g.user)\n\n if form.validate_on_submit():\n if not User.authenticate(g.user.username, form.data[\"password\"]):\n flash(\"Invalid password.\", \"danger\")\n return render_template('/users/edit.html', form=form) \n # data = {k:v for k,v in form.data.items() if k != \"csrf_token\"}\n # data[\"image_url\"] = data[\"image_url\"] or None\n # data[\"header_image_url\"] = data[\"header_image_url\"] or None\n\n g.user.username = form.data[\"username\"]\n g.user.email = form.data[\"email\"]\n g.user.image_url = form.data[\"image_url\"] or None\n g.user.header_image_url = form.data[\"header_image_url\"] or None\n g.user.bio = form.data[\"bio\"]\n\n db.session.commit()\n\n flash(\"Profile edited!\", \"success\")\n return redirect(f'/users/{g.user.id}')\n\n return render_template('/users/edit.html', form=form)", "def profile(request, info=\"\", error_msg=\"\", messages=\"\"):\r\n try:\r\n user = _validate_and_get_geniuser(request)\r\n except LoggedInButFailedGetGeniUserError:\r\n return _show_failed_get_geniuser_page(request)\r\n\r\n email_form = forms.gen_edit_user_form(instance=user)\r\n 
affiliation_form = forms.gen_edit_user_form(instance=user)\r\n password_form = forms.EditUserPasswordForm()\r\n\r\n if request.method == 'POST':\r\n if 'affiliation' in request.POST:\r\n affiliation_form = forms.gen_edit_user_form(('affiliation',), request.POST, instance=user)\r\n if affiliation_form.is_valid():\r\n new_affiliation = affiliation_form.cleaned_data['affiliation']\r\n interface.change_user_affiliation(user, new_affiliation)\r\n info =\"Affiliation has been successfully changed to %s.\" % (user.affiliation)\r\n elif 'email' in request.POST:\r\n email_form = forms.gen_edit_user_form(('email',), request.POST, instance=user)\r\n if email_form.is_valid():\r\n new_email = email_form.cleaned_data['email']\r\n interface.change_user_email(user, new_email)\r\n info =\"Email has been successfully changed to %s.\" % (user.email)\r\n elif 'password1' in request.POST:\r\n password_form = forms.EditUserPasswordForm( request.POST, instance=user)\r\n if password_form.is_valid():\r\n new_password = password_form.cleaned_data['password1']\r\n interface.change_user_password(user, new_password)\r\n info =\"Password has been successfully changed\"\r\n\r\n username = user.username\r\n affiliation = user.affiliation\r\n email = user.email\r\n port = user.usable_vessel_port\r\n has_privkey = user.user_privkey != None\r\n #currently not used, needed if editing user port is allowed\r\n #port_range = interface.get_useable_ports()\r\n #port_range_min = port_range[0]\r\n #port_range_max = port_range[-1]\r\n\r\n return render_to_response('control/profile.html',\r\n {'email_form' : email_form,\r\n 'affiliation_form' : affiliation_form,\r\n 'password_form' : password_form,\r\n 'username' : username,\r\n 'affiliation' : affiliation,\r\n 'email' : email,\r\n 'port' : port,\r\n 'api_key' : user.api_key,\r\n 'has_privkey' : has_privkey,\r\n #'port_range_min' : port_range_min,\r\n #'port_range_max' : port_range_max,\r\n 'info' : info,\r\n 'error_msg' : error_msg,\r\n 'messages' : messages},\r\n context_instance=RequestContext(request))", "def form_valid(self, form):\n User.objects.filter(username=self.object).update(\n user_image =form.cleaned_data['user_image'],\n )\n myfile = self.request.FILES['user_image']\n fs = FileSystemStorage()\n filename = fs.save(myfile.name, myfile)\n messages.success(self.request, 'Image uploaded successfully')\n return super().form_valid(form)", "def post(self, request, *args, **kwargs):\n user_prof = UserProfile.objects.get(user=request.user)\n form = AboutFunderForm(request.POST, request.FILES)\n if form.is_valid():\n name = form.cleaned_data['name']\n content = form.cleaned_data.get('content')\n funder_or_adviser = form.cleaned_data.get('funder_or_adviser')\n x = form.cleaned_data.get('x')\n y = form.cleaned_data.get('y')\n w = form.cleaned_data.get('width')\n h = form.cleaned_data.get('height')\n image = form.cleaned_data.get('image')\n # ATTENTION! 
change email if you ever want to allow more users to be able to edit funders or contributors.\n if user_prof.user.email == \"relevate@outlook.com\":\n new_about_person = AboutPerson(\n name=name,\n content=content,\n image=image,\n funder_or_adviser=funder_or_adviser\n )\n new_about_person.save()\n # If user inputs image file instead of url\n if image:\n # Gets the original image to be cropped\n photo = Image.open(form.cleaned_data.get('image'))\n # Cropps the image using values x,y,w,and h from the form\n cropped_image = photo.crop((x, y, w + x, h + y))\n # Splits the file name and the extension\n filename, file_extension = os.path.splitext(\n os.path.basename(urlparse(new_about_person.image.url).path))\n cropped_image.save(settings.BASE_DIR + \"/media/about_person/image/\" + filename + file_extension)\n print(filename)\n print(file_extension)\n print(settings.BASE_DIR + \"/media/about_person/image/\" + filename + file_extension)\n new_about_person.image = \"about_person/image/\" + filename + file_extension\n print(new_about_person.image)\n new_about_person.save()\n messages.success(request, \"Funder or Advisor Was Successfully Added!\")\n return HttpResponseRedirect(reverse_lazy('contribution:about'))\n else:\n return HttpResponseRedirect(reverse_lazy(\"contribution:home\"))\n else:\n print(\"Invalid\")\n display_error(form, request)\n return render(request, 'about_create.html',\n {\n 'form': form,\n 'user_prof': user_prof,\n })", "def update_profile(name):\r\n user = User.query.filter_by(name=name).first()\r\n if not user:\r\n return abort(404)\r\n if current_user.id != user.id:\r\n return abort(403)\r\n show_passwd_form = True\r\n if user.twitter_user_id or user.google_user_id or user.facebook_user_id:\r\n show_passwd_form = False\r\n usr, apps, apps_created = cached_users.get_user_summary(name)\r\n # Extend the values\r\n current_user.rank = usr.get('rank')\r\n current_user.score = usr.get('score')\r\n # Title page\r\n title_msg = \"Update your profile: %s\" % current_user.fullname\r\n # Creation of forms\r\n update_form = UpdateProfileForm(obj=user)\r\n update_form.set_locales(current_app.config['LOCALES'])\r\n avatar_form = AvatarUploadForm()\r\n password_form = ChangePasswordForm()\r\n external_form = update_form\r\n\r\n\r\n if request.method == 'GET':\r\n return render_template('account/update.html',\r\n title=title_msg,\r\n user=usr,\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n show_passwd_form=show_passwd_form)\r\n else:\r\n # Update user avatar\r\n if request.form.get('btn') == 'Upload':\r\n avatar_form = AvatarUploadForm()\r\n if avatar_form.validate_on_submit():\r\n file = request.files['avatar']\r\n coordinates = (avatar_form.x1.data, avatar_form.y1.data,\r\n avatar_form.x2.data, avatar_form.y2.data)\r\n prefix = time.time()\r\n file.filename = \"%s_avatar.png\" % prefix\r\n container = \"user_%s\" % current_user.id\r\n uploader.upload_file(file,\r\n container=container,\r\n coordinates=coordinates)\r\n # Delete previous avatar from storage\r\n if current_user.info.get('avatar'):\r\n uploader.delete_file(current_user.info['avatar'], container)\r\n current_user.info = {'avatar': file.filename,\r\n 'container': container}\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your avatar has been updated! 
It may \\\r\n take some minutes to refresh...'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(\"You have to provide an image file to update your avatar\",\r\n \"error\")\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Update user profile\r\n elif request.form.get('btn') == 'Profile':\r\n update_form = UpdateProfileForm()\r\n update_form.set_locales(current_app.config['LOCALES'])\r\n if update_form.validate():\r\n current_user.id = update_form.id.data\r\n current_user.fullname = update_form.fullname.data\r\n current_user.name = update_form.name.data\r\n current_user.email_addr = update_form.email_addr.data\r\n current_user.privacy_mode = update_form.privacy_mode.data\r\n current_user.locale = update_form.locale.data\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your profile has been updated!'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(gettext('Please correct the errors'), 'error')\r\n title_msg = 'Update your profile: %s' % current_user.fullname\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n\r\n # Update user password\r\n elif request.form.get('btn') == 'Password':\r\n # Update the data because passing it in the constructor does not work\r\n update_form.name.data = user.name\r\n update_form.fullname.data = user.fullname\r\n update_form.email_addr.data = user.email_addr\r\n update_form.ckan_api.data = user.ckan_api\r\n external_form = update_form\r\n if password_form.validate_on_submit():\r\n user = db.session.query(model.user.User).get(current_user.id)\r\n if user.check_password(password_form.current_password.data):\r\n user.set_password(password_form.new_password.data)\r\n db.session.add(user)\r\n db.session.commit()\r\n flash(gettext('Yay, you changed your password succesfully!'),\r\n 'success')\r\n return redirect(url_for('.update_profile', name=name))\r\n else:\r\n msg = gettext(\"Your current password doesn't match the \"\r\n \"one in our records\")\r\n flash(msg, 'error')\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n else:\r\n flash(gettext('Please correct the errors'), 'error')\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Update user external services\r\n elif request.form.get('btn') == 'External':\r\n del external_form.locale\r\n del external_form.email_addr\r\n del external_form.fullname\r\n del external_form.name\r\n if external_form.validate():\r\n current_user.ckan_api = external_form.ckan_api.data or None\r\n db.session.commit()\r\n cached_users.delete_user_summary(current_user.name)\r\n flash(gettext('Your profile has been updated!'), 'success')\r\n return redirect(url_for('.update_profile', name=current_user.name))\r\n else:\r\n flash(gettext('Please correct the errors'), 
'error')\r\n title_msg = 'Update your profile: %s' % current_user.fullname\r\n return render_template('/account/update.html',\r\n form=update_form,\r\n upload_form=avatar_form,\r\n password_form=password_form,\r\n external_form=external_form,\r\n title=title_msg,\r\n show_passwd_form=show_passwd_form)\r\n # Otherwise return 415\r\n else:\r\n return abort(415)", "def edit_user():\n if CURR_USER_KEY in session:\n user = g.user\n form = ProfileEditForm(obj=user)\n\n if form.validate_on_submit():\n user.first_name = form.first_name.data\n user.last_name = form.last_name.data\n user.description = form.description.data\n user.email = form.email.data\n user.image_url = form.image_url.data or \"/static/images/default-pic.png\"\n\n db.session.commit()\n\n flash(\"Profile edited.\")\n return redirect(\"/profile\")\n\n return render_template('/profile/edit-form.html', form=form)\n else:\n return redirect('/login')", "def edit_user_information():\n session_id = request.args.get('session-id', None)\n old_username = request.args.get('user-id', None)\n user = get_user_by_id(old_username)\n if request.method == 'POST':\n surname = request.form['surname']\n name = request.form['name']\n birthdate = request.form['birthdate']\n new_username = request.form['username']\n today = datetime.date.today()\n reservations_list = get_user_reservations_list(old_username)\n cars_reservations_list = get_cars_user_reservations_list(reservations_list)\n reservations_status_list = get_reservations_status_list(reservations_list)\n if check_authentication(session_id, old_username):\n are_changes_valid = edit_user_info(name, surname, birthdate, old_username, new_username)\n else:\n return render_template('home.html', cars_list=get_cars_preview(), news_list=get_news_list(), authjs=False,\n preview_length=get_cars_preview().__len__(), del_session_cookie=True)\n if are_changes_valid == \"OK\":\n edit_session(session_id, new_username)\n return render_template('user_area.html', user=new_username, session_id=session_id, edit_mode=False,\n surname=surname, name=name, birthdate=birthdate, today=today,\n reservations_list=reservations_list, cars_reservations_list=cars_reservations_list,\n reservations_status_list=reservations_status_list)\n else:\n return render_template('user_area.html', user=user.id, session_id=session_id, edit_mode=True,\n surname=user.surname, name=user.name, birthdate=user.birthdate,\n feedback_msg=are_changes_valid, today=today,\n reservations_list=reservations_list, cars_reservations_list=cars_reservations_list,\n reservations_status_list=reservations_status_list)", "def makeProfile(request):\n upr = UserProfile()\n upr.user = request.user\n upr.image = \"images/no-pic.png\"\n upr.save()", "def _populate_user_and_project(self, template_dictionary, escape_db_operations=False):\n logged_user = get_logged_user()\n template_dictionary[KEY_USER] = logged_user\n show_help = logged_user is not None and logged_user.is_online_help_active()\n template_dictionary[KEY_SHOW_ONLINE_HELP] = show_help\n\n project = get_current_project()\n template_dictionary[KEY_PROJECT] = project\n if project is not None and not escape_db_operations:\n self.update_operations_count()\n return template_dictionary", "def _add_profile_image(self):\r\n self.profile_image_is_set = True\r\n file_name = filedialog.askopenfilename(initialdir=\"/\", title=self.language.refactor(\"Select GIF file\"),\r\n filetypes=((\"GIF files\", \"*.gif\"),))\r\n if file_name == '':\r\n self.new_user_window.lift()\r\n return\r\n\r\n 
self.add_profile_gif_button.destroy()\r\n gif_canvas = Ctk.CCanvas(self.new_user_window, corners='angular', size=(180, 180),\r\n bg=self.new_user_window['background'])\r\n gif_canvas.create_gif(gif_path=file_name, corner='round', size=(175, 175), pos=(90, 90),\r\n transparent=True, speed='normal')\r\n gif_canvas.place(*(15, 50))\r\n\r\n self.gif_file_path = file_name\r\n\r\n self.new_user_window.lift()", "def user_edit(user_id):\n\n if not g.user:\n return _get_json_message(\n INVALID_CREDENTIALS_MSG,\n INVALID_CREDENTIALS_STATUS_CODE)\n\n current_user = User.query.get_or_404(user_id)\n received = request.form\n file = request.files.get(\"image_url\")\n form = UserEditForm(csrf_enabled=False, data=received)\n\n if form.validate_on_submit():\n if not User.authenticate(g.user.username, form.password.data):\n return _get_json_message(\n \"unable-to-update-user\",\n INVALID_CREDENTIALS_STATUS_CODE)\n\n try:\n # update non image_url fields\n current_user.email = form.email.data\n current_user.first_name = form.first_name.data,\n current_user.last_name = form.last_name.data,\n current_user.hobbies = form.hobbies.data,\n current_user.interests = form.interests.data,\n current_user.zip_code = form.zip_code.data,\n current_user.friend_radius_miles = form.friend_radius_miles.data\n\n current_user.coordinates = User.get_coords(form.zip_code.data)\n\n # update image_url with uploaded file\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n url = upload_file_obj(file, S3_BUCKET, filename)\n\n current_user.image_url = url\n\n db.session.commit()\n\n return jsonify(user=current_user.serialize())\n except ClientError as e:\n print(e)\n return _get_json_message(\n \"image-upload-failed\",\n INVALID_CREDENTIALS_STATUS_CODE)\n\n return _get_json_message(\n \"unable-to-update-user\",\n INVALID_CREDENTIALS_STATUS_CODE)", "def update_picture(self, username, picture):\n self.update(('Picture', picture), username)", "def profile_pic(self, client_file_storage):\n\n # If we already have a profile picture, remove it\n if self.profile_pic_filename:\n filepath = os.path.join(\n current_app.config['UPLOADED_IMAGES_DEST'],\n self.profile_pic_filename)\n os.remove(filepath)\n self.profile_pic_filename = None\n self.profile_pic_url = None\n\n # This uploads & saves the file on the server\n # NOTE: It uses the secure_filename function...\n server_filename = images.save(client_file_storage)\n\n # Generate the URL to this file\n url = images.url(server_filename)\n\n # Store information with the user\n self.profile_pic_filename = server_filename\n self.profile_pic_url = url", "def account():\n \n form = UpdateAccountForm()\n \n # perform actions when the form is submitted\n if form.validate_on_submit():\n # checking if the form contains a picture file\n if form.picture.data:\n picture_file = save_picture(form.picture.data)\n current_user.image_file = picture_file\n # changing the current user details with the form data\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('Your account has been updated!', 'success')\n return redirect(url_for('account'))\n # performs action if the form method is get\n elif request.method == 'GET':\n # setting the form data with the user data from the database\n form.username.data = current_user.username\n form.email.data = current_user.email\n image_file = url_for('static', filename='profile_pics/' + current_user.image_file)\n return render_template('account.html', title='Account',\n 
image_file=image_file, form=form)", "def add_profile_photo():\n pass", "def profile():\n\n form = EditUserForm(obj=g.user)\n\n if form.validate_on_submit():\n if User.authenticate(g.user.username, form.password.data):\n g.user.username = form.username.data\n g.user.email = form.email.data\n g.user.image_url = form.image_url.data\n g.user.header_image_url = form.header_image_url.data\n g.user.bio = form.bio.data\n g.user.private = form.private.data\n db.session.commit()\n return redirect(f'/users/{g.user.id}')\n flash('Incorrect password', 'danger')\n return render_template('users/edit.html', user_id=g.user.id, form=form)", "def generate_profile(request, pk=0):\n context = {}\n extra_dock = int(request.POST.get('extra_dock', 0))\n extra_firewall = int(request.POST.get('extra_firewall', 0))\n config = ConfigurationProfile.objects.filter(pk=pk).first()\n edit_mode = False\n if config is not None:\n edit_mode = True\n if request.method == 'POST':\n form = ProfileForm(request.POST, extra_dock=extra_dock, extra_firewall=extra_firewall, edit_mode=edit_mode)\n if form.is_valid() and request.POST['save'] != \"+ Add App\" and request.POST['save'] != \"Add App\":\n context['data'] = form.cleaned_data\n context['password'] = 'Nice Try!'\n context['payloads'] = get_payloads(request.POST)\n context['data']['static_apps'] = dock_app_list(context['data'])\n context['data']['firewall_apps'] = fw_app_list(context['data'])\n\n # If removal date, convert to string\n if context['data']['removal_date'] is not None:\n context['data']['removal_date'] = context['data']['removal_date'].strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n\n # Generate UUIDs for the payloads\n if not edit_mode:\n context['identifiers'] = generate_ids()\n else:\n profile_data = open(config.profile)\n data = json.load(profile_data)\n profile_data.close()\n context['identifiers'] = data['identifiers']\n\n # Save to file\n display_name = request.POST.get('display_name')\n filename = request.POST.get('filename')\n path = os.path.join(settings.MEDIA_ROOT, 'profiles', '{}.json'.format(filename))\n with open(path, 'w') as profile:\n profile.write(json.dumps(context))\n\n new_profile, created = ConfigurationProfile.objects.get_or_create(\n name=display_name,\n profile=os.path.join(settings.MEDIA_ROOT, 'profiles', '{}.json'.format(filename))\n )\n new_profile.scope = context['data']['scope']\n new_profile.save()\n\n # If 'Save and Redeploy' selected, configure MDM to update all previously installed copies as well\n if request.POST['save'] == 'Save and Redeploy':\n laptops = Laptop.objects.all().filter(mdm_enrolled=True, retired=False, installed__in=[new_profile])\n for laptop in laptops:\n laptop.installed.remove(new_profile)\n laptop.pending.add(new_profile)\n\n template = loader.get_template('default.html')\n return HttpResponse(template.render({\n 'title': \"Success!\",\n 'message': \"Your new configuration profile has been generated successfully! 
It is now available for \"\n \"download through the MDM.\",\n 'NO_FOOT': True,\n 'EXIT_BTN': True,\n 'EXIT_URL': reverse(\"mdm:list\")\n }, request))\n else:\n if request.POST['save'] == \"+ Add App\":\n extra_dock += 1\n elif request.POST['save'] == \"Add App\":\n extra_firewall += 1\n context['form'] = ProfileForm(request.POST, extra_dock=extra_dock, extra_firewall=extra_firewall,\n edit_mode=edit_mode)\n else:\n if edit_mode:\n profile_data = open(config.profile)\n file_data = json.load(profile_data)\n if file_data['data']['removal_date'] is not None:\n file_data['data']['removal_date'] = timezone.make_aware(\n datetime.datetime.strptime(file_data['data']['removal_date'], '%Y-%m-%dT%H:%M:%SZ'))\n profile_data.close()\n form = ProfileForm(None, initial=file_data['data'], extra_dock=file_data['data']['extra_dock'],\n extra_firewall=file_data['data']['extra_firewall'], edit_mode=True)\n else:\n identifier = str(uuid.uuid4())\n filename = \"profile-{}\".format(identifier[0:8])\n form = ProfileForm(initial={'filename': filename}, extra_dock=extra_dock, extra_firewall=extra_firewall,\n edit_mode=False)\n context['form'] = form\n\n # Ensure the automatic profile removal options are hidden if not being utilized\n context['custom_script'] = \"$(document).ready(function (){$('#id_auto_remove').change(function (){\" \\\n \"if (this.value == 'default') {$('#div_id_removal_date').hide();\" \\\n \"$('#div_id_removal_period').hide();}else{$('#div_id_removal_date').show();\" \\\n \"$('#div_id_removal_period').show();}});$('#id_auto_remove').change();});\"\n context['msg'] = \"Manage Configuration Profile\"\n return render(request, 'form_crispy.html', context)", "def userProfile(userid):\n images = get_uploaded_images()\n record = UserProfile.query.filter_by(id=userid).first()\n return render_template('userProfile.html', images=images, record =record)", "def my_team(request):\n template = loader.get_template('team/my_team.html')\n team = request.user.profile.team\n\n if team is not None:\n team_members = User.objects.filter(profile__team=team)\n\n context = {\n 'team_name': team.name,\n 'team_members': team_members,\n 'team_logo': team.logo,\n 'team_info': team.information\n }\n if request.POST.get('save'):\n if request.POST.get('new_name') != '':\n new_name = request.POST.get('new_name')\n team.name = new_name\n if 'logo_image' in request.FILES:\n team.logo = request.FILES['logo_image']\n new_info = request.POST.get('new_info')\n team.information = new_info\n team.save()\n\n context['team_name'] = team.name\n context['team_info'] = team.information\n context['team_logo'] = team.logo\n\n if request.POST.get('save_name'):\n new_name = request.POST.get('new_name')\n team.name = new_name\n team.save()\n context['team_name'] = team.name\n\n if request.POST.get('save_info'):\n new_info = request.POST.get('new_info')\n team.information = new_info\n team.save()\n context['team_info'] = team.information\n\n if request.POST.get('save_logo'):\n team.logo = request.FILES['logo_image']\n team.save()\n context['team_logo'] = team.logo\n\n if request.POST.get('leave_team'):\n request.user.profile.team = None\n request.user.profile.save()\n context = None\n return redirect('/')\n\n return CustomHttpResponse.send(template, context, request)\n\n else:\n return redirect('/team/new')", "def select_default_picture(sender, instance, **kwargs):\n if not instance.id:\n instance.picture = \"/static/user%s.png\"%(\"F\" if instance.female else \"M\")", "def account_view(request):\n \"\"\"if request.user.is_authenticated:\n form 
= None\n\n # TODO Objective 3: Create Forms and Handle POST to Update UserInfo / Password\n\n user_info = models.UserInfo.objects.get(user=request.user)\n context = { 'user_info' : user_info,\n 'form' : form }\n return render(request,'account.djhtml',context)\n request.session['failed'] = True\n return redirect('login:login_view')\n \"\"\"\n\n if request.user.is_authenticated:\n form = None\n # TODO Objective 3: Create Forms and Handle POST to Update UserInfo / Password\n existingUserInfo = models.UserInfo.objects.get(user=request.user)\n print(\"existingUserInfo:----------\",existingUserInfo.location)\n if request.method == 'POST':\n formName = request.POST.get('name')\n print(\"-------formName:\" + formName);\n\n if (formName == 'pwdForm'):\n password = request.POST['password']\n if password is not None and password != \"\":\n user = get_user(request)\n user.set_password(password)\n user.save()\n return redirect('login:login_view')\n else:\n request.user.employment = request.POST['employment']\n request.user.location = request.POST['location']\n request.user.birthday = request.POST['birthday']\n request.user.interests = request.POST['interests']\n inter = models.Interest(label=request.POST['interests'])\n inter.save()\n request.user.save()\n\n if request.POST['employment'] != '':\n existingUserInfo.employment = request.user.employment\n\n\n if request.POST['location'] != '':\n existingUserInfo.location = request.user.location\n\n if request.POST['birthday'] != \"\":\n existingUserInfo.birthday = request.user.birthday\n elif existingUserInfo.birthday==None:\n # existingUserInfo.birthday = datetime.strptime(str(existingUserInfo.birthday), '%Y-%m-%d')\n existingUserInfo.birthday = None\n\n if request.POST['interests'] != \"\" and request.POST['interests'] is not None:\n inter = models.Interest(label=request.POST['interests'])\n inter.save()\n existingUserInfo.interests.add(inter)\n\n existingUserInfo.save()\n\n\n context = {'user_info': existingUserInfo,\n 'login_form': form}\n return render(request, 'account.djhtml', context)\n request.session['failed'] = True\n return redirect('login:login_view')", "def modify_by_user():\n\n user_form = UserForm(request.form)\n\n user_form.username.data = current_user.username\n user_form.email.data = current_user.email\n user_form.password.data = current_user.password\n user_form.first_name.data = current_user.first_name\n user_form.last_name.data = current_user.last_name\n\n\n if user_form.validate_on_submit():\n\n if not request.form['username'] or request.form['username'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/modify_by_user.html', title='Modify Profile',\n user_form=user_form)\n if not request.form['email'] or request.form['email'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/modify_by_user.html', title='Modify Profile',\n user_form=user_form)\n if not request.form['password'] or request.form['password'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/modify_by_user.html', title='Modify Profile',\n user_form=user_form)\n if request.form['password'] != request.form['retype_password']:\n flash(\"Passwords are not the same!\",\"warn\")\n return render_template('user/modify_by_user.html', title='Modify Profile',\n user_form=user_form)\n\n\n hashed_password = user_manager.hash_password(request.form['password'])\n\n current_user.username = request.form['username']\n current_user.email = 
request.form['email']\n current_user.password = hashed_password\n current_user.first_name = request.form['first_name']\n current_user.last_name = request.form['last_name']\n current_user.confirmed_at = datetime.datetime.utcnow()\n\n try:\n correct = True\n db.session.commit()\n except Exception as e:\n # Catch anything unknown\n print(e)\n correct = False\n finally:\n if not correct:\n # Cleanup and show error\n db.session.rollback()\n flash('Error modifying user, make sure username and email are unique','error')\n return render_template('user/modify_by_user.html', title='Modify Profile',\n user_form=user_form)\n else:\n flash('The user was successfully modified.','success')\n return redirect(url_for('user_ksat.show_user'))\n\n return render_template('user/modify_by_user.html', title='Modify Profile',user_form=user_form)", "def upload_project(request):\n current_user = request.user\n current_user_name = current_user.username\n # project_ratings=Rating.objects.filter(id=project_id)\n if request.method == 'POST':\n form = ProjectForm(request.POST, request.FILES)\n if form.is_valid():\n project_post = form.save(commit=True) \n else:\n raise Http404 \n \n return redirect(view_projects)\n else: \n project_form=ProjectForm()\n \n return render(request, 'upload_project.html', {'project_form':project_form})", "def map(item):\n if item.deleted or item.profile_picture_data_url is not None:\n return\n\n user_services.generate_initial_profile_picture(item.id)", "def save(self, *args, **kwargs):\n c_d = self.cleaned_data\n if c_d.get('id') and c_d.get('avatar') and (\n isinstance(c_d.get('avatar'), UploadedFile)):\n person = get_object_or_404(Person, id=c_d.get('id'))\n try:\n old_avatar = person.avatar.file.name\n except ValueError:\n old_avatar = None\n person = super(PersonForm, self).save(*args, **kwargs)\n user = person.user\n user.username = c_d['username']\n user.first_name = c_d['first_name']\n user.last_name = c_d['last_name']\n user.email = c_d['email_address']\n pass1 = c_d.get('new_password')\n if pass1:\n user.set_password(pass1)\n user.save()\n if isinstance(c_d.get('avatar'), UploadedFile):\n os.remove(self.cleaned_data['avatar'].file.name)\n if old_avatar:\n os.remove(old_avatar)\n return person", "def edit_user_profile(request):\n user = request.user\n user_profile = UserProfile.objects.filter(user=user)[0]\n if request.method == 'POST':\n form = MemberProfileForm(request.POST)\n additional_form = MemberAdditionalProfileForm(request.POST)\n if form.is_valid() and additional_form.is_valid():\n cd = form.cleaned_data\n user.first_name = cd['first_name']\n user.last_name = cd['last_name']\n user.email = cd['email']\n user.save()\n if 'picture' in request.FILES:\n file = request.FILES['picture']\n user_profile.picture.save(file.name, file, save=True)\n user_profile.gravatar = additional_form.cleaned_data['gravatar']\n user_profile.save()\n return HttpResponseRedirect('/')\n else:\n form = MemberProfileForm(instance=request.user)\n additional_form = MemberAdditionalProfileForm(instance=user_profile)\n return render_to_response('edit_profile.html', locals())", "def set_user_profile_picture(user_id, file_name):\n\n user = User.query.get(user_id)\n \n user.profile_picture = file_name\n db.session.commit()", "def update_user_service(user: User, username: str, email: str, image_file: str = None) -> None:\n user.username = username\n user.email = email\n\n if image_file:\n user.image_file = image_file\n\n db.session.commit()", "def make_user_copy(module_name, user):\n\tstandard_name = 
frappe.db.get_value(\"Desktop Icon\", {\"module_name\": module_name, \"standard\": 1})\n\n\tif not standard_name:\n\t\tfrappe.throw(_(\"{0} not found\").format(module_name), frappe.DoesNotExistError)\n\n\toriginal = frappe.get_doc(\"Desktop Icon\", standard_name)\n\n\tdesktop_icon = frappe.get_doc(\n\t\t{\"doctype\": \"Desktop Icon\", \"standard\": 0, \"owner\": user, \"module_name\": module_name}\n\t)\n\n\tfor key in (\n\t\t\"app\",\n\t\t\"label\",\n\t\t\"route\",\n\t\t\"type\",\n\t\t\"_doctype\",\n\t\t\"idx\",\n\t\t\"reverse\",\n\t\t\"force_show\",\n\t\t\"link\",\n\t\t\"icon\",\n\t\t\"color\",\n\t):\n\t\tif original.get(key):\n\t\t\tdesktop_icon.set(key, original.get(key))\n\n\tdesktop_icon.insert(ignore_permissions=True)\n\n\treturn desktop_icon", "def insert_account_image():\n \n if 'username' in session:\n \n if request.method == 'POST' and 'image' in request.files:\n \n if \"filesize\" in request.cookies:\n \n if not allowed_image_filesize(request.cookies[\"filesize\"]):\n flash(f'Exceeds file size limit of 5MB', 'warning')\n return redirect(url_for('add_account_image'))\n \n image = request.files['image']\n \n if image.filename == '':\n flash('Your image is missing a filename', 'warning')\n return redirect(url_for('add_account_image'))\n \n if not allowed_image(image.filename):\n flash('Supported file types are \"png\", \"jpg\" or \"jpeg\"', 'warning')\n return redirect(url_for('add_account_image'))\n else:\n filename = secure_filename(image.filename)\n username = session['username']\n filename = f'{username}.jpg'\n \n s3_resource = boto3.resource('s3')\n bucket = s3_resource.Bucket(S3_BUCKET)\n bucket.Object(filename).put(Body=image)\n \n profile_image = f'{username}.jpg'\n \n user = mongo.db.user.find_one({'username': username})\n user_id = user['_id']\n \n mongo.db.user.find_one_and_update({'_id': ObjectId(user_id)},\n {'$set':\n {'profile_image': profile_image\n }\n }\n )\n flash(f'Your profile image has been updated to {image.filename}.', 'success')\n return redirect(url_for('dashboard'))\n \n flash('Something has gone wrong, please try again when you next login', 'info')\n return redirect(url_for('dashboard'))\n \n flash('You need to be logged in to access account settings.', 'warning')\n return redirect(url_for('login'))", "def save_profile_picture(current_user, url):\n # save profile_pic\n current_user.profile_pic = url\n current_user.save(current_user)", "def save(self, *args, **kwargs):\n\n kwargs[\"commit\"] = False\n user = super(JOSProfileForm, self).save(*args, **kwargs)\n\n try:\n profile = get_profile_for_user(user)\n profile_form = self.get_profile_fields_form()\n profile_form(self.data, self.files, instance=profile).save()\n except ProfileNotConfigured:\n pass\n\n return user", "def update_project_data(project_name):\n project_path = context.__PROJECTS_PATH__+ '/' + project_name\n f = open(project_path+'/.project', 'r')\n project_data = json.load(f)\n f.close()\n\n image_count = len(os.listdir(project_path)) - 2\n\n if image_count > 0:\n\n img = Image.open('{}/{}.jpg'.format(project_path, image_count-1))\n img = img.resize((640,480))\n buffered = BytesIO()\n img.save(buffered, format=\"JPEG\")\n img_str = base64.b64encode(buffered.getvalue()).decode('ascii')\n\n project_data['preview_data'] = img_str\n project_data['size'] = round(int(subprocess.check_output(['du', project_path, '-k']).split()[0]) / 1000,2)\n\n with open('{}/.project'.format(project_path), 'w') as config_file:\n json.dump(project_data, config_file, indent=4)\n config_file.close()", "def 
manage_myprofile(request):\n profile = request.user.get_profile()\n users_image = profile.users_image\n if not profile:\n raise Http404\n if request.method == 'POST':\n profile_form = MyProfileForm(request.POST, instance = profile)\n address_contact_form = AddressForm(request.POST,\n instance = profile.address_contact, prefix = 'contact')\n address_permanent_form = AddressForm(request.POST,\n instance = profile.address_permanent, prefix = 'permanent')\n\n if profile_form.is_valid() and address_contact_form.is_valid() \\\n and address_permanent_form.is_valid():\n address_contact = address_contact_form.save()\n address_permanent = address_permanent_form.save()\n\n profile_form.save(address_contact = address_contact,\n address_permanent = address_permanent)\n messages.success(request,\n _('your profile details saved sucessfully'))\n else:\n profile_form = MyProfileForm(instance = profile)\n address_contact_form = AddressForm(instance = profile.address_contact,\n prefix = 'contact')\n address_permanent_form = AddressForm(instance\n = profile.address_permanent, prefix = 'permanent')\n\n return render(request, 'myprofile.html', {\n 'profile_form': profile_form,\n 'address_contact_form': address_contact_form,\n 'address_permanent_form': address_permanent_form,\n 'users_image': users_image\n },\n )# Create your views here.", "def save(self, **kwargs):\n self.remove_file()\n if not self.image:\n self.generate(save=False)\n else:\n self.image.name = self.file()\n super(FormatedPhoto, self).save(**kwargs)", "def process_user_edit_form(user_id):\n first_name = request.form.get('first_name')\n last_name = request.form.get('last_name')\n image_url = request.form.get('image_url')\n\n # can add value in edit_user.html to remove this\n user = User.query.get_or_404(user_id)\n\n user.first_name = first_name\n user.last_name = last_name\n user.image_url = image_url\n\n db.session.add(user)\n db.session.commit()\n\n flash(f'Edited user info for: {first_name} {last_name}')\n\n return redirect('/users')", "def edit_basic_info(request):\n if request.POST:\n request.user.first_name = request.POST['first_name']\n request.user.last_name = request.POST['last_name']\n request.user.email = request.POST['email']\n request.user.save()\n request.user.userprofile.phone_number = request.POST['phone']\n request.user.userprofile.save()\n messages.add_message(request, messages.SUCCESS, 'Your changes have been saved.')\n return redirect('base_dashboard')\n\n return render(request, 'edit_basic_info.html', {'the_user': request.user})", "def show_new_user_form():\r\n return render_template('user-form.html')", "def partial_update(self, request, pk=None):\n\n user_to_update = WhoYouUser.objects.get(pk=pk)\n\n requester = WhoYouUser.objects.get(user=request.auth.user)\n if requester != user_to_update:\n return Response({\"message\": \"Permission denied\"}, status=status.HTTP_401_UNAUTHORIZED)\n\n if \"profile_image_path\" in request.data:\n format, imgstr = request.data[\"profile_image_path\"].split(';base64,')\n ext = format.split('/')[-1]\n data = ContentFile(base64.b64decode(imgstr), name=f'{pk}-{uuid.uuid4()}.{ext}')\n user_to_update.profile_image_path = data\n\n if \"cover_image_path\" in request.data:\n user_to_update.cover_image_path = request.data[\"cover_image_path\"]\n\n user_to_update.save()\n\n return Response({}, status=status.HTTP_204_NO_CONTENT)", "def home_edituser():\n\tpass", "def personalInformationsUpdate(request):\n\n user_update_form = UserInformationUpdateForm(\n initial={\n 'username_update': 
request.user.username,\n 'email_update': request.user.email,\n }\n )\n\n if request.method == 'POST':\n completed_form = UserInformationUpdateForm(request.POST)\n if completed_form.is_valid():\n \"\"\"get new user data from form.\"\"\"\n new_user_data = {\n 'username': completed_form.cleaned_data.get('username_update'),\n 'email': completed_form.cleaned_data.get('email_update'),\n }\n \"\"\" ask manager to change datas \"\"\"\n messages_from_user_manager = user_manager.update_user_data(\n request.user, new_user_data\n )\n if messages_from_user_manager:\n for message in messages_from_user_manager:\n for level, content in message.items():\n messages.add_message(request, level, content)\n user_update_form = UserInformationUpdateForm(\n initial={\n 'username_update': request.user.username,\n 'email_update': request.user.email,\n }\n )\n\n template = 'personal_informations.html'\n user_update_form.fields['username_update'].disabled = True\n user_update_form.fields['email_update'].disabled = True\n\n else:\n template = 'personal_informations_update.html'\n\n context = {'user_update_form': user_update_form}\n return render(request, template, context)", "def newAvatarImage(self, imgPath, imgName): \n img = ocempgui.draw.Image.load_image(imgPath)\n if not self.images[imgName]: \n imgOcemp = guiobjects.OcempImageMapTransparent(img)\n imgOcemp.topleft = 528, 114\n self.window.add_child(imgOcemp)\n self.images[imgName] = imgOcemp\n else:\n self.images[imgName].picture = img", "def post(self, request, slug):\n user_prof = UserProfile.objects.get(user=request.user)\n about_person = AboutPerson.objects.get(slug=slug)\n form = AboutFunderForm(request.POST, request.FILES)\n if form.is_valid():\n x = form.cleaned_data.get('x')\n y = form.cleaned_data.get('y')\n w = form.cleaned_data.get('width')\n h = form.cleaned_data.get('height')\n image = form.cleaned_data.get('image')\n # ATTENTION! 
change email if you ever want to allow more users to be able to edit funders or contributors.\n if user_prof.user.email == \"relevate@outlook.com\":\n about_person.name = form.cleaned_data['name']\n about_person.content = form.cleaned_data.get('content')\n about_person.funder_or_adviser = form.cleaned_data.get('funder_or_adviser')\n # If user inputs image file instead of url\n if image:\n # Gets the original image to be cropped\n photo = Image.open(form.cleaned_data.get('image'))\n # Cropps the image using values x,y,w,and h from the form\n cropped_image = photo.crop((x, y, w + x, h + y))\n # Splits the file name and the extension\n filename, file_extension = os.path.splitext(\n os.path.basename(urlparse(about_person.image.url).path))\n cropped_image.save(settings.BASE_DIR + \"/media/about_person/image/\" + filename + file_extension)\n about_person.image = \"about_person/image/\" + filename + file_extension\n about_person.save()\n messages.success(request, \"Funder or Advisor Was Successfully Added!\")\n return HttpResponseRedirect(reverse_lazy('contribution:about'))\n else:\n return HttpResponseRedirect(reverse_lazy(\"contribution:home\"))\n else:\n print(\"Invalid\")\n display_error(form, request)\n return render(request, 'about_create.html',\n {\n 'form': form,\n 'user_prof': user_prof,\n })", "def save(self, *args, **kwargs):\n self.name = unique_slugify(self.name, instance=self)\n\n if self.is_personal and self.user.username != self.name:\n self.user.username = self.name\n self.user.save()\n\n if self.is_customer:\n self.update_customer()\n\n if not self.image:\n self.set_image_from_name(should_save=False)\n\n return super().save(*args, **kwargs)", "def update_user():", "def _form_valid(self, form):\n self.object = form.save(commit=False)\n # extract usergroups from form\n userGroups = self.request.POST.getlist('userGroups')\n userGroups = sorted([g for g in userGroups if g]) # filter empty strings\n # get telephone/roomNumber from form\n room = form.cleaned_data.get('room')\n if room:\n s = re.search('([\\w\\s]+\\w).*Tel:\\s*(\\d+)', room.pk)\n self.object.roomNumber = s.group(1)\n self.object.telephoneNumber = s.group(2)\n else:\n\tself.object.roomNumber = ''\n\tself.object.telephoneNumber = ''\n # get uploaded image\n # photos will be saved under MEDIA_ROOT/user_fullname/\n # MEDIA_ROOT is set in settings.py\n photo = self.request.FILES.get('photo', None)\n if photo:\n # save on filesystem and rename existing one, so it is backed up\n path = getPhotoPath(self.object, self.object.photo.path)\n absPath = settings.MEDIA_ROOT + path\n if os.path.exists(absPath):\n os.rename(absPath, absPath + time.strftime(\"-%Y%m%d-%H%M%S\"))\n self.object.photo.save(path, photo, save=False)\n # put photo into ldap (jpegPhoto attribute\n photo.open()\n self.object.jpegPhoto = photo.read()\n photo.close()\n # build and save wanted thumbnails of photo on filesystem\n fileName, fileExtension = os.path.splitext(absPath)\n image = Image.open(absPath)\n imagefit = ImageOps.fit(image, (640, 512), Image.ANTIALIAS)\n imagefit.save(fileName + '-640x512.jpg', 'JPEG', quality=75)\n imagefit = ImageOps.fit(image, (200, 200), Image.ANTIALIAS)\n imagefit.save(fileName + '-200x200.jpg', 'JPEG', quality=75)\n # if password was changed save user with new usergroups and new password\n if len(form.cleaned_data.get('userPassword1')) > 0:\n self.object.save(userGroups=userGroups, password=form.cleaned_data.get('userPassword1'))\n # else call save() with new usergroups only\n else:\n 
self.object.save(userGroups=userGroups)\n return redirect(self.get_success_url())", "def upload_priviledge(request):\n try:\n check = UserProfile.objects.filter(contributor = 1)\n uncheck = UserProfile.objects.filter(contributor = 0)\n \n i = UserProfile.objects.filter(contributor=1).count()\n k = UserProfile.objects.filter(contributor = 0).count()\n \n j = 0\n while j < i:\n c = check[j]\n usr = User.objects.get(username=c.user)\n perm_id = Permission.objects.get(codename = 'add_task')\n if usr.has_perm('translation.add_task'):\n pass\n else:\n usr.user_permissions.add(perm_id)\n usr.save()\n j += 1\n j = 0\n while j < k:\n u = uncheck[j]\n usr = User.objects.get(username=u.user)\n if not usr.has_perm('translation.add_task'):\n pass\n else:\n usr.user_permissions.remove(perm_id)\n usr.save()\n j += 1\n \n data = {'msg':''}\n messages.success(request, \"User's upload priviledge updated successfully.\")\n return render_to_response('my_admin_tools/menu/background_task.html',data,context_instance=RequestContext(request))\n except:\n msg = traceback.format_exc()\n data = {'msg':msg}\n messages.error(request, \"Update user's upload priviledge failed.\")\n return render_to_response('my_admin_tools/menu/background_task.html',data,context_instance=RequestContext(request))", "def make_new_user():\n return render_template('users/new_user_form.html')", "def profile_update(request):\n LOGGER.info('Rendering WMT16 profile update view.')\n errors = None\n project_choices = Project.objects.all().values_list('name', flat=True).order_by('id')\n project_status = set()\n languages = set()\n \n focus_input = 'id_projects'\n \n if request.method == \"POST\":\n projects = request.POST.getlist('projects', None)\n languages = set(request.POST.getlist('languages', None))\n \n LOGGER.debug(projects)\n LOGGER.debug(languages)\n \n if projects and languages:\n try:\n # Update set of projects for this user.\n for project_name in projects:\n project_instance = Project.objects.filter(name=project_name)\n if project_instance.exists():\n project_instance[0].users.add(request.user)\n \n # Compute set of evaluation languages for this user.\n target_language_codes = set([x[0][3:] for x in LANGUAGE_PAIR_CHOICES])\n LOGGER.debug('Language codes: {0}'.format(target_language_codes))\n eval_groups = []\n for eval_language in target_language_codes:\n if eval_language in languages:\n eng2xyz = Group.objects.filter(name__endswith=eval_language)\n if eng2xyz.exists():\n eval_groups.extend(eng2xyz)\n\n # Also, add user to WMT16 group.\n wmt16_group = Group.objects.filter(name='WMT16')\n if wmt16_group.exists():\n eval_groups.append(wmt16_group[0])\n\n LOGGER.debug('Evaluation languages: {0}'.format(eval_groups))\n \n # Update group settings for the new user account.\n for eval_group in eval_groups:\n eval_group.user_set.add(request.user)\n \n # Redirect to WMT16 overview page.\n return redirect('appraise.wmt16.views.overview')\n \n # For any other exception, clean up and ask user to retry.\n except:\n from traceback import format_exc\n LOGGER.debug(format_exc())\n \n project_choices = Project.objects.all().values_list('name', flat=True).order_by('id')\n project_status = set()\n languages = set()\n \n # Detect which input should get focus for next page rendering.\n if not projects:\n focus_input = 'id_projects'\n errors = ['invalid_projects']\n elif not languages:\n focus_input = 'id_languages'\n errors = ['invalid_languages']\n \n # Determine user annotation projects\n for project in Project.objects.all():\n if request.user in 
project.users.all():\n project_status.add(project.name)\n \n # Determine user target languages\n for group in request.user.groups.all():\n if 'eng2' in group.name or '2eng' in group.name:\n languages.add(group.name[3:])\n \n context = {\n 'active_page': \"OVERVIEW\",\n 'errors': errors,\n 'focus_input': focus_input,\n 'project_choices': project_choices,\n 'project_status': project_status,\n 'languages': languages,\n 'title': 'WMT16 profile update',\n }\n context.update(BASE_CONTEXT)\n \n return render(request, 'wmt16/profile_update.html', context)", "def change_info(self):\n\t\ttry:\n\t\t\tnewName = self.ui.lista_act.currentItem().text()\n\t\t\tnewData = controller.search_data_act(newName)\n\t\t\tnewData = newData[0]\n\t\t\tnombre = newData[1]\n\t\t\tyear = newData[2]\n\t\t\tgenero = newData[3]\n\t\t\timg = newData[4]\n\t\texcept AttributeError as e:\n\t\t\tnombre = \"\"\n\t\t\tgenero = \"\"\n\t\t\tyear = \"\"\n\t\t\timg = \"\"\n\n\t\tself.ui.txt_nombre.setText(nombre)\n\t\tself.ui.txt_year.setText(year)\n\t\tself.ui.txt_genero.setText(genero)\n\t\tself.ui.img.setPixmap(QtGui.QPixmap(img))", "def edit_user_profile(user_id):\n if CURRENT_USER_KEY not in session or session[CURRENT_USER_KEY] != user_id:\n raise Unauthorized()\n\n user = User.query.get_or_404(user_id)\n\n form = UserEditForm(obj=user)\n\n if form.validate_on_submit():\n try:\n user.email = form.email.data\n user.username = form.username.data\n user.first_name = form.first_name.data.capitalize()\n user.last_name = form.last_name.data.capitalize()\n user.image_url = form.image_url.data or User.image_url.default.arg\n user.cover_url = form.cover_url.data or User.cover_url.default.arg\n user.bio = form.bio.data\n\n db.session.commit()\n except IntegrityError:\n db.session.rollback()\n flash(\n \"Email or Username already taken!! 
Please try again\", 'danger')\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)\n\n flash('Profile Successfully Updated!', 'success')\n return redirect(url_for('show_user_profile', user_id=user.id))\n return render_template('edit_profile.html', form=form, user=user, img_src=user.image_url)", "def user_edit(request):\n DEBUG = False\n\n if not has_permission('editUser', request.context, request):\n #print \"NOT has_permission !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n request.message = \"You do not have permissions to edit this user!\"\n raise HTTPForbidden\n\n # if no user_id in URL and not logged in, tell user to login\n\n try:\n user_id = request.matchdict['user_id']\n except KeyError, ke:\n #print ke\n return HTTPFound(location=request.route_url('not_found'))\n\n user = User.get_by_user_id(user_id)\n\n if user is None:\n msg = \"User was not founf in database.\"\n return HTTPFound(location=request.route_url('not_found'))\n\n form = Form(request, schema=UserSettingsSchema, obj=user)\n\n if 'form.submitted' in request.POST and not form.validate():\n # form didn't validate\n request.session.flash('Please check the form below for errors!')\n if DEBUG: # pragma: no cover\n print \"submitted but not validated!\"\n\n if 'form.submitted' in request.POST and form.validate():\n # ready for changing database entries!\n request.session.flash('form validated!')\n if DEBUG: # pragma: no cover\n print \"the form was submitted and validated.\"\n\n if form.data['surname'] != user.surname:\n if DEBUG: # pragma: no cover\n request.session.flash('surname was not same --> changing')\n print \"changing surname\"\n user.surname = form.data['surname']\n if form.data['lastname'] != user.lastname:\n if DEBUG: # pragma: no cover\n request.session.flash('lastname was not same --> changing')\n print \"changing lastname\"\n user.lastname = form.data['lastname']\n if form.data['email'] != user.email:\n request.session.flash('email was not same --> changing')\n user.email = form.data['email']\n if form.data['phone'] != user.phone:\n request.session.flash('phone was not same --> changing')\n user.phone = form.data['phone']\n if form.data['fax'] != user.fax:\n request.session.flash('fax was not same --> changing')\n user.fax = form.data['fax']\n if form.data['street'] != user.street:\n request.session.flash('street was not same --> changing')\n user.street = form.data['street']\n if form.data['number'] != user.number:\n request.session.flash('number was not same --> changing')\n user.number = form.data['number']\n if form.data['city'] != user.city:\n request.session.flash('city was not same --> changing')\n user.city = form.data['city']\n if form.data['postcode'] != user.postcode:\n request.session.flash('postcode was not same --> changing')\n user.postcode = form.data['postcode']\n if form.data['country'] != user.country:\n request.session.flash('country was not same --> changing')\n user.country = form.data['country']\n\n if DEBUG: # pragma: no cover\n print \"returning the form\"\n return {\n 'the_user_id': user_id,\n 'the_username': user.username,\n 'form': FormRenderer(form),\n }", "def add_user():\n if request.method == 'POST':\n print('In Adduser post method')\n data = request.form\n pics=[]\n try:\n #save in database here, use userid when actually saving\n for name in request.files:\n f = request.files[name]\n filename = secure_filename(f.filename)\n pics.append(filename)\n\n userid=data.get('userid',None)\n resp = AddUpdateUser(data['name'], data['dob'], data['email'], 
data['mentorfor'], \\\n data['learn'], data['about'], str(pics), userid)\n #error, return message\n if resp == -1:\n print('In Adduser post method: returning -1')\n return json.dumps({'msg':'error', 'code':-1})\n #else return userid\n else:\n #del pics[:]\n print('In Adduser post method: returning 200')\n for name in request.files:\n f = request.files[name]\n filename = str(resp) + '_' + secure_filename(f.filename)\n #pics.append(\"http://tushki1405.pythonanywhere.com/pics/\" + filename)\n f.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))\n return json.dumps({'msg':'success', 'code':200, 'userid':resp, 'name':data['name']})\n except Exception as e:\n return json.dumps({'msg':str(e), 'code':-1})\n return 'add user'", "def home():\r\n # Create form\r\n screamForm = ImageForm(prefix=\"screamForm\")\r\n laMuseForm = ImageForm(prefix=\"laMuseForm\")\r\n rainPrincessForm = ImageForm(prefix=\"rainPrincessForm\")\r\n udnieForm = ImageForm(prefix=\"udnieForm\")\r\n waveForm = ImageForm(prefix=\"waveForm\")\r\n wreckForm = ImageForm(prefix=\"wreckForm\")\r\n\r\n if screamForm.validate_on_submit() and screamForm.image.data:\r\n image = screamForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', scream_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n scream_file_url = '/static/images/' + filename\r\n\r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 'name':_user.get('_id'), 'fileurl':scream_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n \r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n scream_file_url = None\r\n\r\n if laMuseForm.validate_on_submit() and laMuseForm.image.data:\r\n image = laMuseForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', la_muse_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n laMuse_file_url = '/static/images/' + filename\r\n\r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 'name':_user.get('_id'), 'fileurl':laMuse_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n \r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n laMuse_file_url = None\r\n\r\n if rainPrincessForm.validate_on_submit() and rainPrincessForm.image.data:\r\n image = rainPrincessForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n 
img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', rain_princess_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n rainPrincess_file_url = '/static/images/' + filename\r\n\r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 'name':_user.get('_id'), 'fileurl':rainPrincess_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n \r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n rainPrincess_file_url = None\r\n\r\n if udnieForm.validate_on_submit() and udnieForm.image.data:\r\n image = udnieForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', udnie_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n udnie_file_url = '/static/images/' + filename\r\n\r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 'name':_user.get('_id'), 'fileurl':udnie_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n\r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n udnie_file_url = None\r\n\r\n if waveForm.validate_on_submit() and waveForm.image.data:\r\n image = waveForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', wave_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n wave_file_url = '/static/images/' + filename\r\n\r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 'name':_user.get('_id'), 'fileurl':wave_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n \r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n wave_file_url = None\r\n\r\n if wreckForm.validate_on_submit() and wreckForm.image.data:\r\n image = wreckForm.image.data\r\n current_time_str = datetime.datetime.now().strftime(\"%I%M%S%p%B%d%Y\")\r\n filename = secure_filename(current_time_str + image.filename)\r\n img_path = APP_PATH + 'static\\\\images'\r\n # img_path = APP_PATH + 'static/images'\r\n file_path = os.path.join(\r\n img_path, filename\r\n )\r\n image.save(file_path)\r\n args = ['--checkpoint', wreck_ckpt, '--in-path', file_path, '--out-path', img_path]\r\n transfer.main(args)\r\n wreck_file_url = '/static/images/' + filename\r\n \r\n # if 'username' in session:\r\n # _username = session['username']\r\n # _user = db.Users.find_one({'name':_username})\r\n # _image = db.Images.insert_one({'filename':filename, 
'name':_user.get('_id'), 'fileurl':wreck_file_url})\r\n # image_id = _image.inserted_id\r\n # db.Users.update_one({'name': _username}, {'$push': {'images': image_id}})\r\n \r\n return redirect(url_for('displaytmp', image_src=filename))\r\n\r\n else:\r\n wreck_file_url = None\r\n \r\n login = False\r\n _username = None\r\n avatar_url = None\r\n\r\n if 'username' in session:\r\n # loginUrl = \"#\"\r\n # loginMessage = session['username']\r\n login = True\r\n _username = session['username']\r\n _user = db.Users.find_one({'name':_username})\r\n avatar_url = _user['avatar_url']\r\n\r\n return render_template('index.html', \r\n screamForm=screamForm, scream_file_url=scream_file_url, \r\n laMuseForm=laMuseForm, laMuse_file_url=laMuse_file_url,\r\n rainPrincessForm=rainPrincessForm, rainPrincess_file_url=rainPrincess_file_url,\r\n udnieForm=udnieForm, udnie_file_url=udnie_file_url,\r\n waveForm=waveForm, wave_file_url=wave_file_url,\r\n wreckForm=wreckForm, wreck_file_url=wreck_file_url,\r\n login=login, username=_username, avatar_url=avatar_url)", "def userSetup(self):\n if self.user[\"Save\"] == \"\":\n self.ui.b_run.setEnabled(False)\n else:\n name_split = self.splitPath(self.user[\"Save\"])[-1]\n name = name_split.split(\".\")[0]\n self.ui.l_save.setText(\"Save to: \" + name)\n\n if self.user[\"GT\"] != \"\":\n self.ui.l_ground_truth.setText(self.splitPath(self.user[\"GT\"])[-1])\n\n self.ui.l_colour.setText(self.user[\"Colour\"])", "def add_new_user():\n return render_template('new.html')", "def updateUser(database):\n name=str(input(\"Which user do you want to update : \"))\n usr,find=getByName(database,name)\n if not find:\n print(\"the User could not be found\")\n return\n if find:\n print(usr)\n print(\"What do you want to change :\\n1.name\\n2.field\\n3.year of study\\n4.areas of interest\\n5.Age\\n6.City\\n7.Quit\")\n choice=int(input(\"Your choice :\"))\n if choice==1:\n usr.name=input(\"Enter the new name of the user : \").lower()\n usr.lastname=input(\"Enter the new lastname of the user : \").lower()\n elif choice==2:\n usr.fieldStudy=input(\"Enter the new field of study of the user : \")\n elif choice==3: \n usr.yearStudy=int(input(\"Enter the new year of study of the user : \"))\n elif choice==4:\n nbinterest=int(input(\"how many new interests does he have? 
: \"))\n for i in range(nbinterest):\n usr.interest.append(input(\"Enter the interest of the user : \"))\n elif choice==5:\n usr.age=int(input(\"Enter the age of the user : \"))\n elif choice==6:\n usr.city=input(\"Enter the city of the user : \") \n elif choice==7:\n return\n saveDatabase(database,usr)", "def mugshot_file(self, instance):\r\n try:\r\n return admin_display_file(instance.user.user_files, 'mugshot')\r\n except Exception:\r\n return mark_safe('<i class=\"fa fa-times-circle red\" aria-hidden=\"true\"></i>')", "def form_valid(self, form):\n form.instance.founder = self.request.user\n print('Project Create user:', self.request.user)\n form.save()\n\n tc_lib.generate_user_matches(form)\n\n return super(ProjectCreate, self).form_valid(form)", "def userForm():\n \"\"\"If form criteria met, add and commit to the DB\"\"\"\n if request.method=='POST':\n username=request.form['username']\n userfname =request.form['fname']\n userlname=request.form['lname']\n userage=request.form['age']\n usergender=request.form['gender']\n userbio=request.form['bio']\n usertime=datetime.now()\n file = request.files['file']\n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join(file_folder, filename))\n userimage=filename = secure_filename(file.filename)\n #init_db()\n \n db.Model.metadata.reflect(db.engine) \n user=User(userimage,username,userfname,userlname,userage,usergender,userbio,usertime)\n db.session.add(user)\n db.session.commit()\n\n\n return redirect(url_for('home'))\n\n \n \n return render_template('userForm.html')", "def upload(request):\r\n\tif (request.user.profile.is_developer): #checks if a developer is accessing the upload page\r\n\t\tupload_done = False\r\n\t\tif request.method == 'POST':\r\n\t\t\tform = GameUploadForm(request.POST, request.FILES)\r\n\t\t\tif form.is_valid():\r\n\t\t\t\tuploader = request.user.profile\r\n\t\t\t\tnew_game = form.save(commit=False)\r\n\t\t\t\tnew_game.developer = uploader\r\n\t\t\t\tnew_game.save()\r\n\t\t\t\tupload_done = True\r\n\t\t\telse:\r\n\t\t\t\tprint(form.errors)\r\n\t\telse:\r\n\t\t\tform = GameUploadForm()\r\n\t\t\tis_edit = False\r\n\r\n\r\n\t\treturn render(request, 'upload.html',{'form': form, 'MEDIA_URL': settings.MEDIA_URL,\r\n\t\t 'upload_done':upload_done})\r\n\r\n\telse:\r\n\t\treturn redirect('home')", "def personalInformations(request):\n\n user_update_form = UserInformationUpdateForm(\n initial={\n 'username_update': request.user.username,\n 'email_update': request.user.email,\n }\n )\n\n if request.method == 'POST':\n template = 'personal_informations_update.html'\n else:\n template = 'personal_informations.html'\n user_update_form.fields['username_update'].disabled = True\n user_update_form.fields['email_update'].disabled = True\n\n context = {'user_update_form': user_update_form}\n return render(request, template, context)", "def test_resource_user_resource_change_user_avatar_patch(self):\n pass", "def edit_show_user(user_id):\n edited_user = User.query.get_or_404(user_id)\n\n edited_user.first_name = request.form['first_name']\n edited_user.last_name = request.form['last_name']\n edited_user.image_url = request.form['image_url']\n\n db.session.add(edited_user)\n db.session.commit()\n\n return redirect('/')", "def user_profile(request):\n instance = Profile.objects.get(pk=request.user.pk)\n if request.method == \"POST\":\n\n form = ProfileForm(request.POST, request.FILES, instance=instance)\n form.save()\n return redirect(reverse('index'))\n messages.error(request, 
\"Profile Updated\")\n\n profile = ProfileForm(instance=instance)\n return render(request, 'profile.html', {'profile': profile, 'instance': instance})", "def view_user_edit(self):\n\n logged_in = authenticated_userid(self.request)\n message = ''\n form = Form(self.request, schema=UserEditSchema,\n state=State(request=self.request))\n if form.validate():\n password = self.request.params['password']\n if self.context.validate_password(password):\n if self.request.params['new_password']:\n password = self.request.params['new_password']\n message = 'Successfully saved'\n email = self.request.params['email']\n self.context.edit(password, email)\n else:\n message = msg['password_invalid']\n return {\n 'message': message,\n 'project': '',\n 'username': self.context.username,\n 'logged_in': logged_in,\n 'form': FormRenderer(form),\n 'email': self.context.email\n }", "def add_user(self, details):\n try:\n self.driver.switch_to.default_content() # Jump to the top of the frames hierachy\n self.driver.find_element_by_id('a151').click() # Add/Edit user button\n wait = WebDriverWait(self.driver, 10)\n wait.until(EC.frame_to_be_available_and_switch_to_it((By.NAME, 'Right')))\n wait.until(EC.frame_to_be_available_and_switch_to_it((By.NAME, 'appFrame')))\n # Add new user menu\n self.driver.find_element_by_id('AddButton').click()\n # self.wait.until(EC.element_to_be_clickable((By.ID, 'UsernameTextBox')))\n self.driver.find_element_by_id('UsernameTextBox').send_keys(details['username'])\n self.driver.find_element_by_id('PasswordTextBox').send_keys(details['newPassword'])\n self.driver.find_element_by_id('ConfirmPasswordTextBox').send_keys(details['newPassword'])\n if not self.driver.find_element_by_id('ChangePasswordCheckBox').is_selected(): # Should always be unticked on load\n self.driver.find_element_by_id('ChangePasswordCheckBox').click()\n self.driver.find_element_by_id('FullnameTextBox').send_keys(details['firstName'] + ' ' + details['surname'])\n self.driver.find_element_by_id('InitialsTextbox').send_keys(details['firstName'][:1] + details['surname'][:1])\n self.driver.find_element_by_id('DescriptionTextBox').send_keys(details['description']) # Description/Job title\n Select(self.driver.find_element_by_id('RoleList')).select_by_visible_text(details['role']) # Role dropdown\n # Locations Profile\n wait.until(EC.element_to_be_clickable((By.ID, 'imgLP')))\n self.driver.find_element_by_id('imgLP').click()\n Select(self.driver.find_element_by_id('LocationListBox')).select_by_visible_text(details['location']) #All Locations dropdown\n self.driver.find_element_by_id('AddButton').click()\n except:\n return \"There was a problem filling in the page. 
Can you check the role/location etc?\"\n try:\n self.driver.find_element_by_id('btnCommand').click() # Save user\n time.sleep(1)\n # Alert will display if a duplicate is found in the system\n alert = Alert(self.driver)\n alert_text = alert.text\n alert.accept()\n wait.until(EC.element_to_be_clickable((By.ID, 'btnCommand'))) # Wait for Save User button\n self.driver.find_element_by_id('btnGoToIndex').click()\n if alert_text[:13] == \"Create failed\" and alert_text[-30:] == \"already exists; cannot create.\":\n return \"Duplicate person found in the system\"\n else:\n return alert_text\n except NoAlertPresentException:\n # If you have a success message\n try:\n if self.driver.find_element_by_id('messageDisplay').text.strip() == \\\n 'The user has been successfully updated.'\\\n or self.driver.find_element_by_id('messageDisplay').text.strip() == \\\n 'The user has been successfully added.':\n return 'User added successfully'\n else:\n return self.driver.find_element_by_id('messageDisplay').text.strip()\n except NoSuchElementException:\n # You are now stuck on the page unable to save with an error (usually unrecoverable for add user)\n # Password problem?\n try:\n if self.driver.find_element_by_id('PasswordValidator').text == \\\n 'You have used this password before in your last three passwords.':\n return \"Couldn't save the user as password has been used before.\"\n else:\n return self.driver.find_element_by_id('PasswordValidator').text\n except NoSuchElementException:\n # Location correction\n try:\n if self.driver.find_element_by_id('spanLocationError').text == \\\n \"There must be at least one location in the user's profile.\":\n Select(self.driver.find_element_by_id('LocationListBox')).\\\n select_by_visible_text(details['location']) # All Locations dropdown\n self.driver.find_element_by_id('AddButton').click()\n self.driver.find_element_by_id('btnCommand').click() # Save user\n time.sleep(1)\n try: # If you have a success message\n if self.driver.find_element_by_id('messageDisplay').text.strip() == \\\n \"The user has been successfully updated.\":\n return \"Success (& location updated)\"\n except NoSuchElementException:\n pass\n except:\n pass\n return \"Couldn't save the user for some reason I can't determine.\"", "def test_set_display_name_own_template_as_user_saves(self):\n mock_request = create_mock_request(user=self.user1)\n template_api.set_display_name(\n self.fixture.user1_template, \"new_name\", request=mock_request\n )", "def edit_user_process(user_id):\n\n # extract form data, edit, commit, then redirect to /users\n first_name = request.form[\"first-name\"].strip()\n last_name = request.form[\"last-name\"].strip()\n image_url = request.form[\"image-url\"].strip()\n\n msg = db_edit_user(user_id, first_name, last_name, image_url)\n\n flash(msg[\"text\"], msg[\"severity\"])\n\n return redirect(f\"/users/{user_id}\")", "def edit_user(user_id):\n\n user = User.query.get_or_404(user_id)\n \n first = request.form['first_name']\n last = request.form['last_name']\n image = request.form['image_url']\n \n if not first or not last:\n flash(\"Please enter first and last name.\")\n return redirect(f\"/users/{user.id}/edit\")\n \n user.first_name = first\n user.last_name = last\n \n if image:\n user.image_url = image\n\n db.session.add(user)\n db.session.commit()\n\n return redirect(\"/users\")", "def settings(user_login):\n user = User.query.filter_by(user_github_login=user_login).first_or_404()\n form = SettingsForm(obj=user)\n\n if request.method == 'POST':\n if 
form.validate_on_submit():\n form.populate_obj(user)\n\n file_to_upload = request.files.get(form.user_avatar.name)\n if file_to_upload and file_to_upload.filename:\n file_name = avatars.save(file_to_upload)\n user.user_avatar_url = avatars.url(file_name)\n\n user.save()\n\n flash(\"Settings updated\", 'success')\n return redirect(\n url_for('user.profile', user_login=user.user_github_login))\n else:\n flash(form.get_post_invalid_message(), 'danger')\n\n return dict(form=form, user=user)", "def upload_new_photo(name, file, user_id=None):\n\t# Create photo entry\n\tphoto = create_photo(name)\n\n\t# Save photo\n\tupload_existing_photo(photo, file)\n\n\treturn photo", "def __save_display_image(self):\n\n try:\n self.photomosaic_generator.can_save_image()\n if self.generating:\n raise MissingComponentError('Cannot save. Image is currently being generated.')\n except MissingComponentError as error_msg:\n error_msg_box = QtWidgets.QMessageBox.critical(self, 'Error', str(error_msg))\n else:\n options = QtWidgets.QFileDialog.Options()\n options |= QtWidgets.QFileDialog.DontUseNativeDialog\n file_name, file_type = QtWidgets.QFileDialog.getSaveFileName(self, 'Save photomosaic', '..',\n 'jpg (*.jpg);;png (*.png)', options=options)\n file_type = file_type[-5:-1]\n if file_name != '':\n self.photomosaic_generator.save_image(file_name + file_type if file_name[-4:] != file_type else\n file_name)", "def edit_profile():\n form = EditProfileForm()\n if request.method == 'GET':\n form.first_name.data = current_user.first_name\n form.first_name.data = current_user.first_name\n form.last_name.data = current_user.last_name\n form.email.data = current_user.email\n form.address_1.data = current_user.address_1\n form.address_2.data = current_user.address_2\n form.city.data = current_user.city\n form.state.data = current_user.state\n form.zipcode.data = current_user.zipcode\n form.telephone.data = current_user.telephone\n if form.validate_on_submit():\n form.last_name.data = form.last_name.data\n current_user.first_name = form.first_name.data\n current_user.last_name = form.last_name.data\n current_user.email = form.email.data\n current_user.address_1 = form.address_1.data\n current_user.address_2 = form.address_2.data\n current_user.city = form.city.data\n current_user.state = form.state.data\n current_user.zipcode = form.zipcode.data\n current_user.telephone = form.telephone.data\n db.session.commit()\n flash(('Your changes have been saved.'))\n\n return redirect(url_for('edit_profile'))\n\n return render_template('edit_profile.html', title=('Edit Profile'),\n form=form)", "def profile_pic(request):\n if request.user.is_authenticated:\n profile_obj = CustomUser.objects.get(id__exact=request.user.id)\n pic = profile_obj.avatar\n return {'picture': pic}\n return {}", "def picture_change(request, pk):\n picture = get_object_or_404(Picture, pk=pk)\n\n if picture.author != request.user:\n data = {\n 'status': 'failed',\n 'details': 'Not allowed'\n }\n return JsonResponse(data, status=403)\n\n body = json.loads(request.body)\n picture.description = body['description']\n picture.save()\n\n data = {\n 'status': 'success',\n 'data': PictureDetailSerializer(picture).data\n }\n\n return JsonResponse(data, status=200)", "def create_project_form(request):\n \n # First we check to see the site has been set up, otherwise we throw the user to the config screen\n if not bool(os.path.isdir(Project.project_options.repository_directory)):\n request.user.message_set.create(message=\"The site has not been set up yet. 
Log in as your admin user and create your settings!\")\n return HttpResponseRedirect(reverse('site-config'))\n \n if request.is_ajax():\n template ='project/project_create_ajax.html'\n else:\n template = 'project/project_create.html'\n \n # Lets check if this form is being shown or processed\n if request.method == \"POST\":\n # We're processing the form, so lets create the instance\n form = NewProjectForm(request.POST, auto_id=False)\n # The form is correct, lets proceeed.\n if form.is_valid():\n # Lets check the user has conformed to a sites T&C's\n if form.cleaned_data['t_and_c'] == True:\n # Create the project instance\n project = Project(\n project_id = string.lower(form.cleaned_data['project_id']),\n project_name = form.cleaned_data['project_name'],\n short_description = form.cleaned_data['short_description'],\n full_description = form.cleaned_data['full_description'],\n project_manager = request.user,\n hgweb_style = form.cleaned_data.get('hgweb_style', ''),\n project_icon = form.cleaned_data['project_icon'],\n )\n # Ok, we're all good, so lets save.\n project.save()\n # We'll tell the user that there site has been saved\n request.user.message_set.create(message=_(\"The project \" + form.cleaned_data['project_name'] + \" has been created\"))\n if request.is_ajax():\n return HttpResponse(\n \"{'success': 'true', 'url': '\" + reverse('project-detail', kwargs={'slug':form.cleaned_data['project_id']}) + \"', 'project': \" + json_encode(project) + \"}\"\n , mimetype=\"application/json\")\n else:\n return HttpResponseRedirect(reverse('project-detail', kwargs={'slug': form.cleaned_data['project_id']}))\n else:\n return render_to_response(template,\n {\n 'form':form.as_table(),\n }, context_instance=RequestContext(request)\n )\n #return HttpResponseRedirect(reverse('project-detail', kwargs={'slug':form.cleaned_data['name_short']}))\n else:\n form = NewProjectForm()\n is_auth = request.user.is_authenticated()\n \n return render_to_response(template,\n {\n 'form':form.as_table(),\n 'is_auth': is_auth\n }, context_instance=RequestContext(request)\n )", "def show_new_user_page():\n\n return render_template(\"new_user.html\")", "def save(self, user, project, commit=True):\n task = super(TaskForm, self).save(commit=False)\n task.project = project\n task.editor = user\n if not task.id:\n task.author = user\n task.created_at = datetime.now()\n if commit:\n task.save()\n\n def assign_resource(\n resource): return task.assigned_resources.add(resource)\n map(assign_resource, self.cleaned_data['assigned_resources'])\n return task", "def new_upload_image():\n log_request(request)\n\n if not valid_params(['username', 'session_id'], request.form) or\\\n not valid_params(['file'], request.files):\n logging.debug(\"Missing parameters\")\n return jsonify({'error' : 500})\n \n username = request.form['username']\n sId = request.form['session_id']\n fil = request.files['file']\n\n \n # check session before upload\n if not user.verify(username, sId):\n logging.debug(\"Invalid username or session id\")\n return jsonify({'error' : 101})\n\n if fil and allowed_file(fil.filename):\n # get the file extension\n ext = os.path.splitext(fil.filename)[1]\n # create a temporary file\n f = tempfile.NamedTemporaryFile(delete=False, dir=\"/var/www/resources/tmp/\", suffix=\"{0}\".format(ext))\n os.chmod(f.name, 0644)\n name = os.path.basename(f.name)\n f.write(fil.read())\n f.close()\n # get the dividing points for the page\n i = Image.open(f.name)\n divs = divLines(i)\n del i\n # return the dividing points and the name of 
the page in json form\n return jsonify(\n name = name,\n divs = divs,\n error = 0)\n else:\n logging.debug(\"Image processing failed, invalid filetype?\")\n return jsonify({'error' : 200})", "def goto_make_new_user():\n\n return render_template('users/new.html')", "def update_user_profile(request):\n if request.method == 'POST':\n form = UserProfileForm(request.POST)\n\n if form.is_valid():\n user = get_object_or_404(User, pk=request.user.pk)\n user.first_name = request.POST['first_name']\n user.last_name = request.POST['last_name']\n user.profile_picture = request.POST['profile_picture']\n user.save()\n messages.success(request, 'Your profile has been updated!')\n else:\n messages.error(\n request, 'Unable to update your profile. Please try again later.')\n\n return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse('dev_panel')))", "def logo_update(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n logo_form = CompanyLogoForm(instance=company)\n\n #verifies the person has access to the company or is an incubator employee\n edit = validate_user_company_access_or_redirect(request,company)\n\n #if the request is GET presents info, \n if request.method == 'GET':\n return render_to_response('pictures.html',{'form':logo_form },context_instance=RequestContext(request))\n else:\n logo_form = CompanyLogoForm(request.POST, request.FILES, instance=company)\n #if is POST Validates the form is well filled and save it redirecting to the company page \n if logo_form.is_valid():\n logo_form.save()\n\n\n # To FIX\n return HttpResponseRedirect('/company/%s/edit/' % str(slug))\n #if not well filled redirect to the original update page and display error\n else:\n return render_to_response('pictures.html', \n {'form': logo_form, 'form_errors': logo_form.errors},\n context_instance=RequestContext(request))", "def current_image(iati_import, activity, project, activities_globals):\n changes = []\n\n for document_link_element in activity.findall('document-link'):\n if 'url' in document_link_element.attrib.keys():\n image_url = document_link_element.attrib['url']\n image_filename = image_url.rsplit('/', 1)[1] if '/' in image_url else ''\n image_ext = image_filename.rsplit('.', 1)[1].lower() if '.' in image_filename else ''\n image_name_no_ext = image_filename.rsplit('.', 1)[0] if '.' 
in image_filename else ''\n\n if not image_ext in VALID_IMAGE_EXTENSIONS:\n continue\n\n if not project.current_image or \\\n (project.current_image\n and not image_name_no_ext in\n project.current_image.name.rsplit('/', 1)[1].rsplit('.', 1)[0]):\n tmp_file = NamedTemporaryFile(delete=True)\n tmp_file.write(urllib2.urlopen(image_url, timeout=100).read())\n tmp_file.flush()\n project.current_image.save(image_filename, File(tmp_file))\n project.save(update_fields=['current_image'])\n changes.append('current_image')\n\n # Image caption\n image_caption = ''\n\n title_element = document_link_element.find('title')\n if title_element is not None:\n image_caption = get_text(title_element, activities_globals['version'])\n if len(image_caption) > 50:\n add_log(iati_import, 'image_caption',\n 'caption too long (50 characters allowed)', project,\n IatiImportLog.VALUE_PARTLY_SAVED)\n image_caption = image_caption[:50]\n\n if project.current_image_caption != image_caption:\n project.current_image_caption = image_caption\n project.save(update_fields=['current_image_caption'])\n changes.append('current_image_caption')\n\n # Image credit\n image_credit = ''\n\n if '{%s}photo-credit' % settings.AKVO_NS in document_link_element.attrib.keys():\n image_credit = document_link_element.attrib[\n '{%s}photo-credit' % settings.AKVO_NS\n ]\n if len(image_credit) > 50:\n add_log(iati_import, 'image_credit',\n 'credit too long (50 characters allowed)', project,\n IatiImportLog.VALUE_PARTLY_SAVED)\n image_credit = image_credit[:50]\n\n if project.current_image_credit != image_credit:\n project.current_image_credit = image_credit\n project.save(update_fields=['current_image_credit'])\n changes.append('current_image_credit')\n\n break\n\n return changes", "def viewprofile():\n user = current_user\n form = UserUpdateForm(obj=user)\n form.populate_obj(user)\n if form.validate_on_submit():\n form.populate_obj(user)\n\n db.session.commit()\n\n flash('You have successfully edited your profile!')\n return render_template('user/user.html', title=\"View Profile\",\n user=user, form=form, action='Edit')", "def updateteam():\n if request.method == 'POST':\n result = request.form\n teamImage = request.files['teamImage'].read()\n team = Team.query.filter_by(team_id=result.get('team_id')).one()\n team.team_name = result.get('team_name')\n team.team_image = teamImage\n db.session.commit()\n teams = get_team()\n if teams:\n return render_template('team-players.html', teams=teams)", "def image_upload_view(request):\n if request.method == \"POST\":\n form = ImageForm(request.POST, request.FILES)\n if form.is_valid():\n form.save()\n # Get the current instance object to display in the template\n img_obj = form.instance\n img = Image.open(CURR_DIR + img_obj.image.url)\n img = img.resize((256, 256))\n f_res = face.generate_face(img)\n f_res = Image.fromarray(np.uint8(f_res))\n f_res = f_res.resize((256, 256))\n\n # f_res.show(title=\"Rostro generado\")\n face_name = img_obj.image.url.split(\"/\")[-1]\n a = os.path.split(os.getcwd())[:-1][0]\n print(a)\n f_res.save(CURR_DIR + \"/paint/static/img/\" + face_name)\n\n print(img_obj, img_obj.image.url)\n return render(\n request,\n \"paint/carga.html\",\n {\"form\": form, \"img_obj\": img_obj, \"face\": face_name},\n )\n else:\n form = ImageForm()\n return render(request, \"paint/carga.html\", {\"form\": form})", "def uploadimg():\n print(str(pathlib.Path(__file__).resolve().parents[1])+\"im hereeeeeeeeeeeeeeeeeeeeeeeee\")\n path = str(pathlib.Path(__file__).resolve().parents[1])\n target = 
os.path.join(path,'Facial recognition/dataset')\n email = session['username']\n target = target+'/'+email\n # app_root, 'C:/Users\\meetp\\OneDrive\\Desktop\\IotAssigment2\\src\\Facial recognition\\dataset/')\n # print(target)\n\n if not os.path.isdir(target):\n os.mkdir(target)\n\n for file in request.files.getlist(\"file\"):\n print(file)\n filename = file.filename\n destination = \"/\".join([target, filename])\n print(destination)\n file.save(destination)\n\n # encode the image\n # en = encode()\n # en.run(target)\n\n return render_template(\"imguploaded.html\")", "def profile():\n\n # User entered new password\n if request.method == \"POST\":\n\n # Ensure current password entered\n if not request.form.get(\"old\"):\n return apology(\"Please enter current password\")\n\n # Query database for current password\n rows = db.execute(\"SELECT * FROM users WHERE id = ?\", session[\"user_id\"])\n\n # Ensure old password matches current password\n if not check_password_hash(rows[0][\"hash\"], request.form.get(\"old\")):\n return apology(\"Invalid password\")\n\n # Ensure user entered a new password\n if not request.form.get(\"new\"):\n return apology(\"Please enter a new password\")\n\n # Ensure old and new passwords are different\n if request.form.get(\"new\") == request.form.get(\"old\"):\n return apology(\"Must enter a new password\")\n\n # Update new password in database\n db.execute(\"UPDATE users SET hash = ? WHERE id = ?\", generate_password_hash(\n request.form.get(\"new\"), method='pbkdf2:sha256', salt_length=8), session[\"user_id\"])\n\n # Redirect to homepage\n return redirect(\"/\")\n\n else:\n\n # User reached page via a link\n return render_template(\"profile.html\")", "def update_user_info(user, save=True):\n p = bayou.Person.from_default_services(user.username)\n\n user.email = p.email if p.email else user.email\n user.first_name = p.first_name if p.first_name else user.first_name\n user.last_name = p.surname if p.surname else user.last_name\n\n if save:\n user.save()\n\n return user", "def save(self, *args, **kwargs):\n step_numeral, step_name = kwargs.pop('step', (None, None))\n\n if step_numeral == 1:\n \"\"\"\n Basic Form: Application & File Uploader\n \"\"\"\n return self.cleaned_data\n if step_numeral == 2:\n \"\"\"\n Basic Form + Mapping Fields\n \"\"\"\n return self.cleaned_data\n\n if step_numeral == 3:\n pass # end-user is previewing", "def modify_user():\n\n id_hash = request.args.get('id')\n\n if not id_hash or id_hash=='':\n flash('There is no id.','error')\n return redirect(url_for('user_ksat.manage_user'))\n\n modify_user = User.query.filter_by(id=hashids_hasher.decode(id_hash)).first()\n\n if not modify_user:\n flash('There is no user to be changed.','error')\n return redirect(url_for('user_ksat.manage_user'))\n\n roles = Role.query.all()\n\n user_form = UserForm(\n username=modify_user.username,\n email=modify_user.email,\n password=modify_user.password,\n retype_password=modify_user.password,\n is_enabled=modify_user.is_enabled,\n first_name=modify_user.first_name,\n last_name=modify_user.last_name,\n locale=modify_user.locale,\n timezone=modify_user.timezone\n )\n # Metemos los valores actuales de los roles y los roles que no se anadieron anteriormente\n # para que puedan ser seleccionados\n user_form.roles.choices = [(i.name,i.name) for i in roles]\n user_form.roles.data = [i for i in modify_user.role_names]\n\n if user_form.validate_on_submit():\n\n if not request.form['username'] or request.form['username'] == '' :\n flash(\"No null or empty values are 
allowed.\",\"warn\")\n return render_template('user/add_edit_user.html', title='Modify User',\n user_form=user_form)\n if not request.form['email'] or request.form['email'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/add_edit_user.html', title='Modify User',\n user_form=user_form)\n if not request.form['password'] or request.form['password'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/add_edit_user.html', title='Modify User',\n user_form=user_form)\n if request.form['password'] != request.form['retype_password']:\n flash(\"Passwords are not the same!\",\"warn\")\n return render_template('user/add_edit_user.html', title='Modify User',\n user_form=user_form)\n\n hashed_password = user_manager.hash_password(request.form['password'])\n\n modify_user.username=request.form['username']\n modify_user.email=request.form['email']\n modify_user.password=hashed_password\n modify_user.confirmed_at=datetime.datetime.utcnow()\n\n if 'is_enabled' in request.form:\n modify_user.is_enabled=True\n else:\n modify_user.is_enabled=False\n\n modify_user.first_name=request.form['first_name']\n modify_user.last_name=request.form['last_name']\n modify_user.locale=request.form['locale']\n modify_user.timezone=request.form['timezone']\n\n # Si existe la lista de roles que hemos elegido se anadira al usuario\n if request.form.getlist('roles'):\n for rol in roles:\n if rol.name in request.form.getlist('roles'):\n modify_user.roles.add(rol)\n else:\n modify_user.roles = set()\n\n try:\n correct = True\n db.session.commit()\n\n except Exception as e:\n # Catch anything unknown\n print(e)\n correct = False\n\n finally:\n if not correct:\n # Cleanup and show error\n db.session.rollback()\n flash('Error modifying user, make sure username and email are unique','error')\n else:\n flash('Congratulations, you have modified a user!','success')\n return redirect(url_for('user_ksat.manage_user'))\n\n\n return render_template('user/add_edit_user.html', title='Modify User',user_form=user_form)", "def _edit_user(self):\n users = fileIO.load_json(\"users.json\")\n print(\"The list of users is as follows: \")\n for i in users:\n print(users[i][\"name\"])\n #List specific user's settings and get user id\n userID = self._list_user_settings(users)\n #Loop until valid option given\n option = False\n while not option:\n option = input(\"Please enter the setting you would like to change: \")\n if option not in users[userID]:\n option = False\n print(\"That setting is not valid.\")\n #Get input for new setting\n args = input(\"Please enter what you would like to change that setting to: \")\n #Output\n command = \"edit_user {0} {1} {2}\\r\\n\".format(userID, option, args)\n return(command)", "def make_personal(doc, image=False):\n \n \n # doc.append(Command('hrule'))\n # doc.append(VerticalSpace(\"-5pt\"))\n if image:\n with doc.create(MiniPage(width=r\"0.45\\textwidth\")):\n # with doc.create(Figure()) as profil:\n doc.append(Command(\"centering\"))\n doc.append(Command(\"includegraphics\", PROFIL,\"width=200pt\"))\n\n with doc.create(MiniPage(width=r\"0.6\\textwidth\")):\n with doc.create(Section(data['personal']['name'])):\n # doc.append(VerticalSpace(\"-3pt\"))\n with doc.create(Itemize(options=[ \n 'align=parleft',\n 'leftmargin=2.25cm',\n 'labelwidth=2cm' ]\n )):\n # doc.append(Command(\"hrule\"))\n doc.append(NoEscape(\"\\\\item[Phone]\"))\n doc.append(phone_format(data['personal']['phone']))\n 
doc.append(NoEscape(\"\\\\item[Email]\"))\n doc.append(Command(\"url\",data['personal']['email'][0]))\n # doc.append(Command(\"url\",data['personal']['email'][1]))\n doc.append(NoEscape(\"\\\\item[Website]\"))\n doc.append(Command(\"url\",data['personal']['website']))\n doc.append(NoEscape(\"\\\\item[Address]\"))\n doc.append(NoEscape(\",\\\\\\\\\".join(data['personal']['address'])))\n doc.append(NoEscape(\"\\\\item[Birth]\"))\n b=data['personal']['birth']\n birth=f\"{b['day']} {b['month']['name']} {b['year']}\"\n doc.append(NoEscape(birth+f\", {b['city']}, {b['country']}\"))\n\n # doc.append(Command('hrule'))\n doc.append(Command(\"hfill\"))\n # doc.append(LineBreak())", "def user():\r\n return render_base_template(\"user.html\", user=current_user)", "def modificarUserstory(request,id_proyecto, id_userstory):\n band = False\n\n rol_en_proyecto=Equipo.objects.get(usuario_id=request.user.pk, proyecto_id=id_proyecto)\n rol = Group.objects.get(id=rol_en_proyecto.rol.pk)\n user_permissions_groups = list(rol.permissions.all())\n\n for p in user_permissions_groups:\n if (p.codename == 'change_userstory'):\n band = True\n warning = False\n registered = False\n warningUS = False\n warningPorcentaje = False\n marca = False\n mensaje = 'ATENCION: \\nNo puede modificar el estado de un US en estado Comentario\\nDebe concluir con los US en Alta'\n mensajeCurso = 'ATENCION: \\nNo puede modificar el estado de este US en estado Curso, ya posee otro US en ese estado\\nFinalice su otro US o coloque a Comentario'\n mensajePorcentaje = 'ATENCION: \\nNo puede modificar el estado de este US a estado Resuelta, porque su porcentaje no esta en 100%\\n Cambie a 100% antes de realizar este cambio'\n us = Userstory.objects.get(id=id_userstory)\n estado_us = us.estado\n if (band == True):\n\n if request.method == 'POST':\n form = UserstoryModificadoForm(request.POST, estado_us=estado_us)\n if form.is_valid():\n form.clean()\n nombre = form.cleaned_data['Nombre']\n descripcion = form.cleaned_data['Descripcion']\n #usuarioasignado = form.cleaned_data['usuarioasignado']\n if estado_us == 'Resuelta':\n estado = form.cleaned_data['Estado']\n prioridad = form.cleaned_data['Prioridad']\n\n\n\n '''\n Procedimiento si se modifica la prioridad del us a 'Alta'\n '''\n\n # sprint = us.sprint\n if prioridad == 'Alta':\n cambioDePrioridades(us.usuarioasignado, us.sprint)\n\n '''\n Procedimiento necesario para definir el historial\n '''\n modificaciones = ''\n modificaciones = modificaciones + str(us.historial)\n if us.nombre != nombre or us.estado != estado or us.prioridad != prioridad:\n marca = 'True'\n modificaciones = modificaciones + \"\\nActualizado por \"\n modificaciones = modificaciones + str(us.usuarioasignado)\n ahora = datetime.date.today()\n modificaciones = modificaciones + \" el \" + str(ahora) + \"\\n\"\n\n if marca == 'True':\n if us.nombre != nombre and nombre!='':\n modificaciones = modificaciones + \" \\n \\t* NOMBRE -> Cambiado de \" + str(\n us.nombre) + \" por \" + str(nombre)\n us.nombre=nombre\n\n\n if us.descripcion != descripcion and descripcion!='':\n modificaciones = modificaciones + \" \\n \\t* DESCRIPCION -> Cambiado de \" + str(\n us.descripcion) + \" por \" + str(descripcion)\n us.descripcion = descripcion\n\n\n if (us.estado != estado):\n if estado == 'Validado':\n us.estado = estado\n elif estado == 'Rechazado':\n us.estado = 'InPlanning'\n proyecto_flujo_actividades = ProyectoFlujoActividad.objects.filter(userstory_id=us.pk)\n for pfa in proyecto_flujo_actividades:\n 
ProyectoFlujoActividad.objects.filter(id=pfa.pk).update(estado='ToDo')\n\n\n if us.prioridad != prioridad:\n modificaciones = modificaciones + \" \\n \\t* PRIORIDAD -> Cambiado de \" + str(\n us.prioridad) + \" por \" + str(prioridad)\n\n\n\n\n '''if (us.prioridad == 'Alta' and (estado == 'Resuelta' or estado == 'Validado')):\n userStories = Userstory.objects.filter(sprint_id=us.sprint.pk)\n\n if (tieneUsuarioUSAlta(us) is not True):\n for userStory in userStories:\n if (userStory.usuarioasignado == us.usuarioasignado) and (userStory.estado == 'Comentario'):\n Userstory.objects.filter(id=userStory.pk).update(estado='InPlaning')'''\n\n mensajePrioridadAlta=False\n if tieneUsuarioUSAlta(us):\n mensajePrioridadAlta = 'No puede asignar otro US con prioridad alta al usuario' + request.user.username\n else:\n us.prioridad = prioridad\n\n us.historial = us.historial + modificaciones\n #us.sprint = sprint\n\n us.save()\n '''\n Obtener Lider, scrum master del proyecto al que se corresponde este US\n '''\n sprint = us.sprint\n userstories_del_sprint = Userstory.objects.filter(sprint_id=sprint.pk)\n userstories_del_sprint_validado = Userstory.objects.filter(sprint_id=sprint.pk, estado='Validado')\n if len(userstories_del_sprint) == len(userstories_del_sprint_validado):\n FlujoProyecto.objects.filter(proyecto_id=id_proyecto, sprint_id=sprint.pk).update(estado='Done')\n Sprint.objects.filter(id=sprint.pk).update(estado='Finalizado')\n\n #scrum_master = Equipo.objects.get(proyecto_id=FlujoProyecto.objects.get(sprint_id=us.sprint.pk).proyecto_id, rol_id = 2).usuario\n\n '''\n Enviar correo electronico al SCRUM MASTER\n '''\n #send_mail('Modificaciones del US', modificaciones, settings.EMAIL_HOST_USER,\n # ['gabyvazquez92@gmail.com',Equipo.objects.get(proyecto_id=FlujoProyecto.objects.get(sprint_id=us.sprint.pk).proyecto_id, rol_id = 2).usuario.email],\n # fail_silently=False)\n\n registered = True\n template_name = './Userstories/userstory_modificado.html'\n return render(request, template_name,\n {'mensaje': mensaje, 'warning': warning, 'mensajeCurso': mensajeCurso,'mensajePorcentaje': mensajePorcentaje, 'warningUS': warningUS, 'warningPorcentaje': warningPorcentaje,'registered': registered,'mensajePrioridadAlta':mensajePrioridadAlta })\n else:\n data = {'Nombre': us.nombre, 'Estado': us.estado,\n 'Prioridad': us.prioridad,\n 'Descripcion': us.descripcion,\n }\n form = UserstoryModificadoForm(data, estado_us=estado_us)\n template_name = './Userstories/modificar_userstory.html'\n return render(request, template_name, {'form': form, 'id_userstory': id_userstory, 'id_proyecto': id_proyecto, 'us':us})\n else:\n raise Http404(\"No cuenta con los permisos necesarios\")", "def _ReCreateUserInfo(self, cred=None):\n if cred is None:\n cred = self.GetCredentials()\n user_info = FoursquareUser(cred.all_credentials.foursquare)\n self.SetUserInformation(user_info)\n return user_info" ]
[ "0.63029945", "0.6147011", "0.60409224", "0.5987003", "0.5972466", "0.59428054", "0.5842993", "0.5805387", "0.5803387", "0.5779269", "0.57783127", "0.5758194", "0.57569313", "0.5720132", "0.57105464", "0.57057136", "0.56755704", "0.56709665", "0.56596", "0.56305", "0.5622149", "0.5602108", "0.559116", "0.55641824", "0.55527705", "0.55467355", "0.55410516", "0.55386084", "0.5531462", "0.55249935", "0.5524404", "0.55227137", "0.5490549", "0.5477047", "0.54679984", "0.54521906", "0.54518926", "0.54244745", "0.54153454", "0.5406243", "0.5395214", "0.5378206", "0.5378005", "0.5374568", "0.5360155", "0.5355813", "0.5341255", "0.5326636", "0.5307585", "0.5303547", "0.5302058", "0.52970135", "0.5285025", "0.52821267", "0.52701014", "0.5262828", "0.52306485", "0.52262837", "0.52146035", "0.5214375", "0.5210926", "0.521025", "0.5205133", "0.5204944", "0.52044004", "0.5197677", "0.5194451", "0.51915133", "0.5177337", "0.5156303", "0.5148286", "0.5148117", "0.51480407", "0.5145876", "0.5144231", "0.5122464", "0.5121779", "0.5118817", "0.5118056", "0.5112282", "0.51076996", "0.51071316", "0.51052946", "0.5096988", "0.5092061", "0.50861067", "0.5080989", "0.50751424", "0.5075109", "0.50639147", "0.5062889", "0.50612974", "0.50515366", "0.5050671", "0.50469726", "0.5046064", "0.504069", "0.502961", "0.5022765", "0.50220424" ]
0.6888967
0
Function that renders the form for the email input that is the destination of the utils.send_reset_email function, which is responsible for sending the user an email with a token that is valid for a specific period of time and resets the user's password
def reset_password():
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = RequestResetForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        send_reset_email(user)  # located in utils.py
        flash('An email has been sent with instruction to reset your password', 'info')
        return redirect(url_for('users.login'))
    return render_template('reset_password_request.html', form=form)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset_password_request():\n form = ResetPasswordRequestForm()\n if form.validate_on_submit():\n try:\n user = User.query.filter_by(email=form.email.data).first_or_404()\n except Exception:\n flash('This Email ID is Not Registered', 'error')\n return render_template('password_reset_request.html',\n form=form), 400\n\n if user:\n send_password_reset_email(user)\n flash('Please check your email for a password reset link.',\n 'success')\n return render_template('post_pass_reset_request.html',\n title=\"Reset Password\")\n else:\n flash(\n 'Your email address must be confirmed \\\n before attempting a password reset.',\n 'error')\n return redirect(url_for('auth.login'))\n\n return render_template('password_reset_request.html', form=form), 400", "def password_reset(request):\n\n\tcontext_dict = {}\n\tif request.method == 'POST':\n\t\temail = request.POST.get('email')\n\t\tif email:\n\t\t\tuser = models.Teacher.objects.get(\n\t\t\t\tsoft_delete=False, user__email=email\n\t\t\t)\n\t\t\tif not user:\n\t\t\t\tcontext_dict[\"message\"] = \"Email ID does'nt exist, Enter Correct details\"\n\t\t\tmail = {\n\t\t\t\t'email': email,\n\t\t\t\t'domain': request.META['HTTP_HOST'],\n\t\t\t\t'site_name': 'Placement Portal',\n\t\t\t\t'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n\t\t\t\t'user': user,\n\t\t\t\t'token': ''.join([random.choice(ascii_letters+digits) for i in range (128)]),\n\t\t\t\t'protocol': 'http',\n\t\t\t}\n\t\t\ttry:\n\t\t\t\treset_token = models.PasswordReset(\n\t\t\t\t\tuser=user,\n\t\t\t\t\ttoken=mail['token'],\n\t\t\t\t\ttoken_consumed=False,\n\t\t\t\t)\n\t\t\t\treset_token.save()\n\t\t\texcept Exception as e:\n\t\t\t\tprint (e)\n\t\t\tsubject_template_name = 'password_reset_email_subject.txt'\n\t\t\temail_template_name = 'password_reset_email.html'\n\t\t\tsubject = loader.render_to_string(subject_template_name, mail)\n\t\t\tsubject = ''.join(subject.splitlines())\n\t\t\temail_data = loader.render_to_string(email_template_name, mail)\n\t\t\tsend_mail(subject, email_data, DEFAULT_FROM_EMAIL, [email], fail_silently=False)\n\t\t\tcontext_dict[\"message\"] = \"Email has been sent to your registered Email ID with instructions.\"\n\treturn render(request, \"password_reset_form.html\", context_dict)", "def send_recovery_password_email(token: str, email: str) -> None:\n\n # TODO ...\n # Load html templates and get the content from it.\n # html_content = ...\n\n # You must have to send this as a anchor\n # to my-domain.com/reset-password?token=ad5a....\n link = f\"{SERVER_HOST}/reset-password?token={token}\"\n content = f\"\"\"\n <h1>Reset your password</h1>\n <p></p>\n <a href=\"{link}\" target=\"_blank\" rel=\"noopener noreferrer\">Press here</a>\n \"\"\"\n email = sender.create_email(\n to_list=[email],\n subject=f\"Recovery Password\",\n html_content=content,\n )\n sender.send_email(email_to_send=email)", "def reset_password():\n form = ResetPassword()\n if form.validate_on_submit():\n user_email = form.email.data\n mail_exist = db.check_email(user_email)\n if mail_exist is not None:\n new_password = generate_password()\n new_password_hash = generate_password_hash(new_password)\n username = mail_exist['username']\n db.update_password_username(username, new_password_hash)\n flash('Your new password has been sent to your mailbox')\n redirect('login')\n # send_password_reset_email(user_email, new_password)\n return redirect(url_for('login'))\n else:\n flash('This email address is not registered')\n return redirect('reset_password')\n return render_template('resetpassword.html', 
form=form)", "def reset_request():\n if current_user.is_authenticated:\n return redirect('/home')\n form = RequestResetForm()\n if form.validate_on_submit():\n staff = Staff.query.filter_by(email=form.email.data).first()\n send_reset_email(staff)\n flash('An email has been sent with instructions to reset your password.', 'info')\n return redirect(url_for('login'))\n return render_template('reset_request.html', title='Reset Password',\n form=form)", "def forgot_password():\r\n form = ForgotPasswordForm(request.form)\r\n if form.validate_on_submit():\r\n user = model.user.User.query\\\r\n .filter_by(email_addr=form.email_addr.data)\\\r\n .first()\r\n if user and user.email_addr:\r\n msg = Message(subject='Account Recovery',\r\n recipients=[user.email_addr])\r\n if user.twitter_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Twitter')\r\n elif user.facebook_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Facebook')\r\n elif user.google_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Google')\r\n else:\r\n userdict = {'user': user.name, 'password': user.passwd_hash}\r\n key = signer.signer.dumps(userdict, salt='password-reset')\r\n recovery_url = url_for('.reset_password',\r\n key=key, _external=True)\r\n msg.body = render_template(\r\n '/account/email/forgot_password.md',\r\n user=user, recovery_url=recovery_url)\r\n msg.html = markdown(msg.body)\r\n mail.send(msg)\r\n flash(gettext(\"We've send you email with account \"\r\n \"recovery instructions!\"),\r\n 'success')\r\n else:\r\n flash(gettext(\"We don't have this email in our records. \"\r\n \"You may have signed up with a different \"\r\n \"email or used Twitter, Facebook, or \"\r\n \"Google to sign-in\"), 'error')\r\n if request.method == 'POST' and not form.validate():\r\n flash(gettext('Something went wrong, please correct the errors on the '\r\n 'form'), 'error')\r\n return render_template('/account/password_forgot.html', form=form)", "def password_reset_token_created(sender, reset_password_token, *args, **kwargs):\n # send an e-mail to the user\n context = {\n 'current_user': reset_password_token.user,\n 'username': reset_password_token.user.username,\n 'email': reset_password_token.user.email,\n # ToDo: The URL can (and should) be constructed using pythons built-in `reverse` method.\n 'reset_password_url': \"http://some_url/reset/?token={token}\".format(token=reset_password_token.key)\n }\n\n # render email text\n email_html_message = render_to_string('email/user_reset_password.html', context)\n email_plaintext_message = render_to_string('email/user_reset_password.txt', context)\n\n msg = EmailMultiAlternatives(\n # title:\n \"Password Reset for {title}\".format(title=\"Some website title\"),\n # message:\n email_plaintext_message,\n # from:\n \"noreply@somehost.local\",\n # to:\n [reset_password_token.user.email]\n )\n msg.attach_alternative(email_html_message, \"text/html\")\n msg.send()", "def login_resetrequest():\n if request.method == \"GET\":\n # In browser request that user wants to reset the password\n return flask.render_template('reset-request.html', message=\"Please reset the password\")\n\n if request.method == \"POST\":\n # Create a token\n email = flask.request.form[\"email\"]\n\n # Find if an account with that name exists\n conn.register([model.User])\n admindb = conn[current_app.config[\"CONFIGDB\"]]\n\n 
userdoc = admindb[\"users\"].User.find_one({\"name\" : email, \"type\" : \"passwd\"})\n if userdoc == None:\n # user not found\n return flask.Response('{\"error\" : \"User not found\"}')\n\n # First reset the password\n name = userdoc[\"label\"]\n emailto = userdoc[\"name\"]\n\n # Create accout and a random tocken\n userdoc[\"token\"] = bson.ObjectId()\n userdoc[\"password_status\"] = \"reset-request\"\n\n # May only be useful for some\n if \"password_ready\" in userdoc:\n del userdoc[\"password_ready\"]\n\n userdoc.validate()\n userdoc.save()\n\n # Create email\n emailfrom = current_app.config[\"EMAIL_FROM\"] \n\n body = \"Hello \" + name + \",\\n\\n\"\n body = body + \"You recently requested a password reset for your account at https://slide-atlas.org.\"\n body = body + \"\\n To complete the request operation please follow the link below- \\n\"\n body = body + \"\\n \" + url_for('.login_confirm', _external=True) + \"?token=\" + str(userdoc[\"token\"]) + \" \\n\"\n body = body + \"\\nIf clicking on the link doesn't work, try copying and pasting it into your browser.\\n\"\n body = body + \"\\nThis link will work only once, and will let you create a new password. \\n\"\n body = body + \"\\nIf you did not request password reset, please disregard this message.\\n\"\n body = body + \"\\nThank you,\\nThe SlideAtlas Administration Team\\n\"\n\n # Create a text/plain message\n msg = MIMEText(body)\n\n # me == the sender's email address\n # you == the recipient's email address\n msg['Subject'] = 'Password reset confirmation for slide-atlas.org'\n msg['From'] = emailfrom\n msg['To'] = emailto\n print msg\n s = smtplib.SMTP(current_app.config[\"SMTP\"])\n try:\n out = s.sendmail(emailfrom, [emailto], msg.as_string())\n except:\n return flask.Response(\"{\\\"error\\\" : \\\"Error sending email\\\"}\")\n\n s.quit()\n return flask.Response(\"{\\\"success\\\" : \\\"\" + str(out) + \"\\\"}\")", "def forgotPassword():\n if request.method == 'POST':\n if emailform():\n email = request.form['email1']\n\n #Confirm the user exist\n if hl.confirmUser(email):\n user = hl.getUser(\"Email\",email)\n refLink = \"http://\"+request.headers['Host']+hl.genUrl(user[\"Name\"],\"Password\")\n #Send email\n msg = \"\"\"\n Dear {},\n\n You are receiving this email because you have requested your password be reset. 
\n Use the following link to reset your password:\n\n {}\n\n If you did not request that your password be changed, please reply to this email immediately.\n\n Regards,\n Onegroup Admin Team\n \"\"\".format(user[\"Name\"],refLink)\n\n emailMessage(\"Password Reset\", [user[\"Email\"]], msg)\n return redirect(url_for('confirm', confirmed = 'Password reset email has been sent.'))\n else:\n flash(\"User doesn't exists\")\n else:\n flash(\"Emails don't match\")\n \n return render_template('emailsend.html')", "def token_request(request):\n try:\n l_user = request.data[\"user\"] #or email\n except:\n return Response({'message':'No user information received.'}, status=status.HTTP_400_BAD_REQUEST)\n\n l_user = l_user.lower()\n\n try:\n user = User.objects.get(username=l_user)\n except:\n try:\n user = User.objects.get(email=l_user)\n except:\n return Response({'message': l_user + ' does not match any record.'}, status=status.HTTP_400_BAD_REQUEST)\n\n pin = random.randint(0, 1000000)\n try:\n subject = \"Password Reset Token.\"\n sendEmail(user, subject, \"Password Reset\", otp=pin)\n\n #Write to use record\n ResetRequests.objects.create(user = user, token = pin, use_case = 'password reset')\n \n #Add password reset request date here\n return Response({'message':'Token sent to registered email.', 'username' : user.username}, status=status.HTTP_200_OK)\n except Exception as e:\n return Response({'message':'We could not send an email', 'error':e}, status=status.HTTP_400_BAD_REQUEST)", "def forgot_password():\n\n if not current_user.is_anonymous():\n return redirect(url_for(\"forum.index\"))\n\n form = ForgotPasswordForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n\n if user:\n token = user.make_reset_token()\n send_reset_token(user, token=token)\n\n flash((\"E-Mail sent! 
Please check your inbox.\"), \"info\")\n return redirect(url_for(\"auth.forgot_password\"))\n else:\n flash((\"You have entered an username or email that is not linked \\\n with your account\"), \"danger\")\n return render_template(\"auth/forgot_password.html\", form=form)", "def post(self):\n try:\n url = request.host_url + 'reset/password/'\n body = request.get_json()\n base_url = request.url_root\n email = body.get('email')\n\n if not email:\n raise SchemaValidationError\n\n user = User.objects.get(email=email)\n if not user:\n raise EmailDoesNotExistsError\n\n expires = datetime.timedelta(minutes=60)\n payload = {\"user_id\": str(user.id)}\n reset_token = create_access_token(payload, expires_delta=expires)\n\n return send_email('[Unboxit] Reset Your Password',\n sender='contact@tsantos.dev',\n recipients=[user.email],\n text_body=render_template(\n 'components/reset_password.txt',\n url=url + reset_token),\n html_body=render_template(\n 'components/reset_password.html',\n url=url + reset_token,\n first_name=user.first_name,\n base_url=base_url))\n except SchemaValidationError:\n raise SchemaValidationError\n except DoesNotExist:\n raise EmailDoesNotExistsError\n except Exception as e:\n raise InternalServerError", "def send_password_reset_mail(email, token):\n print(\"reset password\")\n url = f\"{settings.SITE_URL}/reset-password?email={email}&token={token}\"\n SUBJECT = \"Reset Password Request\"\n # The HTML body of the email.\n body = \"\"\"\n <html>\n <head></head>\n <body>\n <p>Here is your password reset link:</p>\n <p><a href='{0}'>{1}</a></p>\n </body>\n </html>\n \"\"\".format(url, url)\n send_mail(SUBJECT, body, email)", "def reset_post():\n if g.session:\n # User is already authenticated\n return jsonify({'redirect': url_for('index.index')})\n\n form = request.values.get('form', default='email')\n token = request.values.get('token', default='')\n email = request.values.get('email', default='')\n password = request.values.get('password', default='')\n\n if form == 'password':\n try:\n user: User = db.session.query(User) \\\n .filter((User.password_token == token) & User.reset_active) \\\n .one()\n if user.is_reset_expired():\n return jsonify({'success': False, 'reason': 'expired'}), 401\n\n if len(password) < 8:\n return jsonify({'success': False, 'reason': 'password'}), 401\n\n user.set_password(password)\n db.session.commit()\n next_url = url_for('auth.reset_status', success=True)\n return jsonify({'success': True, 'redirect': next_url})\n except NoResultFound:\n return jsonify({'success': False, 'reason': 'token not found'}), 401\n else:\n try:\n user: User = db.session.query(User) \\\n .filter(User.email == email).one()\n user.reset_password()\n db.session.commit()\n\n reset_url = urllib.parse.urljoin(\n request.host_url,\n url_for('auth.reset_get', token=user.password_token))\n kwargs = {\n 'subject': gettext('Reset Password'),\n 'body': reset_url,\n 'recipients': [user.email]\n }\n mail.send_mail(**kwargs)\n next_url = url_for('auth.reset_status', sent=True)\n return jsonify({'success': True, 'redirect': next_url})\n except NoResultFound:\n return jsonify({'success': False, 'reason': 'email'}), 401", "def request_password_reset_token():\n j = request.get_json(force=True)\n user_requested = j['user'].lower()\n\n # Disabled user accounts can not request for a new password.\n target_user = User.query.filter_by(mail=user_requested).first()\n\n if target_user is None:\n return Errors.UNKNOWN_USER.make_json_response(status.HTTP_400_BAD_REQUEST)\n\n if target_user.state == 
StateType.DEACTIVATED:\n return Errors.DEACTIVATED_USER.make_json_response(status.HTTP_400_BAD_REQUEST)\n\n target_user.generate_password_request_token()\n\n send_mail(target_user.mail, render_template(\"password/reset_password_mail.txt\",\n greeting=get_opening_greeting(target_user),\n wlink=\"{}/password/reset/{}\".format(\n app.config['BUZZN_BASE_URL'],\n target_user.password_reset_token\n )), 'Passwort zurücksetzen für Buzzn-App')\n\n db.session.commit()\n return '', status.HTTP_201_CREATED", "def _request_reset(self, email):\n response = self.client.post(reverse('users.send_password_reset'),\n {'email': email})\n return response.context['token']", "def reset_token(token):\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n user = User.verify_reset_token(token)\n if user is None:\n message = \"This is an invalid or expired token\"\n return redirect(url_for(\"forgot\", message=message))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n user.password = hashed_password\n db.session.commit()\n message = f'Password has been updated for {user.display_username}'\n return redirect(url_for('login', message=message))\n return render_template(\"reset_token.html\", title=\"Reset Pasword\", form=form, offer_login=True, offer_register=True)", "def forgot_password():\n if request.method == 'POST':\n if 'username' in request.form:\n username = request.form['username']\n user = Users.query.get(username)\n if user:\n reset_slug = utils.encrypt(username)\n reset_url = request.host_url + 'reset_password' + '/' + reset_slug\n from_email = ('noreply@thescriptgroup.in', 'TSG Bot')\n to_email = [(user.email, user.name)]\n subject = 'Password reset for Hades account'\n content = f\"Hello {user.name}, please click <a href=\\\"{reset_url}\\\">here</a> to reset your password!\"\n utils.send_mail(from_email, to_email, subject, content)\n return redirect(url_for('login'))\n return render_template('forgot_password.html')", "def handle_emails():\n email = request.data['email'].strip()\n user = User.query.filter_by(email=email).first()\n option = \\\n request.data['option'].strip() # have a <select> in the frontend\n token = s.dumps(email, salt='email-confirm')\n\n msg = Message('Reset password', sender=app.config['ADMINS'][0],\n recipients=[email])\n link = 'http://localhost:3000/confirm_email/{}/{}'\\\n .format(option, token)\n if user:\n msg.body = 'Your link is {}'.format(link)\n else:\n msg.body = 'You attempted to reset your password but you do not \\\n have an account with us. Please Sign Up and Log in. {}'\\\n .format('http://localhost:3000/register')\n\n mail.send(msg)\n return jsonify({\"message\":\"Please confirm your email.\"}), 201", "def user_reset_password(request, token):\n\n if request.user.is_authenticated():\n return redirect(settings.AFTER_LOGIN_REDIRECT_URL)\n\n form = ResetPasswordForm(request.POST or None)\n\n if request.method == \"POST\":\n if form.is_valid():\n user_auth = get_object_or_404(PasswordResetAuth, token=token)\n user = get_object_or_404(User, email=user_auth.email)\n\n if user_auth.choose_me is True:\n new_password = form.cleaned_data[\"new_password\"]\n user.set_password(new_password)\n user.save()\n\n user_auth.choose_me = False\n user_auth.save()\n return redirect(\"/login/\")\n\n error_message = \"* Either you are not an identified user or \"\\\n \"token has been expired. 
So please click on back.\"\n return render_to_response(\"login/reset_password.html\", {\n \"form\": form,\n \"error_message\": error_message\n }, context_instance=RequestContext(request))\n\n return render_to_response(\"login/reset_password.html\", {\n \"form\": form\n }, context_instance=RequestContext(request))", "def send_password_reset_email():\n aaa.send_password_reset_email(\n username=post_get('username'),\n email_addr=post_get('email_address')\n )\n return 'Please check your mailbox.'", "def reset_password(token):\n\n if not current_user.is_anonymous():\n return redirect(url_for(\"forum.index\"))\n\n form = ResetPasswordForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n expired, invalid, data = user.verify_reset_token(form.token.data)\n\n if invalid:\n flash((\"Your password token is invalid.\"), \"danger\")\n return redirect(url_for(\"auth.forgot_password\"))\n\n if expired:\n flash((\"Your password is expired.\"), \"danger\")\n return redirect(url_for(\"auth.forgot_password\"))\n\n if user and data:\n user.password = form.password.data\n user.save()\n flash((\"Your password has been updated.\"), \"success\")\n return redirect(url_for(\"auth.login\"))\n\n form.token.data = token\n return render_template(\"auth/reset_password.html\", form=form)", "def send_reset_email(staff):\n token = staff.get_reset_token()\n msg = Message('Password Reset Request', \n sender='NoReplyBloodBank@my.unt.edu', \n recipients=[staff.email])\n msg.body = f\"\"\"To reset your password, visit the following link:\n{url_for('reset_token', token=token, _external=True)}\nIf you did not make this request, then simply record this email and no changes will be made.\"\"\"\n try:\n mail.send(msg)\n except Exception as e:\n print(e)", "def forgot():\n form = ForgotForm()\n\n if form.validate_on_submit():\n db.session.add(form.pw_reset)\n db.session.commit()\n\n form.pw_reset.send()\n flash('A password reset link has been sent to your email', 'alert-success')\n return redirect(url_for('default.home'))\n else:\n flash_form_errors(form)\n return render_template('forgot.html', form=form)", "def user_password_reset(self, request):\n reset_password_form = ResetPasswordForm(request.form)\n\n if request.method == \"POST\":\n if reset_password_form.validate_on_submit():\n if check_password_hash(current_user.password, reset_password_form.old_password.data):\n new_hashed_password = generate_password_hash(reset_password_form.password.data)\n\n temp = current_user.get_id()\n (role, email) = temp.split(\":\")\n\n # if first element is `sysadmin` instead of a scheme_id\n # call function to reset `sysadmin` pass\n if role == \"sysadmin\":\n self._scheme_handler.update_hash_password(email, new_hashed_password)\n else:\n # regular user reset\n self._student_handler.update_hash_password(current_user.scheme_id, current_user.k_number, new_hashed_password)\n\n flash(\"Password successfully updated\")\n else:\n flash(\"Old password incorrect\")\n else:\n flash(\"Please double check your new password is valid.\")\n \n return render_template(\"user/reset_password.html\", reset_password_form=reset_password_form)", "def reset_token(token):\n if current_user.is_authenticated:\n return redirect(url_for('LoadDonor'))\n staff = Staff.verify_reset_token(token)\n if staff is None:\n flash('That is an invalid or expired token', 'warning')\n return redirect(url_for('reset_request'))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n hashed_password = 
bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n staff.password = hashed_password\n db.session.commit()\n flash('Your password has been updated! You are now able to log in', 'success')\n return redirect(url_for('login'))\n return render_template('reset_token.html', title='Reset Password', form=form)", "def request_password_reset():", "def post(self):\n data = request.get_json()\n user = actions.get_user_by_email(data['email'])\n html = '<p>To reset your password </p>'\n subject = 'Request for changing password, ' + user['username']\n actions.send_email(data['email'], user['username'], user['password'], subject,\n '/reset_password/', html, False)\n pass", "def password_reset_confirm(request, uidb64, token):\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n try:\n reset_form = ResetPasswordForm(instance=user)\n # urlsafe_base64_decode() decodes to bytestring on Python 3\n except (TypeError, ValueError, OverflowError, user.DoesNotExist):\n user = None\n if user is not None and default_token_generator.check_token(user, token):\n validlink = True\n title = ('Enter new password')\n if request.method == 'POST':\n if 'password-submit' in (request.POST):\n reset_form = ResetPasswordForm(request.POST,instance=user)\n password = request.POST.get(\"password_reset\", None)\n \n if reset_form.is_valid():\n user=reset_form.save(commit = False)\n user.save()\n return redirect('password_reset_complete')\n else:\n reset_form = ResetPasswordForm(instance=user)\n else:\n validlink = False\n reset_form = ResetPasswordForm(instance=user)\n title = ('Password reset unsuccessful')\n return redirect ('invalid_password_link')\n context = {\n 'reset_form': ResetPasswordForm,\n 'title': title,\n 'validlink': validlink,\n }\n return render(request, 'reset_confirm.html', context, {'reset_form': ResetPasswordForm})", "def reset_password(token):\n if current_user.is_authenticated:\n return redirect(url_for('main.index'))\n user = User.verify_reset_password_token(token)\n if not user:\n return redirect(url_for('main.index'))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n user.set_password(form.password.data)\n user.email_confirmed = True\n db.session.commit()\n return render_template(\n 'successful_pass_reset.html', title=\"Password Reset\")\n return render_template('reset_password.html', title=\"Password Reset\",\n form=form), 417", "def send_token(user):\n if 'research' in user.get_domains():\n domain = 'research'\n else: domain = 'academic'\n subject = \"ECE/CIS Password Reset\"\n url = \"https://www.eecis.udel.edu/accounts/reset_password/token/%s\" % user.token\n message = \"A request has been made for a password reset for your ECE/CIS %s account: %s\\n\\n\" % (domain, user.username)\n message += \"To reset your password, please visit the follow the reset link below:\\n\\n%s\\n\" % url\n message += \"This token will expire 30 minutes after the initial request was made\\n\\n\"\n message += \"If this is not your ECE/CIS username, or you did not request a password reset, please\\n\"\n message += \"submit a Help Request at https://www.eecis.udel.edu/helprequest\\n\\nECE/CIS Labstaff\"\n\n send('account@eecis.udel.edu', 'ECE/CIS Account System', \\\n [user.email], subject, message, MAILHOST)", "def forgot_password():\n url = 'http://localhost:8080/' + 'user/reset/'\n body = request.get_json()\n email = body.get('email')\n if not email:\n return jsonify(msg.MISSING_PARAMETER), 400\n user_email = views.UserManagement().exists(email=email)\n\n if not 
user_email:\n return jsonify(msg.NO_DATA), 404\n expires = datetime.timedelta(hours=24)\n reset_token = create_access_token(identity=email, expires_delta=expires)\n\n send_email('[Shodita] Reset Your Password', sender='shodita@shodita.com', recipients=[email],\n text_body=render_template('email/reset_password.txt', url=url + reset_token),\n html_body=render_template('email/reset_password.html', url=url + reset_token))\n\n return jsonify(msg.SUCCESS), 200", "def password_resetenter(request, uidb64=None, token=None):\n\n\tcontext_dict = {}\n\tif request.method == 'POST':\n\t\tassert uidb64 is not None and token is not None\n\t\tuid = urlsafe_base64_decode(uidb64)\n\t\tuser = models.Teacher.objects.get(\n\t\t\tsoft_delete=False, pk=uid\n\t\t)\n\t\tdb_user = user.user\n\t\treset_token = models.PasswordReset.objects.get(\n\t\t\ttoken=token, user=user\n\t\t)\n\t\ttoken_check = models.PasswordReset.objects.filter(\n\t\t\tuser=user, soft_delete=False, token_consumed=False,\n\t\t).exclude(token=token).first()\n\t\tupdate_fields = []\n\t\ttoken_check.token_consumed = True\n\t\tupdate_fields.append('token_consumed')\n\t\ttoken_check.soft_delete = True\n\t\tupdate_fields.append('soft_delete')\n\t\ttoken_check.save(update_fields=update_fields)\n\t\ttime_threshold = timezone.now() - reset_token.password_request_created_at\n\t\tif time_threshold > timedelta(minutes=30):\n\t\t\ttry:\n\t\t\t\tupdate_fields = []\n\t\t\t\treset_token.token_consumed = True\n\t\t\t\tupdate_fields.append('token_consumed')\n\t\t\t\treset_token.soft_delete = True\n\t\t\t\tupdate_fields.append('soft_delete')\n\t\t\t\treset_token.save(update_fields=update_fields)\n\t\t\texcept Exception as e:\n\t\t\t\tprint (e)\n\t\tif reset_token.user == user and reset_token.token == token:\n\t\t\tif reset_token.token_consumed == False and reset_token.soft_delete == False:\n\t\t\t\ttry:\n\t\t\t\t\tupdate_fields = []\n\t\t\t\t\treset_token.token_consumed = True\n\t\t\t\t\tupdate_fields.append('token_consumed')\n\t\t\t\t\treset_token.soft_delete = True\n\t\t\t\t\tupdate_fields.append('soft_delete')\n\t\t\t\t\treset_token.save(update_fields=update_fields)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint (e)\n\t\t\t\tform = AdminPasswordChangeForm(user=db_user, data=request.POST)\n\t\t\t\tif form.is_valid():\n\t\t\t\t\tform.save()\n\t\t\t\t\thistory = models.History(\n\t\t\t\t\t\tuser=user,\n\t\t\t\t\t\tactivity = \"\",\n\t\t\t\t\t\tactivity_type = \"Reset Password\"\n\t\t\t\t\t)\n\t\t\t\t\thistory.save()\n\t\t\t\t\tcontext_dict[\"message\"] = \"Password changed successfully\"\n\t\t\t\telse:\n\t\t\t\t\tcontext_dict[\"message\"] = \"Password not changed\"\n\t\t\telse:\n\t\t\t\tcontext_dict[\"message\"] = \"Link is no longer valid\"\n\treturn render(request, \"reset.html\", context_dict)", "def send_reset_email(user, domain_override=None,\n subject_template_name='registration/password_reset_request_subject.txt',\n email_template_name=None, use_https=False,\n token_generator=default_token_generator, from_email=None, request=None,\n html_email_template_name='registration/password_reset_email.html', extra_email_context=None):\n if user.first_name != \"\":\n user_name = user.first_name.title()\n else:\n user_name = user.email\n\n context = {\n 'email': user.email,\n 'user_name': user_name,\n 'domain': settings.BASE_URL,\n 'site_name': \"Clubby\",\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n 'user': user,\n 'token': token_generator.make_token(user),\n 'protocol': 'https' if use_https else 'http',\n }\n send_mail(subject_template_name, 
email_template_name, context, from_email, user.email,\n html_email_template_name=html_email_template_name)", "def reset_password_email(request):\n if request.method == 'POST' :\n try:\n print(request.POST)\n user = models.UserProfile.objects.get(email=request.POST.get('email',''))\n current_site=get_current_site(request)\n email_subject='Password Reset'\n message=render_to_string('reset_password.html',{\n 'user':user,\n 'domain':current_site.domain,\n 'uid':urlsafe_base64_encode(force_bytes(user.id)),\n 'token':account_activation_token.make_token(user),\n })\n to_email= user.email\n email= EmailMessage(email_subject,message,to=[to_email])\n email.send()\n return JsonResponse(\n {\n \"status\":\"The Reset password email has been sent.\"\n }\n )\n except(TypeError, ValueError, OverflowError, models.UserProfile.DoesNotExist):\n user = None\n return JsonResponse(\n {\n \"status\":\"No matching account found\"\n }\n )\n else :\n return JsonResponse(\n {\n \"status\":\"only post method is available\"\n }\n )", "def post(self, request, token):\n form = PasswordResetForm(request.DATA)\n if form.is_valid():\n user_data = get_user_data(\n signing.loads(\n token,\n max_age=self.token_expires,\n salt=self.salt))\n if user_data:\n user_data.set_password(request.DATA['password1'])\n user_data.save()\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Change successfully\",\n 'message': \"your password has Change successfully\"})\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Sorry something wrong\",\n 'message': \"sorry try again to set new password\"})\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Sorry something wrong\",\n 'message': \"sorry try again to set new password\"})", "def send_password_reset_email(user):\n\n token = user.get_password_token()\n reset_time=datetime.now()\n send_email('[SiteSurveyApp] Account password reset',\n recipients=[user.email],\n sender=app.config['MAIL_DEFAULT_SENDER'],\n text_body=render_template('auth/emails/reset_password.txt',\n user=user, token=token, reset_time=reset_time),\n html_body=render_template('auth/emails/reset_password.html',\n user=user, token=token, reset_time=reset_time))", "def POST(self):\n session = web.ctx.session\n nav = get_nav_bar(session)\n data = web.input(reset_token = \"\", new_password=\"\")\n \n reset_password_colum = reset_password_form()\n \n # check each field is endered values.\n if not reset_password_colum.validates():\n return render.reset_password(nav, reset_password_form, \"All fields must be valid.\")\n \n try:\n # log ip information\n ip_addr = web.ctx[\"ip\"]\n accessed_path = web.ctx[\"fullpath\"]\n\n # query user's name (username) and token (extra secruity)\n token = data.reset_token\n username = search_for_user(token, ip_addr, accessed_path)\n #print(\"-\"*16)\n #print(username)\n \n #update token to null database\n result_update_token = update_token_to_null(username, token, ip_addr, accessed_path)\n print(\"-\" * 16 + \"updated!\")\n\n # generate new password\n new_salt = generate_salt()\n hashed_password = hashed_value(data.new_password, new_salt)\n hashed_password = new_salt + hashed_password\n\n # update password \n result_update_password = update_user_password(username, hashed_password, ip_addr, accessed_path )\n raise web.seeother(\"/\")\n except Exception as e:\n print(e)\n except:\n print(exit[0])\n return render.login(nav, reset_password_form, \"- Something went wrong!\")", "def send_reset_email(s):\n \n email = 
s.email\n username = s.username\n sponsor_id = s.id\n reset_key = id_generator(size=20)\n\n cache.set('reset_%s' % reset_key, sponsor_id, 86400) \n\n message = \"We have received a request to reset your password for your \"\n message += \"Goo.im sponsor account. Please click the link below to reset your password.\\n\\n\"\n message += \"https://goo.im/sponsor/password?token=%s\" % reset_key\n message += \"\\n\\n\"\n message += \"If you feel that you received this message in error, or you did not request a password \"\n message += \"reset, please contact our admins by replying to this email.\"\n message += \"\\n\\n\"\n message += \"-- The Goo.im team\"\n\n send_mail('Password Request', message,\n 'support@snipanet.com', [email])", "def save(self, domain_override=None,\n subject_template_name='registration/password_reset_subject.txt',\n email_template_name='registration/password_reset_email.html',\n use_https=False, token_generator=default_token_generator,\n from_email=None, request=None,\n html_email_template_name=None):\n email = self.cleaned_data[\"email\"]\n User = get_user_model()\n active_users = User.objects.filter(email__iexact=email, is_active=True)\n for user in active_users:\n subject = _('Flisol - Restore your password')\n # send_email(\n # subject,\n # [user.email],\n # email_template_name,\n # {\n # 'email': user.email,\n # 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n # 'user': user,\n # 'token': token_generator.make_token(user),\n # 'protocol': settings.PROTOCOL,\n # },\n # )", "def ask_password_reset(request):\n output_data = {}\n\n # Here we do not send a JSON answer based on success or failure\n # in order to prevent attackers from knowing if email exists in db or not.\n\n if request.method == 'POST':\n\n email = request.POST.get('email')\n\n if not email:\n output_data['error_code'] = '1'\n output_data['error_details'] = errors_for_dev['1']\n return JsonResponse(\n output_data,\n status=status.HTTP_400_BAD_REQUEST\n )\n\n email = email.lower()\n\n try:\n user = User.objects.get(email=email)\n except exceptions.ObjectDoesNotExist:\n return JsonResponse(output_data)\n\n signer = TimestampSigner()\n timestamped_id = signer.sign(user.id)\n\n password_reset_url = \"%s%s\" % (\n settings.SITE_BASE_URL,\n reverse(set_new_password, args=(timestamped_id,))\n )\n\n send_password_reset_email(email, password_reset_url)\n\n return JsonResponse(output_data)\n\n else:\n\n output_data['error_code'] = '8'\n output_data['error_details'] = errors_for_dev['8']\n return JsonResponse(\n output_data,\n status=status.HTTP_400_BAD_REQUEST\n )", "def email_body_recover_your_password(url):\n\tmsg = '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr><td align=\"center\" valign=\"top\"></td></tr></tbody></table>'\n\tmsg = msg + '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr>'\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6; border-top: 2px solid #e6e6e6\" cellspacing=\"0\" cellpadding=\"10\" width=\"600\">'\n\tmsg = msg + '<tbody>'\n\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF; padding-top:35px\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<a href=\"https://insprite.co\"><img src=\"http://ryanfbaker.com/insprite/inspriteLogoB.png\" border=\"0\" alt=\"Insprite\" align=\"center\" width=\"200px\" height=\"55px\" /></a>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</tbody>'\n\tmsg = 
msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-1.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"110\" width=\"600\" height=\"350\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;padding-top:50px;\" align=\"left\" valign=\"top\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:16px;\">We get it&mdash;strong passwords can be tough to remember.<br><br>'\n\tmsg = msg + 'No biggie, simply <a href=\\\"' + url + '\\\" style=\"color:#1488CC\">follow the instructions to change it.</a> and you\\'ll be good to go.<br><br>'\n\tmsg = msg + 'Didn\\'t request for a password reset? <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">Give us a holler ASAP</a>.</font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/facebookIcon.png\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/twitterIcon.png\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/instagramIcon.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-2.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:10px;\"> <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">Contact Us</a>'\n\tmsg = msg + '| Sent by <a href=\\\"https://insprite.co\\\">Insprite</a>, California, USA. 
| <a href=\"#\" style=\"color:#1488CC\">Unsubscribe</a></font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr> <td style=\"border-top: 0px solid #333333; border-bottom: 0px solid #FFFFFF;\">'\n\tmsg = msg + '<img width=\"596px\" src=\"http://ryanfbaker.com/insprite/footerImage.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\treturn msg", "def send_pw_reset_email(user):\n token = user.get_token()\n message = Message(\n 'Reset Your Password',\n sender='storcwebsite@gmail.com',\n recipients=[user.email])\n message.body = f\"To verify reset your password, click the link \" \\\n f\"below:\\n\\n\" \\\n f\"{url_for('users.reset_password', token=token, _external=True)}\"\n mail.send(message)", "def reset_password(token):\n # this token just active in 300s\n if request.method == \"POST\":\n email = s.loads(token, salt='email-confirm', max_age=300)\n print(email)\n user = User.query.filter_by(email=email).first()\n\n newPassword1 = request.form.get('newPassword1')\n newPassword2 = request.form.get('newPassword2')\n\n print(user)\n if user:\n \"\"\"kiểm tra password hợp lệ-chỗ này cần chỉnh thêm file html-----------------------------------------------------\"\"\"\n if re.search(PASSWORD_PATTERN, newPassword1) is None:\n flash('Password must be from 6-10 characters, have a digit must occur at least , '\n 'a lower case letter must occur at least once, no whitespace allowed in the entire string.',\n category='error')\n elif newPassword1 != newPassword2:\n flash('Passwords don\\'t match.', category='success')\n else:\n user.password = generate_password_hash(newPassword1, method='sha256')\n db.session.commit()\n print(user.password)\n print(\"đang thay đổi đây.............\")\n flash('Change password successfully!.', category='success')\n return redirect(url_for('auth.login'))\n # ------------------------------------------------------------------------------------------------------------------\n return render_template(\"forgotPass.html\")", "def reset_password(self, reset_token_url, password):\n query = parse.parse_qs(parse.urlparse(reset_token_url).query)\n return self._action('resetPasswordSubmitForm', {\n 'token': query['token'][0],\n 'key': query['key'][0],\n 'newpassword': password,\n 'confirmpassword': password,\n }, api='resetpassword')", "def forgotpassword(request):\n if request.method == 'GET':\n return render(request, 'app/other/forgot_password.html', {'title':'Forgot Password?',})\n elif request.method == 'POST':\n username = request.POST['username']\n\n if User.objects.filter(username = username).exists():\n user = User.objects.get(username = username)\n if Referee.objects.filter(user = user).exists():\n referee = Referee.objects.get(user = user)\n # generate token\n passwordResetTokenGenerator = PasswordResetTokenGenerator()\n token = PasswordResetTokenGenerator.generate_token(passwordResetTokenGenerator, str(user.id))\n token = str(token.decode('utf-8'))\n # email to referee\n subject = \"[Password Reset Link]\"\n message = 'http:////localhost:8000//reset//token=//' + token\n content = \"<br>Dear sir,</br><br></br><br></br>Link is: \"+message+'. 
Please click on the link to change the credentials.'+\"<br></br><br></br>Regards,<br></br>PhDPortal.\"\n email = []\n receiver = referee.user\n email.append(receiver.email)\n send_email_task.delay(email, subject, content)\n # redirect to same page with status to check your mail and click on activation link\n \n dict = {'status' : 'Done', 'message' : 'An Activation link has been sent to your mail-id'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else: # given username is not valid to use this feature\n dict = {'status': 'Error', 'message' : 'You are not Authorized to change password'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else: # given username is not valid to use this feature\n dict = {'status': 'Error', 'message' : 'Invalid Username, Try Again!'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def forgot_passwd(request):\n dc_settings = request.dc.settings\n\n return password_reset(\n request,\n template_name='gui/accounts/forgot.html',\n email_template_name='gui/accounts/forgot_email.txt',\n subject_template_name='gui/accounts/forgot_subject.txt',\n password_reset_form=partial(ForgotForm, request),\n post_reset_redirect=reverse('forgot_done'),\n from_email=dc_settings.DEFAULT_FROM_EMAIL,\n current_app='gui',\n extra_context={\n 'e_site_name': dc_settings.SITE_NAME,\n 'e_site_link': dc_settings.SITE_LINK,\n })", "def forgot_req(request):\n server = request.META['SERVER_NAME']\n recover_url = urljoin(full_url(request), 'recover')\n\n if request.POST and not request.user.is_authenticated():\n\ttry:\n\t username_or_email = request.POST['username']\n\texcept KeyError:\n\t pass\n\telse:\n\t if '@' in username_or_email:\n\t\tqs = User.objects.filter(email = username_or_email)\n\t else:\n\t\tqs = User.objects.filter(username = username_or_email)\n\n\t users = []\n\t user = None\n\n\t for user in qs:\n\t\tquery = 'salt=%s&user=%s' % (urlsafe_b64encode(urandom(8)),\\\n\t\t\t\t\t user.username)\n\t\turl = add_encrypted_query_string(recover_url, query,\n\t\t\t\t\t\t settings.SECRET_KEY)\n\n\t\turl = sign_query_string(settings.SECRET_KEY + user.password,\n\t\t\t\t\turl)\n\n\t\tusers.append(dict(username = user.username, url = url))\n\n\t template = get_template('registration/recover-password.txt')\n\t context = Context(dict(users = users, ApplianceName = server))\n\n\t if len(users) == 1:\n\t\tplural = ''\n\t else:\n\t\tplural = 's'\n\n\t if user:\n\t\tuser.email_user(subject = \"Your %s console account%s\" % (server, plural),\n\t\t\t\tfrom_email = FROM_EMAIL,\n\t\t\t\tmessage = template.render(context))\n\n\t return HttpResponseRedirect('sent')\n\n return render_to_response('registration/forgotten.html',\n\t\t\t dict(username=request.GET.get('username', ''),\n META=request.META, root=settings.ROOT_URL,\n media=settings.MEDIA_URL))", "def forgot_password():\n \n if 'username' in session: \n flash('You are already logged in, you can reset your password here.', 'info')\n return redirect(url_for('dashboard'))\n \n form = ForgotPasswordForm()\n \n if request.method == 'POST':\n if form.validate_on_submit(): \n user = mongo.db.user.find_one({'email':form.email.data})\n\n if user:\n flash('Please enter your security passphrase and create a new password', 'info')\n return redirect(url_for('reset_password')) \n \n flash('Email address not found!', 'danger')\n return render_template('pages/forgot.html', \n title='Forgot Password', \n form=form\n )\n \n return 
render_template('pages/forgot.html', title='Forgot Password', form=form)", "def reset_token(token):\n if current_user.is_authenticated:\n return redirect(url_for('users.home'))\n\n user = User.verify_secret_token(token)\n\n if user is None:\n flash('That is invalid or expired token', 'warning')\n\n return redirect(url_for('users.reset_password'))\n\n form = ResetPasswordForm()\n\n if form.validate_on_submit():\n hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n user.password = hashed_password\n db.session.commit()\n\n return redirect(url_for('users.login'))\n\n return render_template('reset_token.html', form=form)", "def reset_password():\n body = request.get_json()\n reset_token = body.get('reset_token')\n password = body.get('password')\n\n if not reset_token or not password:\n return jsonify(msg.MISSING_PARAMETER), 400\n\n user_email = decode_token(reset_token)['identity']\n is_changed = views.UserManagement().change_password(email=user_email, password=password)\n if not is_changed:\n return jsonify(msg.NO_DATA), 404\n\n send_email('[Shodita] Password reset successful', sender='shodita@shodita.com', recipients=[user_email],\n text_body='Password reset was successful', html_body='<p>Password reset was successful</p>')\n\n return jsonify(msg.SUCCESS), 200", "def send_email(request):\n if \"email\" in request.DATA:\n email_addr = request.DATA[\"email\"]\n try:\n user = User.objects.get(email=email_addr)\n except User.DoesNotExist:\n return JsonResponse(\n \"Bad request - No registered user with that email\",\n status=400,\n safe=False,\n )\n\n urlsafe_chars = string.ascii_letters + string.digits + \"-_\"\n code_str = \"\".join(random.choice(urlsafe_chars) for _ in range(100))\n\n # 30 minutes from now\n expiry_time = timezone.now() + datetime.timedelta(minutes=30)\n\n # overwrite old code\n if PasswordResetCode.objects.filter(user_id=user.id).exists():\n reset_code = PasswordResetCode.objects.get(user_id=user.id)\n reset_code.delete()\n\n PasswordResetCode.objects.create(\n user_id=user.id, code=code_str, expiry=expiry_time\n )\n\n message = build_email(\n email_addr, user.id, code_str, user.first_name, user.username\n )\n send_reset_email(message)\n\n return JsonResponse(\"OK - email sent\", status=200, safe=False)\n\n return JsonResponse(\n \"Bad request - Must provide email\", status=400, safe=False\n )", "def password_reset(request):\n try:\n with transaction.atomic():\n try:\n data = request.data\n data = validations_utils.email_validation(data) # Validates email id, it returns lower-cased email in data.\n user = validations_utils.user_validation_with_email(data['email'])\n except ValidationException as e: # Generic exception\n return Response(e.errors, status=e.status)\n current_site = get_current_site(request)\n domain = current_site.domain\n key = utils.create_reset_password_key(user.email)\n utils.send_reset_password_mail(user, key, domain) # Sends an email for resetting the password.\n return Response(messages.PASSWORD_RESET_LINK_SENT, status=status.HTTP_200_OK)\n except IntegrityError:\n return Response(messages.CAN_NOT_RESET_PASSWORD, status=status.HTTP_500_INTERNAL_SERVER_ERROR)", "def GET(self):\n session = web.ctx.session\n nav = get_nav_bar(session)\n self.token = web.input().reset_token\n return render.reset_password(nav, reset_password_form, \"\")", "def save(\n self,\n domain_override=None,\n subject_template_name=\"registration/password_reset_subject.txt\",\n email_template_name=\"registration/password_reset_email.html\",\n 
use_https=False,\n token_generator=default_token_generator,\n from_email=None,\n request=None,\n html_email_template_name=None,\n extra_email_context=None,\n ):\n from django.core.mail import send_mail\n\n email = self.cleaned_data[\"email\"]\n active_users = User._default_manager.filter(email__iexact=email, is_active=True)\n for user in active_users:\n # Make sure that no email is sent to a user that actually has\n # a password marked as unusable\n if not user.has_usable_password():\n continue\n from_email = settings.DEFAULT_FROM_EMAIL or from_email\n\n base_url = get_base_url()\n parsed = urllib.parse.urlparse(base_url)\n domain = parsed.netloc\n protocol = parsed.scheme\n\n kbsite = models.KegbotSite.get()\n site_name = kbsite.title\n c = {\n \"email\": user.email,\n \"site_name\": site_name,\n \"uid\": urlsafe_base64_encode(force_bytes(user.pk)),\n \"user\": user,\n \"token\": token_generator.make_token(user),\n \"domain\": domain,\n \"protocol\": protocol,\n }\n subject = loader.render_to_string(subject_template_name, c)\n # Email subject *must not* contain newlines\n subject = \"\".join(subject.splitlines())\n email = loader.render_to_string(email_template_name, c)\n send_mail(subject, email, from_email, [user.email])", "def do_reset(term, handle, email=u''):\n sep_ok = getattr(term, color_secondary)(u'::')\n sep_bad = getattr(term, color_primary)(u'::')\n email = u''\n\n for _ in range(passkey_max_attempts):\n handle = prompt_input(term=term,\n key='Username',\n content=handle or u'',\n width=username_max_length)\n\n if not handle:\n # canceled\n return False\n\n email = prompt_input(term=term,\n key='E-mail',\n content=email or u'',\n width=email_max_length)\n if not email:\n # canceled\n return False\n\n user = matches_email(handle, email)\n if not user:\n echo(fixate_next(term))\n echo(u'{0} Address is incorrect !'.format(sep_bad))\n # try e-mail address again\n continue\n\n echo(fixate_next(term))\n passkey = send_passkey(user)\n if not passkey:\n # failed to send e-mail\n term.inkey(1)\n echo(u'\\r\\n\\r\\n')\n return False\n\n echo(u'{0} E-mail successfully delivered !'.format(sep_ok))\n\n for _ in range(passkey_max_attempts):\n try_passkey = prompt_input(term=term,\n key='Passkey',\n width=password_max_length)\n\n if not try_passkey:\n # canceled\n return False\n\n if passkey.strip() != try_passkey.strip():\n # passkey does not match\n echo(fixate_next(term))\n echo(u'{0} Passkey does not verify !'.format(sep_bad))\n # try passkey again\n continue\n\n new_password = prompt_input(term=term,\n key='Password',\n hidden=hidden_char,\n width=password_max_length)\n if not new_password:\n # canceled\n return False\n\n user.password = new_password\n user.save()\n log.debug('password reset successful for user {0!r}.'\n .format(user.handle))\n echo(fixate_next(term))\n echo(u'{0} Password reset successful !'.format(sep_ok))\n return True\n\n echo(fixate_next(term))\n echo(u'{0} Too many authentication attempts.'.format(sep_bad))\n\n echo(fixate_next(term))\n echo(u'{0} Too many authentication attempts.'.format(sep_bad))", "def password_reset_confirm_wrapper(\r\n request,\r\n uidb36=None,\r\n token=None,\r\n):\r\n # cribbed from django.contrib.auth.views.password_reset_confirm\r\n try:\r\n uid_int = base36_to_int(uidb36)\r\n user = User.objects.get(id=uid_int)\r\n user.is_active = True\r\n user.save()\r\n except (ValueError, User.DoesNotExist):\r\n pass\r\n\r\n # tie in password strength enforcement as an optional level of\r\n # security protection\r\n err_msg = None\r\n\r\n if 
request.method == 'POST':\r\n password = request.POST['new_password1']\r\n if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):\r\n try:\r\n validate_password_length(password)\r\n validate_password_complexity(password)\r\n validate_password_dictionary(password)\r\n except ValidationError, err:\r\n err_msg = _('Password: ') + '; '.join(err.messages)\r\n\r\n # also, check the password reuse policy\r\n if not PasswordHistory.is_allowable_password_reuse(user, password):\r\n if user.is_staff:\r\n num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']\r\n else:\r\n num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']\r\n err_msg = _(\"You are re-using a password that you have used recently. You must \"\r\n \"have {0} distinct password(s) before reusing a previous password.\").format(num_distinct)\r\n\r\n # also, check to see if passwords are getting reset too frequent\r\n if PasswordHistory.is_password_reset_too_soon(user):\r\n num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']\r\n err_msg = _(\"You are resetting passwords too frequently. Due to security policies, \"\r\n \"{0} day(s) must elapse between password resets\").format(num_days)\r\n\r\n if err_msg:\r\n # We have an password reset attempt which violates some security policy, use the\r\n # existing Django template to communicate this back to the user\r\n context = {\r\n 'validlink': True,\r\n 'form': None,\r\n 'title': _('Password reset unsuccessful'),\r\n 'err_msg': err_msg,\r\n }\r\n return TemplateResponse(request, 'registration/password_reset_confirm.html', context)\r\n else:\r\n # we also want to pass settings.PLATFORM_NAME in as extra_context\r\n extra_context = {\"platform_name\": settings.PLATFORM_NAME}\r\n\r\n if request.method == 'POST':\r\n # remember what the old password hash is before we call down\r\n old_password_hash = user.password\r\n\r\n result = password_reset_confirm(\r\n request, uidb36=uidb36, token=token, extra_context=extra_context\r\n )\r\n\r\n # get the updated user\r\n updated_user = User.objects.get(id=uid_int)\r\n\r\n # did the password hash change, if so record it in the PasswordHistory\r\n if updated_user.password != old_password_hash:\r\n entry = PasswordHistory()\r\n entry.create(updated_user)\r\n\r\n return result\r\n else:\r\n return password_reset_confirm(\r\n request, uidb36=uidb36, token=token, extra_context=extra_context\r\n )", "def password_reset_confirm(request, uidb64=None, token=None,\n template_name='registration/password_reset_confirm.html',\n token_generator=default_token_generator,\n set_password_form=SetPasswordForm,\n post_reset_redirect=None,\n current_app=None, extra_context=None):\n UserModel = get_user_model()\n assert uidb64 is not None and token is not None # checked by URLconf\n if post_reset_redirect is None:\n post_reset_redirect = reverse('session:password_reset_complete')\n else:\n post_reset_redirect = resolve_url(post_reset_redirect)\n try:\n # urlsafe_base64_decode() decodes to bytestring on Python 3\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = UserModel._default_manager.get(pk=uid)\n except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):\n user = None\n\n if user is not None and token_generator.check_token(user, token):\n validlink = True\n title = _('Enter new password')\n if request.method == 'POST':\n form = set_password_form(user, request.POST)\n if form.is_valid():\n form.save()\n return 
HttpResponseRedirect(post_reset_redirect)\n else:\n form = set_password_form(user)\n else:\n validlink = False\n form = None\n title = _('Password reset unsuccessful')\n context = {\n 'form': form,\n 'title': title,\n 'validlink': validlink,\n }\n if extra_context is not None:\n context.update(extra_context)\n\n if current_app is not None:\n request.current_app = current_app\n\n return TemplateResponse(request, template_name, context)", "def send_recovery_email(app: Flask, token: str, email: str) -> None:\n mail = Mail(app)\n with open(\"api/mailer/templates/invite.html\", \"r\") as f:\n template = Template(f.read())\n\n msg = Message(\"Account Recovery\", sender=\"App Admin\", recipients=[email])\n\n msg.html = template.render(\n url=f\"{FRONTEND_URL}/recovery/{email}/{token}\",\n title=\"OSUMC Cultural Awareness App Admin Recovery Email\",\n link_caption=\"Click the following link to recover your account\",\n header=\"Recover your Account\",\n action=\"Recover Account\",\n )\n\n mail.send(msg)", "def email_user(to_email, password=None, token=None):\n try:\n if password and token:\n raise Exception('No email has been sent. Both token and password is set.')\n mail = Mail(APP)\n if to_email and password:\n message = Message(\n 'Resela+ - Welcome!',\n sender=APP.iniconfig.get('flask', 'mail_username'),\n recipients=[to_email]\n )\n message.body = 'Greetings,\\nYour password: ' + password + \\\n '\\n\\nWhen you first log in to the system remember to change the ' \\\n 'password in settings.\\n\\n' + \\\n flask.url_for('default.index', _external=True) + \\\n '\\n\\nKind regards,\\nThe ReSeLa+ Group'\n elif to_email and token:\n message = Message(\n 'Resela+ - Reset password request, link valid for 10 minutes',\n sender=APP.iniconfig.get('flask', 'mail_username'),\n recipients=[to_email]\n )\n message.body = 'Greetings, \\nYou have requested to reset you password on ' \\\n 'ReSeLa+. Follow the link to complete the password reset ' \\\n 'process. \\n\\n' + \\\n flask.url_for('account.reset_password', _external=True,\n token=token) + \\\n '\\n\\nKind regards,\\nThe ReSeLa+ group'\n elif to_email:\n message = Message(\n 'Resela+ - Confirmation password reset',\n sender=APP.iniconfig.get('flask', 'mail_username'),\n recipients=[to_email]\n )\n message.body = 'Greetings,\\nYour password has now been reset. Log in to ' \\\n 'ReSeLa+:\\n\\n' + flask.url_for('default.index', _external=True) + \\\n '\\n\\nIf you did not make this request, please contact your ' \\\n 'ReSeLa+ administrator.\\n\\nKind regards,\\nThe ReSeLa+ Group'\n else:\n raise Exception('No email has been sent. 
Invalid parameters.')\n mail.send(message)\n except Exception as error:\n print(error)", "def password_reset(self, password, vtoken, welcomeEmailTemplate = ''):\n auth = 'appkey='+ self._lr_object._get_api_key()+ '&appsecret='+ self._lr_object._get_api_secret() + '&vtoken=' + vtoken\n payload = {'password': password}\n url = SECURE_API_URL + \"raas/v1/account/password/reset\" + \"?\" + auth\n return self._lr_object._post_json(url, payload)", "def reset_password():\n json_data = request.get_json()\n user_email = json_data.get('email') or None\n\n if user_email is None:\n raise BadRequest(description=INCORRECT_RESET_PARAMS_MSG)\n\n user_account = db.session.query(UserAccount).filter(\n UserAccount.email == user_email).first()\n if user_account is None:\n raise BadRequest(description=INCORRECT_RESET_PARAMS_MSG)\n\n # Generate password hash\n temp_password = str(random.randint(10000,99999))\n update_user = {'password_hashed': get_hashed_password(temp_password)}\n user_account.update(**update_user)\n user_account.save()\n\n email.send('reset_password', user_email, temp_password)\n\n return {'status_code': 200, 'message': 'Password reset success!'}", "def post(self):\n try:\n body = request.get_json()\n bearer = request.headers.get('Authorization')\n base_url = request.url_root\n token = bearer.split()[1]\n password = body.get('password')\n\n if not token or not password:\n raise SchemaValidationError\n\n user_id = decode_token(token)['sub']['user_id']\n\n user = User.objects.get(id=user_id)\n\n user.modify(password=password)\n user.hash_password()\n user.save()\n\n return send_email('[Unboxit] Password reset successful',\n sender='contact@tsantos.dev',\n recipients=[user.email],\n text_body='Password Reset',\n html_body=render_template(\n 'components/reset_password_response.html',\n first_name=user.first_name,\n base_url=base_url))\n\n except SchemaValidationError:\n raise SchemaValidationError\n except ExpiredSignatureError:\n raise ExpiredTokenError\n except (DecodeError, InvalidTokenError):\n raise BadTokenError\n except Exception as e:\n raise InternalServerError", "def save(self, domain_override=None,\n subject_template_name='registration/password_reset_subject.txt',\n email_template_name='registration/password_reset_email.html',\n use_https=False, token_generator=default_token_generator,\n from_email=None, request=None, html_email_template_name=None,\n extra_email_context=None):\n email = self.cleaned_data[\"email\"]\n for user in self.get_users(email):\n if not domain_override:\n current_site = get_current_site(request)\n site_name = current_site.name\n domain = current_site.domain\n else:\n site_name = domain = domain_override\n context = {\n 'email': email,\n 'domain': domain,\n 'site_name': site_name,\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n 'user': user,\n 'token': token_generator.make_token(user),\n 'protocol': 'https' if use_https else 'http',\n }\n if extra_email_context is not None:\n context.update(extra_email_context)\n self.send_mail(\n subject_template_name, email_template_name, context, from_email,\n email, html_email_template_name=html_email_template_name,\n )", "def verify_email(request):\n user = User.objects.get(username=request.user)\n if request.method == 'POST':\n otp = request.data.get('otp')\n if not otp:\n return Response({'message':\"We cannot find your otp\"}, status=status.HTTP_400_BAD_REQUEST)\n\n #Get token\n qs = ResetRequests.objects.filter(user=user, token=otp, use_case = 'account confirmation')\n if not qs.exists():\n return 
Response({'message':'Wrong Token.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Grab the last token\n token_request = qs.last()\n timer = token_request.created_at\n\n #Check token expiry\n if timezone.now() > timer + timezone.timedelta(minutes=10):\n return Response({'message':'Token Expired. Request another please.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Check whether token has been used.\n if token_request.consumed:\n return Response({\"message\":\"Pin has been used already\"}, status=status.HTTP_400_BAD_REQUEST)\n\n if int(otp) == int(token_request.token):\n #Set user as verified\n user.email_verified = True\n user.save()\n #Set token as consumed\n token_request.consumed = True\n token_request.save()\n\n #Send Confirmation Mail\n email_subject = \"SpendWise - Account Verified.\"\n email_msg = \"Your account has been verified. Welcome to the SpendWise Ecosystem\"\n try:\n sendEmail(user, email_subject, \"Account Verified\", information=email_msg)\n return Response({'message':'User account successfully verified.'}, status=status.HTTP_200_OK)\n except:\n return Response({'message':'We could not send a confirmation email'}, status=status.HTTP_200_OK)\n\n\n if request.method == 'GET':\n to = User.objects.get(username=request.user).email\n pin = random.randint(0, 1000000)\n #presumes this link is only reachable cos the user already has an email.\n to = user.email\n try:\n subject = \"Account Confirmation.\"\n message = f\"Your Account Confirmation code is {pin}\\n\\nExpires in 10 minutes.\"\n sendEmail(user, subject, \"Account Confirmation\", information=message, otp=pin)\n\n #Write to user's record\n ResetRequests.objects.create(\n user = user,\n token = pin,\n use_case = 'account confirmation'\n )\n #Add password reset request date here\n return Response({'message':'Token sent to registered email.',\n 'email' : to},\n status=status.HTTP_200_OK)\n except Exception as e:\n return Response({'message':'We could not send an email', 'error':e},\n status=status.HTTP_400_BAD_REQUEST)\n\n #Do the actual verification\n #Verified is alrady possibly True via sms. What happens now?", "def password_reset(request):\r\n if request.method != \"POST\":\r\n raise Http404\r\n\r\n # Add some rate limiting here by re-using the RateLimitMixin as a helper class\r\n limiter = BadRequestRateLimiter()\r\n if limiter.is_rate_limit_exceeded(request):\r\n AUDIT_LOG.warning(\"Rate limit exceeded in password_reset\")\r\n return HttpResponseForbidden()\r\n\r\n form = PasswordResetFormNoActive(request.POST)\r\n if form.is_valid():\r\n form.save(use_https=request.is_secure(),\r\n from_email=settings.DEFAULT_FROM_EMAIL,\r\n request=request,\r\n domain_override=request.get_host())\r\n else:\r\n # bad user? 
tick the rate limiter counter\r\n AUDIT_LOG.info(\"Bad password_reset user passed in.\")\r\n limiter.tick_bad_request_counter(request)\r\n\r\n return JsonResponse({\r\n 'success': True,\r\n 'value': render_to_string('registration/password_reset_done.html', {}),\r\n })", "def action_reset_password(self):\n # prepare reset password signup\n create_mode = bool(self.env.context.get('create_user'))\n\n # no time limit for initial invitation, only for reset password\n expiration = False if create_mode else now(days=+1)\n\n self.mapped('partner_id').signup_prepare(signup_type=\"reset\", expiration=expiration)\n\n # send email to users with their signup url\n template = False\n if create_mode:\n try:\n template = self.env.ref('loyalty.set_password_email', raise_if_not_found=False)\n except ValueError:\n pass\n if not template:\n template = self.env.ref('loyalty.reset_password_email')\n assert template._name == 'mail.template'\n\n template_values = {\n 'email_to': '${object.email|safe}',\n 'email_cc': False,\n 'auto_delete': True,\n 'partner_to': False,\n 'scheduled_date': False,\n }\n template.write(template_values)\n\n for user in self:\n if not user.email:\n raise UserError(_(\"Cannot send email: user %s has no email address.\") % user.name)\n with self.env.cr.savepoint():\n template.with_context(lang=user.lang).send_mail(user.id, force_send=True, raise_exception=True)\n _logger.info(\"Password reset email sent for user <%s> to <%s>\", user.login, user.email)", "def forgot_passwd_check(request, uidb64=None, token=None):\n assert uidb64 is not None and token is not None\n dc1_settings = DefaultDc().settings\n sms_registration = dc1_settings.SMS_REGISTRATION_ENABLED\n\n if sms_registration:\n set_password_form = SMSSendPasswordResetForm\n else:\n set_password_form = PasswordResetForm\n\n if request.method == 'POST':\n try:\n user = User.objects.get(id=urlsafe_base64_decode(uidb64))\n profile = user.userprofile\n except (ValueError, OverflowError, User.DoesNotExist):\n profile = None\n\n if profile and profile.email_token == token:\n # Email address is verified, we cant compare to token as register token is different to reset one.\n profile.email_token = ''\n profile.email_verified = True\n # This may look strange - setting the phone_verified before the user logs in. 
It is not :) We are sending\n # new password to phone number in profile, after the user logs in we would set phone_verified to True anyway\n if sms_registration:\n profile.phone_verified = True\n profile.save()\n\n return password_reset_confirm(\n request,\n uidb64=uidb64,\n token=token,\n template_name='gui/accounts/forgot_check.html',\n set_password_form=set_password_form,\n post_reset_redirect=reverse('forgot_check_done'),\n current_app='gui',\n extra_context={\n 'sms_registration': sms_registration,\n }\n )", "def save(self, domain_override=None,\n subject_template_name='registration/password_reset_subject.txt',\n email_template_name='registration/password_reset_email.html',\n use_https=False, token_generator=default_token_generator,\n from_email=None, request=None, html_email_template_name=None,\n extra_email_context=None):\n email = self.cleaned_data[\"email\"]\n for user in self.get_users(email):\n if not domain_override:\n current_site = get_current_site(request)\n site_name = current_site.name\n domain = current_site.domain\n else:\n site_name = domain = domain_override\n context = {\n 'email': email,\n 'domain': domain,\n 'site_name': site_name,\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)).decode(),\n 'user': user,\n 'token': token_generator.make_token(user),\n 'protocol': 'https' if use_https else 'http',\n **(extra_email_context or {}),\n }\n self.send_mail(\n subject_template_name, email_template_name, context, from_email,\n email, html_email_template_name=html_email_template_name,\n )", "def password_reset_confirm(request, uidb36=None, token=None,\n template_name='gallery/password_reset_confirm.html',\n token_generator=default_token_generator,\n set_password_form=SetPasswordForm,\n post_reset_redirect=None):\n assert uidb36 is not None and token is not None # checked by URLconf\n if post_reset_redirect is None:\n post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete')\n try:\n uid_int = base36_to_int(uidb36)\n except ValueError:\n raise HttpResponseNotFound\n\n user = get_object_or_404(authmodels.User, id=uid_int)\n context_instance = RequestContext(request)\n\n if token_generator.check_token(user, token):\n context_instance['validlink'] = True\n if request.method == 'POST':\n form = set_password_form(user, request.POST)\n if form.is_valid():\n # we can't use form.save b/c that will update the p/w on the\n # model object, we need to do it in LDAP\n if settings.USE_LDAP:\n ldapper = get_ldap_connection()\n dn = get_user_dn(user.username)\n new_password = request.POST.get('new_password1')\n ldapper.passwd_s(dn, None, new_password)\n ldapper.unbind_s()\n request.notifications.add(_('Password change successful.'))\n else:\n form.save()\n return HttpResponseRedirect(post_reset_redirect)\n else:\n form = set_password_form(None)\n else:\n context_instance['validlink'] = False\n form = None\n context_instance['form'] = form\n return render_to_response(template_name, context_instance=context_instance)", "def generate_forgot_password_token(self, email):\n payload = {'appkey': self._lr_object._get_api_key(), 'appsecret': self._lr_object._get_api_secret(),\n 'email': email}\n url = SECURE_API_URL + \"raas/v1/account/password/forgot\"\n return self._lr_object._get_json(url, payload)", "def start_reset_password_process_step_2(self):\n # fill in the form with no error\n email_field = self.driver.find_element_by_id(\"id_email\")\n email_field.send_keys(\"toto@mail.com\")\n submit_button = self.driver.find_element_by_id(\"submit-id-submit\")\n 
submit_button.click()\n # wait for email receiving\n actions = ActionChains(self.driver)\n actions.pause(1)\n actions.perform()\n # test that one message has been sent\n self.assertEqual(len(mail.outbox), 1)\n # get the mail content\n mail_content = mail.outbox[0].body\n # extract \"reset password link\"\n match = re.search(\n \"choisir un nouveau mot de passe :\\n(.*)\\nPour mémoire\",\n mail_content\n )\n return match", "def password_reset(request):\n\tif not request.user.is_authenticated():\n\t\treturn django.contrib.auth.views.password_reset(request,\n template_name='usermgr/password_reset_form.html',\n email_template_name= 'usermgr/password_reset_email.html',\n post_reset_redirect='/usermgr/password_reset/done/')\n\telse:\n\t\treturn HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)", "def send_token(email, token):\n with open(\"templates/email_password_recovery.txt\", mode=\"r\") as file_pointer:\n string = file_pointer.read()\n\n string = string % (token, email, token)\n sendemail.send_email(email, \"Skvaderhack Password Recovery\", string, \"baron@skvaderhack.xyz\")", "def login_reset():\n # Start with the currently logged in user\n\n if request.method == \"GET\":\n # In browser request that user wants to reset the password\n # Create a token\n # Send out an email\n #\n return flask.render_template('profile.html', name=session[\"user\"][\"label\"], email=session[\"user\"][\"email\"])\n\n if request.method == \"POST\":\n # In browser request that user wants to reset the password\n label = flask.request.form[\"label\"]\n passwd = flask.request.form[\"passwd\"]\n\n # Verify that the user is logged in or return\n if not session.has_key(\"user\"):\n return flask.Response('{\"error\" : \"User not logged in\" }')\n else:\n # Chagne the information in the session\n session[\"user\"][\"label\"] = label\n # Locate the record\n conn.register([model.User])\n dbobj = conn[current_app.config[\"CONFIGDB\"]]\n userdoc = dbobj[\"users\"].User.find_one({'_id' : ObjectId(session[\"user\"][\"id\"])})\n userdoc[\"passwd\"] = passwd\n userdoc[\"password_status\"] = \"ready\"\n userdoc[\"label\"] = label\n userdoc.validate()\n userdoc.save()\n\n return flask.Response('{\"success\" : \"\" }')", "def get(self, request):\n form = ConfirmPasswordForm()\n token = request.GET.get('token')\n if not token:\n raise Http404('Page not found.')\n token_obj = PasswordResetTokens.objects.filter(token=token)\n import pdb\n pdb.set_trace()\n if not token_obj:\n raise Http404('Fake token supplied.')\n # tz = pytz.timezone(\"UTC\")\n # if tz.localize(datetime.now(), is_dst=None) > token_obj[0].expired_time:\n # raise Http404('Token Expired. 
Try again')\n return render(request, 'user_registrations/set_password.html', {'form': form, 'token': token})", "def reset_password_token(email: str) -> str:\n data = {'email': email}\n response = requests.post(f'{URL}/reset_password', data=data)\n assert response.status_code == 200\n token = response.json()\n return token.get('reset_token')", "def user_forgotpassword(): \n data = user_obj.user_forgotpassword(request.forms) \n return data", "def reset_password(): \n \n form = ResetPasswordForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n \n hashed_pw = bcrypt.hashpw(form.new_password.data.encode('utf-8'), bcrypt.gensalt())\n user = mongo.db.user.find_one({'username': form.username.data})\n \n if user and bcrypt.checkpw(request.form['passphrase'].encode('utf-8'), user['passphrase']):\n mongo.db.user.find_one_and_update({'username': form.username.data}, {'$set':{'hashed_password':hashed_pw}})\n \n flash(f'Password reset was successful, {form.username.data}, pleaselogin again with your new password.','success'\n )\n return redirect(url_for('login'))\n \n return render_template('pages/reset.html', title='Forgot Password', form=form)", "def forgot_password(self, version):\n form=cgi.FieldStorage(\n fp=self.rfile,\n headers=self.headers,\n environ={'REQUEST_METHOD':'POST','CONTENT_TYPE':self.headers['Content-Type'],}\n )\n version=version.split('/')[0]\n host = self.headers['Host']\n\n data={'email':form['email'].value}\n user = UserServices()\n response_data = user.forgot(data,host,version)\n return response_data", "def send_reset_email(user):\n msg = emails.reset_email(user)\n try:\n mail.send(msg)\n except Exception as e:\n traceback.print_exc()", "async def password_link_generate(mail: TextData, background_tasks: BackgroundTasks):\n email = mail.data\n mail, subject, body = await AccountProcessor.send_reset_link(email.strip())\n background_tasks.add_task(Utility.validate_and_send_mail, email=mail, subject=subject, body=body)\n return {\"message\": \"Success! 
A password reset link has been sent to your mail id\"}", "def validation_email_sent(request):\n assert(settings.EMAIL_VALIDATION == True)\n logging.debug('')\n data = {\n 'email': request.user.email,\n 'change_email_url': reverse('user_changeemail'),\n 'action_type': 'validate'\n }\n return render_to_response('authenticator/changeemail.html', RequestContext(request, data))", "def reset_password(token):\n\n expired, invalid, user = reset_password_token_status(token)\n\n if invalid:\n return redirect(url_for('frontend.forgot_password') + '?invalid')\n elif expired:\n send_reset_password_instructions(user)\n return redirect(url_for('frontend.forgot_password') + '?expired')\n elif request.method == 'GET':\n return redirect(url_for('frontend.reset_password', token=token))\n\n form = _security.reset_password_form()\n\n if form.validate_on_submit():\n after_this_request(_commit)\n update_password(user, form.newPassword.data)\n login_user(user)\n else:\n return jsonify({'errors': form.errors}), HTTPStatus.BAD_REQUEST\n\n return jsonify({\n 'token': user.get_auth_token(),\n 'user': user,\n })", "def test_password_reset_email(self, send_mail_mock):\n pw_reset_name = 'auth_password_reset'\n # ensure view exists\n pw_reset_get_response = self.client.get(reverse(pw_reset_name))\n self.assertEqual(pw_reset_get_response.status_code, 200)\n # post data to password reset; make Django send email\n data = {'email': self.email}\n self.client.post(reverse(pw_reset_name), data=data, follow=True)\n # verify that email sent with right template\n send_mail_mock.assert_called_with(\n ANY,\n 'registration/password_reset_email.txt',\n ANY, ANY, ANY,\n html_email_template_name=ANY)", "def send_reset_password_email(self, user, base_url):\n\n parsed_base_url = urlparse(base_url)\n if parsed_base_url.hostname != settings.PUBLIC_WEB_FRONTEND_HOSTNAME:\n raise BaseURLHostnameNotAllowed(\n f'The hostname {parsed_base_url.netloc} is not allowed.'\n )\n\n signer = self.get_reset_password_signer()\n signed_user_id = signer.dumps(user.id)\n\n if not base_url.endswith('/'):\n base_url += '/'\n\n reset_url = urljoin(base_url, signed_user_id)\n\n email = ResetPasswordEmail(user, reset_url, to=[user.email])\n email.send()", "def post(self, request, *args, **kwargs):\n data = request.data\n serializer = self.serializer_class(data=data)\n serializer.is_valid(raise_exception=True)\n try:\n user = get_object_or_404(User, email=data['email'])\n current_site = get_current_site(request)\n token = password_rest_token.make_token(user),\n uidb64 = urlsafe_base64_encode(force_bytes(data['email'])).decode()\n body = json.dumps({\n 'message': 'Please use the url below to rest your password,\\\n This expires after an hour, Thank you.',\n 'domain': current_site.domain + f'/api/reset/{uidb64}/{token[0]}',\n })\n from_email = settings.DEFAULT_FROM_EMAIL\n to_email = data['email']\n subject = 'Confirm Your Article Account Password Reset'\n send_mail(subject, body, from_email, [\n to_email], fail_silently=False)\n response = {\n 'message': 'Please check your email to confirm rest password',\n 'status_code': status.HTTP_200_OK}\n except Exception as e:\n response = {'error': e, 'status_code': status.HTTP_400_BAD_REQUEST}\n return Response(response, content_type='text/json')", "def send_password_reset(user):\n _log('++ sending password reset email for: {} {}'.format(user.first_name, user.last_name))\n secret_string = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(20))\n\n # if local set the domain to localhost\n if 
ENV_DICT['ENVIRON'] == 'LOCAL':\n secret_link = 'http://localhost:8080/reset/{}/'.format(secret_string)\n # otherwise use the subdomain of the tenancy\n else:\n secret_link = 'http://{}.cpisearch.io/reset/{}/'.format(user.tenancy, secret_string)\n\n reset_link_object = PasswordResetLink(\n user_id=user.user_id,\n secret_link=secret_string,\n tenancy=user.tenancy,\n )\n db.session.add(reset_link_object)\n db.session.commit()\n send_email(\n to_email=user.email,\n subject='SuccessKit Password Reset',\n template_path='emails/password_reset_email.html',\n template_vars={\n 'user': user,\n 'secret_link': secret_link\n }\n )", "def password_reset(*args, **kwargs):\n kwargs['password_reset_form'] = CustomPasswordResetForm\n return django_password_reset(*args, **kwargs)", "def create(self,request):\n try:\n print(request.data)\n user = models.UserProfile.objects.get(email=request.data['email'])\n current_site=get_current_site(request)\n email_subject='Reset Password'\n message=render_to_string('reset_password.html',{\n 'user':user,\n 'domain':current_site.domain,\n 'uid':urlsafe_base64_encode(force_bytes(user.id)),\n 'token':account_activation_token.make_token(user),\n })\n to_email= user.email\n email= EmailMessage(email_subject,message,to=[to_email])\n email.send()\n return Response(\n {\n \"status\":\"The Reset password email has been sent.\"\n }\n )\n except(TypeError, ValueError, KeyError, OverflowError, models.UserProfile.DoesNotExist):\n user = None\n return Response(\n {\n \"status\":\"No matching account found.\"\n }\n )", "def init_reset_pw(email: str) -> FluxData:\n current_app.logger.info(f'Trying to send password reset email to {email}')\n try:\n send_password_reset_mail(email)\n except BadCode as error:\n current_app.logger.error(f'Sending password reset e-mail for {email} failed: {error}')\n return error_response(message=error.msg)\n\n return success_response(message=ResetPwMsg.send_pw_success)", "def save(self, domain_override=None,\n subject_template_name='registration/password_reset_subject.txt',\n email_template_name='registration/password_reset_email.html',\n txt_email_template_name='registration/password_reset_email.txt',\n use_https=False, token_generator=default_token_generator,\n from_email=None, request=None):\n for user in self.users_cache:\n if not domain_override:\n current_site = get_current_site(request)\n site_name = current_site.name\n domain = current_site.domain\n else:\n site_name = domain = domain_override\n c = {\n 'email': user.email,\n 'domain': domain,\n 'site': site_name,\n 'uid': int_to_base36(user.id),\n 'user': user,\n 'token': token_generator.make_token(user),\n 'protocol': use_https and 'https' or 'http',\n }\n subject = loader.render_to_string(subject_template_name, c)\n # Email subject *must not* contain newlines\n subject = ''.join(subject.splitlines())\n \n text_content = render_to_string('registration/activation_email.txt', c)\n \n utils.send_mail(email_template_name, c, subject, text_content, \n settings.DEFAULT_FROM_EMAIL, [user.email,], None)", "async def renew_email(self, token: str) -> Optional[str]:\n return await self.signup_email(token)", "def save(self, domain_override=None,\r\n subject_template_name='registration/password_reset_subject.txt',\r\n email_template_name='registration/password_reset_email.html',\r\n use_https=False, token_generator=default_token_generator,\r\n from_email=None, request=None):\r\n from django.core.mail import send_mail\r\n UserModel = get_user_model()\r\n email = self.cleaned_data[\"email\"]\r\n username = 
self.cleaned_data[\"username\"]\r\n user = User.objects.get(username__exact=username)\r\n\r\n if user.is_active and user.has_usable_password():\r\n # Make sure that no email is sent to a user that actually has\r\n # a password marked as unusable\r\n if not domain_override:\r\n current_site = get_current_site(request)\r\n site_name = current_site.name\r\n domain = current_site.domain\r\n else:\r\n site_name = domain = domain_override\r\n c = {\r\n 'email': user.email,\r\n 'domain': domain,\r\n 'site_name': site_name,\r\n 'uid': urlsafe_base64_encode(force_bytes(user.pk)),\r\n 'user': user,\r\n 'token': token_generator.make_token(user),\r\n 'protocol': 'https' if use_https else 'http',\r\n }\r\n subject = loader.render_to_string(subject_template_name, c)\r\n # Email subject *must not* contain newlines\r\n subject = ''.join(subject.splitlines())\r\n email = loader.render_to_string(email_template_name, c)\r\n send_mail(subject, email, from_email, [user.email])", "def validate_password_reset_link(request, token):\n\n if request.method == \"GET\":\n passwordResetTokenGenerator = PasswordResetTokenGenerator()\n id = PasswordResetTokenGenerator.get_token_value(passwordResetTokenGenerator, token)\n\n if id != None:\n id = int(id)\n\n if User.objects.filter(id = id).exists():\n user = User.objects.get(id = id)\n request.session['user'] = user.username\n\n return render(request, 'app/referee/change_forgot_password.html', {\n 'title':'Change Password',\n 'user': user.username\n })\n else: # the user is invalid\n return redirect(reverse(URL_BAD_REQUEST))\n else: # either the link is expired or invalid\n return redirect(reverse(URL_BAD_REQUEST))\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def generate_password_reset_token(email: str) -> str:\n expires_delta = timedelta(hours=config.EMAIL_RESET_TOKEN_EXPIRE_HOURS)\n now = datetime.utcnow()\n expires = now + expires_delta\n exp = expires.timestamp()\n return jwt.encode(\n {\n \"exp\": exp,\n \"nbf\": now,\n \"sub\": PASSWORD_RESET_SUBJECT,\n \"email\": email\n },\n config.SECRET_KEY,\n algorithm=ALGORITHM\n )", "def test_reset_password_email(self, send_email):\r\n\r\n good_req = self.request_factory.post('/password_reset/', {'email': self.user.email})\r\n good_resp = password_reset(good_req)\r\n self.assertEquals(good_resp.status_code, 200)\r\n obj = json.loads(good_resp.content)\r\n self.assertEquals(obj, {\r\n 'success': True,\r\n 'value': \"('registration/password_reset_done.html', [])\",\r\n })\r\n\r\n (subject, msg, from_addr, to_addrs) = send_email.call_args[0]\r\n self.assertIn(\"Password reset\", subject)\r\n self.assertIn(\"You're receiving this e-mail because you requested a password reset\", msg)\r\n self.assertEquals(from_addr, settings.DEFAULT_FROM_EMAIL)\r\n self.assertEquals(len(to_addrs), 1)\r\n self.assertIn(self.user.email, to_addrs)\r\n\r\n #test that the user is not active\r\n self.user = User.objects.get(pk=self.user.pk)\r\n self.assertFalse(self.user.is_active)\r\n re.search(r'password_reset_confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/', msg).groupdict()", "def email_body_password_changed_confirmation(url):\n\tmsg = '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ffffff\"><tbody><tr><td align=\"center\" valign=\"top\"></td></tr></tbody></table>'\n\tmsg = msg + '<table cellspacing=\"0\" cellpadding=\"0\" width=\"100%\" bgcolor=\"#ebebeb\"><tbody><tr>'\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6; border-top: 2px solid #e6e6e6\" cellspacing=\"0\" 
cellpadding=\"10\" width=\"600\">'\n\tmsg = msg + '<tbody>'\n\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF; padding-top:35px\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<a href=\"https://insprite.co\"><img src=\"http://ryanfbaker.com/insprite/inspriteLogoB.png\" border=\"0\" alt=\"Insprite\" align=\"center\" width=\"200px\" height=\"55px\" /></a>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</tbody>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-1.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"85\" width=\"600\" height=\"350\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;padding-top:50px;\" align=\"left\" valign=\"top\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:14px;\">We\\'re just sending you a reminder: You changed your password.<br><br>'\n\tmsg = msg + 'We want to keep your information safe and secure, so if you didn\\'t change it yourself <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">give us a holler ASAP</a> and we\\'ll get on it.<br><br></font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img style=\"padding-right: 6px\" src=\"http://ryanfbaker.com/insprite/facebookIcon.png\">'\n\tmsg = msg + '<img style=\"padding-right: 6px\" src=\"http://ryanfbaker.com/insprite/twitterIcon.png\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/instagramIcon.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 5px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<img src=\"http://ryanfbaker.com/insprite/spacer-2.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr><td style=\"background-color: #ffffff; border-top: 0px solid #333333; border-bottom: 10px solid #FFFFFF;\" align=\"center\" valign=\"middle\">'\n\tmsg = msg + '<font style=\"font-family:Helvetica Neue;color:#555555;font-size:10px;\"> <a href=\"mailto:thegang@insprite.co\" style=\"color:#1488CC\">Contact Us</a>'\n\tmsg = msg + '| Sent by <a href=\"https://insprite.co\" style=\"color:#1488CC\">Insprite</a>, California, USA. 
| <a href=\"#\" style=\"color:#1488CC\">Unsubscribe</a></font>'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\n\tmsg = msg + '<table style=\"border-left: 2px solid #e6e6e6; border-right: 2px solid #e6e6e6;\" cellspacing=\"0\" cellpadding=\"0\" width=\"600\">'\n\tmsg = msg + '<tr> <td style=\"border-top: 0px solid #333333; border-bottom: 0px solid #FFFFFF;\">'\n\tmsg = msg + '<img width=\"596px\" src=\"http://ryanfbaker.com/insprite/footerImage.png\">'\n\tmsg = msg + '</td></tr>'\n\tmsg = msg + '</table>'\n\treturn msg", "async def reset_post_handler(req: web.Request) -> web.Response:\n db = req.app[\"db\"]\n\n client = req[\"client\"]\n\n form_data = await req.post()\n\n password = form_data.get(\"password\", \"\")\n confirm = form_data.get(\"confirm\", \"\")\n verification_key = form_data.get(\"verification\", None)\n return_to = get_return_to_from_query(req)\n\n # Check that the hidden verification key matches the one attached to the logging-in session. Redirect to `return_to`\n # URL if verification fails (this will end up on login page with correct query parameter.\n if not await virtool.db.sessions.check_verification_key(db, client.session_id, verification_key, mode=\"reset\"):\n return web.Response(status=302, headers={\"Location\": return_to})\n\n user_id = await virtool.db.utils.get_one_field(db.sessions, \"reset_user_id\", client.session_id)\n\n if not user_id:\n return web.Response(status=302, headers={\"Location\": return_to})\n\n errors = list()\n\n # Re-render the reset page with an error message if the new password is invalid.\n if password != confirm:\n errors.append(\"Passwords do not match\")\n\n minimum_password_length = req.app[\"settings\"][\"minimum_password_length\"]\n\n if len(password) < minimum_password_length:\n errors.append(f\"Password must contain at least {minimum_password_length} characters\")\n\n if errors:\n reset_code = await virtool.db.sessions.set_reset_errors(db, client.session_id, errors)\n return web.Response(status=302, headers={\"Location\": f\"/reset?return_to={return_to}&code={reset_code}\"})\n\n # Unset all reset page errors.\n await virtool.db.sessions.set_reset_errors(db, client.session_id)\n\n # Update the user password and disable the `force_reset`.\n await virtool.db.users.edit(db, user_id, force_reset=False, password=password)\n\n # Authenticate and return a redirect response to the `return_to` path. This is identical to the process used for\n # successful login requests.\n return await auth_response(req, return_to, user_id, False)", "def send_mail():\n email_address = request.args.get('emailAddress') # get email address from the form\n response = call_sendmail_endpoint(session['access_token'], session['alias'], email_address)\n print(session)\n if response == 'SUCCESS':\n show_success = 'true'\n show_error = 'false'\n else:\n print(response)\n show_success = 'false'\n show_error = 'true'\n\n session['pageRefresh'] = 'false'\n return render_template('main.html', name=session['alias'],\n emailAddress=email_address, showSuccess=show_success,\n showError=show_error)" ]
[ "0.7280214", "0.7197617", "0.7117962", "0.7043637", "0.704304", "0.6989297", "0.6984096", "0.69258046", "0.6875457", "0.6868493", "0.6868304", "0.6856506", "0.6787877", "0.6750242", "0.67212576", "0.6699849", "0.66945165", "0.66842115", "0.6680731", "0.66321975", "0.6630563", "0.66212755", "0.6620363", "0.66172636", "0.66118574", "0.6568382", "0.6567901", "0.6528503", "0.65186435", "0.6515892", "0.64961827", "0.6462295", "0.6437355", "0.643198", "0.6401477", "0.63717806", "0.63654107", "0.6335338", "0.63302004", "0.6329625", "0.63252854", "0.62911874", "0.62732404", "0.6247057", "0.6188994", "0.6185999", "0.6184019", "0.616427", "0.6138337", "0.6131507", "0.61170065", "0.6097317", "0.60709745", "0.6059395", "0.6059071", "0.6017966", "0.5998383", "0.5996638", "0.5988652", "0.59857273", "0.59776825", "0.5972075", "0.5964313", "0.5958839", "0.59581286", "0.59469795", "0.5938609", "0.5933287", "0.5923445", "0.59094065", "0.59041995", "0.59001845", "0.58802164", "0.5871864", "0.58523244", "0.5836944", "0.5802202", "0.57972026", "0.57955533", "0.57947284", "0.57876635", "0.5777424", "0.5775826", "0.5765698", "0.5765088", "0.5764911", "0.5739367", "0.57362014", "0.5730589", "0.5705214", "0.57030547", "0.5701672", "0.56964874", "0.56964153", "0.5682013", "0.56806904", "0.5679136", "0.5674427", "0.5669677", "0.5644075" ]
0.7324574
0
Route handling the password reset token (the token logic itself is coded in models.py): if the token is valid, the user gets the chance to change their password and that change is committed to the database; if the token is invalid or expired, a notice is rendered to the user instead
def reset_token(token): if current_user.is_authenticated: return redirect(url_for('users.home')) user = User.verify_secret_token(token) if user is None: flash('That is invalid or expired token', 'warning') return redirect(url_for('users.reset_password')) form = ResetPasswordForm() if form.validate_on_submit(): hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8') user.password = hashed_password db.session.commit() return redirect(url_for('users.login')) return render_template('reset_token.html', form=form)
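The route above calls two model-side helpers that the query says live in models.py but that are not included in this record, so the following is only a minimal sketch of what they might look like, built on itsdangerous' URLSafeTimedSerializer. Only the name verify_secret_token comes from the source; the generator name get_secret_token, the SECRET_KEY config lookup, the 1800-second expiry, and the db import path are assumptions for illustration.

    # Sketch of the token helpers assumed to live in models.py (not taken from the source).
    from itsdangerous import URLSafeTimedSerializer, BadSignature, SignatureExpired
    from flask import current_app
    from yourapp import db  # assumption: the application's SQLAlchemy instance

    class User(db.Model):
        # ... id, email, password columns defined as in the rest of models.py ...

        def get_secret_token(self):
            # Serialize the user id with the app secret; the token embeds a signing timestamp.
            s = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
            return s.dumps({'user_id': self.id})

        @staticmethod
        def verify_secret_token(token, max_age=1800):
            # Return the matching User, or None when the token is tampered with or expired,
            # which is exactly the None branch the route checks before flashing a warning.
            s = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
            try:
                user_id = s.loads(token, max_age=max_age)['user_id']
            except (BadSignature, SignatureExpired):
                return None
            return User.query.get(user_id)

The route itself only needs verify_secret_token; the companion get_secret_token is included to show where the token embedded in the reset email would come from under these assumptions.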
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def password_resetenter(request, uidb64=None, token=None):\n\n\tcontext_dict = {}\n\tif request.method == 'POST':\n\t\tassert uidb64 is not None and token is not None\n\t\tuid = urlsafe_base64_decode(uidb64)\n\t\tuser = models.Teacher.objects.get(\n\t\t\tsoft_delete=False, pk=uid\n\t\t)\n\t\tdb_user = user.user\n\t\treset_token = models.PasswordReset.objects.get(\n\t\t\ttoken=token, user=user\n\t\t)\n\t\ttoken_check = models.PasswordReset.objects.filter(\n\t\t\tuser=user, soft_delete=False, token_consumed=False,\n\t\t).exclude(token=token).first()\n\t\tupdate_fields = []\n\t\ttoken_check.token_consumed = True\n\t\tupdate_fields.append('token_consumed')\n\t\ttoken_check.soft_delete = True\n\t\tupdate_fields.append('soft_delete')\n\t\ttoken_check.save(update_fields=update_fields)\n\t\ttime_threshold = timezone.now() - reset_token.password_request_created_at\n\t\tif time_threshold > timedelta(minutes=30):\n\t\t\ttry:\n\t\t\t\tupdate_fields = []\n\t\t\t\treset_token.token_consumed = True\n\t\t\t\tupdate_fields.append('token_consumed')\n\t\t\t\treset_token.soft_delete = True\n\t\t\t\tupdate_fields.append('soft_delete')\n\t\t\t\treset_token.save(update_fields=update_fields)\n\t\t\texcept Exception as e:\n\t\t\t\tprint (e)\n\t\tif reset_token.user == user and reset_token.token == token:\n\t\t\tif reset_token.token_consumed == False and reset_token.soft_delete == False:\n\t\t\t\ttry:\n\t\t\t\t\tupdate_fields = []\n\t\t\t\t\treset_token.token_consumed = True\n\t\t\t\t\tupdate_fields.append('token_consumed')\n\t\t\t\t\treset_token.soft_delete = True\n\t\t\t\t\tupdate_fields.append('soft_delete')\n\t\t\t\t\treset_token.save(update_fields=update_fields)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint (e)\n\t\t\t\tform = AdminPasswordChangeForm(user=db_user, data=request.POST)\n\t\t\t\tif form.is_valid():\n\t\t\t\t\tform.save()\n\t\t\t\t\thistory = models.History(\n\t\t\t\t\t\tuser=user,\n\t\t\t\t\t\tactivity = \"\",\n\t\t\t\t\t\tactivity_type = \"Reset Password\"\n\t\t\t\t\t)\n\t\t\t\t\thistory.save()\n\t\t\t\t\tcontext_dict[\"message\"] = \"Password changed successfully\"\n\t\t\t\telse:\n\t\t\t\t\tcontext_dict[\"message\"] = \"Password not changed\"\n\t\t\telse:\n\t\t\t\tcontext_dict[\"message\"] = \"Link is no longer valid\"\n\treturn render(request, \"reset.html\", context_dict)", "def reset_password(token):\n # this token just active in 300s\n if request.method == \"POST\":\n email = s.loads(token, salt='email-confirm', max_age=300)\n print(email)\n user = User.query.filter_by(email=email).first()\n\n newPassword1 = request.form.get('newPassword1')\n newPassword2 = request.form.get('newPassword2')\n\n print(user)\n if user:\n \"\"\"kiểm tra password hợp lệ-chỗ này cần chỉnh thêm file html-----------------------------------------------------\"\"\"\n if re.search(PASSWORD_PATTERN, newPassword1) is None:\n flash('Password must be from 6-10 characters, have a digit must occur at least , '\n 'a lower case letter must occur at least once, no whitespace allowed in the entire string.',\n category='error')\n elif newPassword1 != newPassword2:\n flash('Passwords don\\'t match.', category='success')\n else:\n user.password = generate_password_hash(newPassword1, method='sha256')\n db.session.commit()\n print(user.password)\n print(\"đang thay đổi đây.............\")\n flash('Change password successfully!.', category='success')\n return redirect(url_for('auth.login'))\n # ------------------------------------------------------------------------------------------------------------------\n return 
render_template(\"forgotPass.html\")", "def reset_token(token):\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n user = User.verify_reset_token(token)\n if user is None:\n message = \"This is an invalid or expired token\"\n return redirect(url_for(\"forgot\", message=message))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n user.password = hashed_password\n db.session.commit()\n message = f'Password has been updated for {user.display_username}'\n return redirect(url_for('login', message=message))\n return render_template(\"reset_token.html\", title=\"Reset Pasword\", form=form, offer_login=True, offer_register=True)", "def validate_password_reset_link(request, token):\n\n if request.method == \"GET\":\n passwordResetTokenGenerator = PasswordResetTokenGenerator()\n id = PasswordResetTokenGenerator.get_token_value(passwordResetTokenGenerator, token)\n\n if id != None:\n id = int(id)\n\n if User.objects.filter(id = id).exists():\n user = User.objects.get(id = id)\n request.session['user'] = user.username\n\n return render(request, 'app/referee/change_forgot_password.html', {\n 'title':'Change Password',\n 'user': user.username\n })\n else: # the user is invalid\n return redirect(reverse(URL_BAD_REQUEST))\n else: # either the link is expired or invalid\n return redirect(reverse(URL_BAD_REQUEST))\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def reset_token(token):\n if current_user.is_authenticated:\n return redirect(url_for('LoadDonor'))\n staff = Staff.verify_reset_token(token)\n if staff is None:\n flash('That is an invalid or expired token', 'warning')\n return redirect(url_for('reset_request'))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n staff.password = hashed_password\n db.session.commit()\n flash('Your password has been updated! You are now able to log in', 'success')\n return redirect(url_for('login'))\n return render_template('reset_token.html', title='Reset Password', form=form)", "def user_reset_password(request, token):\n\n if request.user.is_authenticated():\n return redirect(settings.AFTER_LOGIN_REDIRECT_URL)\n\n form = ResetPasswordForm(request.POST or None)\n\n if request.method == \"POST\":\n if form.is_valid():\n user_auth = get_object_or_404(PasswordResetAuth, token=token)\n user = get_object_or_404(User, email=user_auth.email)\n\n if user_auth.choose_me is True:\n new_password = form.cleaned_data[\"new_password\"]\n user.set_password(new_password)\n user.save()\n\n user_auth.choose_me = False\n user_auth.save()\n return redirect(\"/login/\")\n\n error_message = \"* Either you are not an identified user or \"\\\n \"token has been expired. 
So please click on back.\"\n return render_to_response(\"login/reset_password.html\", {\n \"form\": form,\n \"error_message\": error_message\n }, context_instance=RequestContext(request))\n\n return render_to_response(\"login/reset_password.html\", {\n \"form\": form\n }, context_instance=RequestContext(request))", "def reset_password(token):\n if current_user.is_authenticated:\n return redirect(url_for('main.index'))\n user = User.verify_reset_password_token(token)\n if not user:\n return redirect(url_for('main.index'))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n user.set_password(form.password.data)\n user.email_confirmed = True\n db.session.commit()\n return render_template(\n 'successful_pass_reset.html', title=\"Password Reset\")\n return render_template('reset_password.html', title=\"Password Reset\",\n form=form), 417", "def token_request(request):\n try:\n l_user = request.data[\"user\"] #or email\n except:\n return Response({'message':'No user information received.'}, status=status.HTTP_400_BAD_REQUEST)\n\n l_user = l_user.lower()\n\n try:\n user = User.objects.get(username=l_user)\n except:\n try:\n user = User.objects.get(email=l_user)\n except:\n return Response({'message': l_user + ' does not match any record.'}, status=status.HTTP_400_BAD_REQUEST)\n\n pin = random.randint(0, 1000000)\n try:\n subject = \"Password Reset Token.\"\n sendEmail(user, subject, \"Password Reset\", otp=pin)\n\n #Write to use record\n ResetRequests.objects.create(user = user, token = pin, use_case = 'password reset')\n \n #Add password reset request date here\n return Response({'message':'Token sent to registered email.', 'username' : user.username}, status=status.HTTP_200_OK)\n except Exception as e:\n return Response({'message':'We could not send an email', 'error':e}, status=status.HTTP_400_BAD_REQUEST)", "def reset_password(token):\n\n if not current_user.is_anonymous():\n return redirect(url_for(\"forum.index\"))\n\n form = ResetPasswordForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n expired, invalid, data = user.verify_reset_token(form.token.data)\n\n if invalid:\n flash((\"Your password token is invalid.\"), \"danger\")\n return redirect(url_for(\"auth.forgot_password\"))\n\n if expired:\n flash((\"Your password is expired.\"), \"danger\")\n return redirect(url_for(\"auth.forgot_password\"))\n\n if user and data:\n user.password = form.password.data\n user.save()\n flash((\"Your password has been updated.\"), \"success\")\n return redirect(url_for(\"auth.login\"))\n\n form.token.data = token\n return render_template(\"auth/reset_password.html\", form=form)", "def do_password(token):\n password_reset_token = token\n requested_password = request.form['password']\n requested_password_repeat = request.form['passwordRepeat']\n\n # Only pending states can be used.\n target_user = User.query.filter_by(\n password_reset_token=password_reset_token).first()\n\n if target_user is None:\n return Response(render_template('password/failure.html',\n message=('Unbekannter token. 
Stellen '\n 'sie sicher, dass Sie nicht mehrfach '\n 'eine Passwortzurücksetzung '\n 'angefordert haben und nehmen sie '\n 'immer die aktuelle.')))\n\n if not target_user.state == StateType.PASSWORT_RESET_PENDING:\n return Response(render_template('password/failure.html',\n message='User has no pending password reset.'))\n\n if not requested_password == requested_password_repeat:\n return Response(render_template('password/request.html',\n passwordResetToken=token,\n message='Passwörter stimmen nicht überein.'))\n\n if not target_user.check_password_length(requested_password):\n return Response(render_template('password/request.html',\n passwordResetToken=token,\n message=('Passwort zu kurz. Das '\n 'Passwort muss mindestens {} '\n 'Zeichen haben').format(PASSWORD_MIN_LENGTH)))\n\n if not target_user.check_password_format(requested_password):\n return Response(render_template('password/request.html',\n passwordResetToken=token,\n message='Falsches Passwort Format. Das '\n 'Passwort muss mindestens eine Ziffer enthalten.'))\n\n target_user.set_password(requested_password)\n target_user.state = StateType.ACTIVE\n db.session.commit()\n\n return Response(render_template('password/success.html'))", "def get(self, request):\n form = ConfirmPasswordForm()\n token = request.GET.get('token')\n if not token:\n raise Http404('Page not found.')\n token_obj = PasswordResetTokens.objects.filter(token=token)\n import pdb\n pdb.set_trace()\n if not token_obj:\n raise Http404('Fake token supplied.')\n # tz = pytz.timezone(\"UTC\")\n # if tz.localize(datetime.now(), is_dst=None) > token_obj[0].expired_time:\n # raise Http404('Token Expired. Try again')\n return render(request, 'user_registrations/set_password.html', {'form': form, 'token': token})", "def forgotpassword(request):\n if request.method == 'GET':\n return render(request, 'app/other/forgot_password.html', {'title':'Forgot Password?',})\n elif request.method == 'POST':\n username = request.POST['username']\n\n if User.objects.filter(username = username).exists():\n user = User.objects.get(username = username)\n if Referee.objects.filter(user = user).exists():\n referee = Referee.objects.get(user = user)\n # generate token\n passwordResetTokenGenerator = PasswordResetTokenGenerator()\n token = PasswordResetTokenGenerator.generate_token(passwordResetTokenGenerator, str(user.id))\n token = str(token.decode('utf-8'))\n # email to referee\n subject = \"[Password Reset Link]\"\n message = 'http:////localhost:8000//reset//token=//' + token\n content = \"<br>Dear sir,</br><br></br><br></br>Link is: \"+message+'. 
Please click on the link to change the credentials.'+\"<br></br><br></br>Regards,<br></br>PhDPortal.\"\n email = []\n receiver = referee.user\n email.append(receiver.email)\n send_email_task.delay(email, subject, content)\n # redirect to same page with status to check your mail and click on activation link\n \n dict = {'status' : 'Done', 'message' : 'An Activation link has been sent to your mail-id'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else: # given username is not valid to use this feature\n dict = {'status': 'Error', 'message' : 'You are not Authorized to change password'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else: # given username is not valid to use this feature\n dict = {'status': 'Error', 'message' : 'Invalid Username, Try Again!'}\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def post(self, request, token):\n form = PasswordResetForm(request.DATA)\n if form.is_valid():\n user_data = get_user_data(\n signing.loads(\n token,\n max_age=self.token_expires,\n salt=self.salt))\n if user_data:\n user_data.set_password(request.DATA['password1'])\n user_data.save()\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Change successfully\",\n 'message': \"your password has Change successfully\"})\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Sorry something wrong\",\n 'message': \"sorry try again to set new password\"})\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"Sorry something wrong\",\n 'message': \"sorry try again to set new password\"})", "def reset_password(token):\n\n expired, invalid, user = reset_password_token_status(token)\n\n if invalid:\n return redirect(url_for('frontend.forgot_password') + '?invalid')\n elif expired:\n send_reset_password_instructions(user)\n return redirect(url_for('frontend.forgot_password') + '?expired')\n elif request.method == 'GET':\n return redirect(url_for('frontend.reset_password', token=token))\n\n form = _security.reset_password_form()\n\n if form.validate_on_submit():\n after_this_request(_commit)\n update_password(user, form.newPassword.data)\n login_user(user)\n else:\n return jsonify({'errors': form.errors}), HTTPStatus.BAD_REQUEST\n\n return jsonify({\n 'token': user.get_auth_token(),\n 'user': user,\n })", "def reset_post():\n if g.session:\n # User is already authenticated\n return jsonify({'redirect': url_for('index.index')})\n\n form = request.values.get('form', default='email')\n token = request.values.get('token', default='')\n email = request.values.get('email', default='')\n password = request.values.get('password', default='')\n\n if form == 'password':\n try:\n user: User = db.session.query(User) \\\n .filter((User.password_token == token) & User.reset_active) \\\n .one()\n if user.is_reset_expired():\n return jsonify({'success': False, 'reason': 'expired'}), 401\n\n if len(password) < 8:\n return jsonify({'success': False, 'reason': 'password'}), 401\n\n user.set_password(password)\n db.session.commit()\n next_url = url_for('auth.reset_status', success=True)\n return jsonify({'success': True, 'redirect': next_url})\n except NoResultFound:\n return jsonify({'success': False, 'reason': 'token not found'}), 401\n else:\n try:\n user: User = db.session.query(User) \\\n .filter(User.email == email).one()\n user.reset_password()\n db.session.commit()\n\n reset_url = 
urllib.parse.urljoin(\n request.host_url,\n url_for('auth.reset_get', token=user.password_token))\n kwargs = {\n 'subject': gettext('Reset Password'),\n 'body': reset_url,\n 'recipients': [user.email]\n }\n mail.send_mail(**kwargs)\n next_url = url_for('auth.reset_status', sent=True)\n return jsonify({'success': True, 'redirect': next_url})\n except NoResultFound:\n return jsonify({'success': False, 'reason': 'email'}), 401", "def request_password_reset():", "def reset_password(self, request):\n password = request.data['password']\n forgot_password_hash = request.data['uuid']\n url_expired_response = Response(\n {\"message\": 'Password reset url expired'}, status=status.HTTP_400_BAD_REQUEST)\n\n try:\n user = CurationUser.objects.get(\n forgot_password_hash=forgot_password_hash)\n\n # get pwd_expiry_cycle_days value from admin_settings table\n pwd_expiry_cycle_days = get_password_expiry_cycle_days()\n\n if user is None:\n return url_expired_response\n if user.forgot_password_hash_expiry_on.isoformat() < datetime.now().isoformat():\n return url_expired_response\n\n successs_response = {\n \"message\": \"password set successfully\"\n }\n\n mfaUrl = self._get_mfa_uri(user.email, user.otp_secret_key)\n\n if (not user.is_active and user.mfa_type == CurationUser.MfaType.google) or user.is_staff:\n successs_response[\"mfaUrl\"] = mfaUrl\n\n # if password reset is requrested by admin then reset the otp secret key and return mfa_url\n if user.reset_password_requested_by is not None:\n reset_password_requested_by = UserSerializer(\n user.reset_password_requested_by).data\n reset_password_requested_by_user_group = reset_password_requested_by.get('groups')[0]\n admin_group = Group.objects.get(name='admin')\n\n if admin_group is not None \\\n and admin_group.name == reset_password_requested_by_user_group.get('name') \\\n and user.mfa_type == CurationUser.MfaType.google:\n successs_response[\"mfaUrl\"] = mfaUrl\n\n if can_update_user_password(user, password):\n user.update_password(password, pwd_expiry_cycle_days)\n user.is_active = True\n user.reset_password_requested_by = None\n user.forgot_password_hash = None\n user.forgot_password_hash_expiry_on = None\n return Response(successs_response, status=status.HTTP_200_OK)\n else:\n error_response = {'errors': {}}\n error_response['errors']['new_password'] = [\"Password already used before.\"]\n raise serializers.ValidationError(error_response)\n except CurationUser.DoesNotExist:\n return url_expired_response\n except ValidationError as err:\n raise err\n except:\n return Response({\"message\": \"Unable to reset password\"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)", "def reset_password():\n password = request.get_json().get('password')\n access_token = authentication_request()\n\n if access_token:\n # Attempt to decode the token and get the User ID\n user_id = Users.decode_token(access_token)\n if not isinstance(user_id, str):\n user = Users.query.filter_by(id=user_id).first()\n try:\n if not user:\n raise exceptions.NotFound()\n\n valid_password = check_password_validation(password)\n user.password = Users.hash_password(valid_password)\n user.save()\n # db.session.commit()\n return {\"message\": \"you have succesfuly reset your password\"}, status.HTTP_200_OK\n \n except Exception as error:\n \n return {\"message\": str(error)}, status.HTTP_200_OK\n \n else:\n return {\"message\": user_id}, status.HTTP_401_UNAUTHORIZED\n\n return {\"message\": \"Provide a valid authentication token\"}, status.HTTP_403_FORBIDDEN", "def user_password_reset(self, 
request):\n reset_password_form = ResetPasswordForm(request.form)\n\n if request.method == \"POST\":\n if reset_password_form.validate_on_submit():\n if check_password_hash(current_user.password, reset_password_form.old_password.data):\n new_hashed_password = generate_password_hash(reset_password_form.password.data)\n\n temp = current_user.get_id()\n (role, email) = temp.split(\":\")\n\n # if first element is `sysadmin` instead of a scheme_id\n # call function to reset `sysadmin` pass\n if role == \"sysadmin\":\n self._scheme_handler.update_hash_password(email, new_hashed_password)\n else:\n # regular user reset\n self._student_handler.update_hash_password(current_user.scheme_id, current_user.k_number, new_hashed_password)\n\n flash(\"Password successfully updated\")\n else:\n flash(\"Old password incorrect\")\n else:\n flash(\"Please double check your new password is valid.\")\n \n return render_template(\"user/reset_password.html\", reset_password_form=reset_password_form)", "def password_reset(request):\n try:\n with transaction.atomic():\n try:\n data = request.data\n data = validations_utils.email_validation(data) # Validates email id, it returns lower-cased email in data.\n user = validations_utils.user_validation_with_email(data['email'])\n except ValidationException as e: # Generic exception\n return Response(e.errors, status=e.status)\n current_site = get_current_site(request)\n domain = current_site.domain\n key = utils.create_reset_password_key(user.email)\n utils.send_reset_password_mail(user, key, domain) # Sends an email for resetting the password.\n return Response(messages.PASSWORD_RESET_LINK_SENT, status=status.HTTP_200_OK)\n except IntegrityError:\n return Response(messages.CAN_NOT_RESET_PASSWORD, status=status.HTTP_500_INTERNAL_SERVER_ERROR)", "def reset_password(): \n \n form = ResetPasswordForm()\n if request.method == 'POST':\n if form.validate_on_submit():\n \n hashed_pw = bcrypt.hashpw(form.new_password.data.encode('utf-8'), bcrypt.gensalt())\n user = mongo.db.user.find_one({'username': form.username.data})\n \n if user and bcrypt.checkpw(request.form['passphrase'].encode('utf-8'), user['passphrase']):\n mongo.db.user.find_one_and_update({'username': form.username.data}, {'$set':{'hashed_password':hashed_pw}})\n \n flash(f'Password reset was successful, {form.username.data}, pleaselogin again with your new password.','success'\n )\n return redirect(url_for('login'))\n \n return render_template('pages/reset.html', title='Forgot Password', form=form)", "def reset_password(request):\n\n if \"user_id\" in request.DATA and \"code\" in request.DATA:\n try:\n reset_code = PasswordResetCode.objects.get(\n user_id=request.DATA[\"user_id\"], code=request.DATA[\"code\"]\n )\n except PasswordResetCode.DoesNotExist:\n return JsonResponse(\n \"Unauthorized - Invalid reset code\", status=401, safe=False\n )\n time_now = timezone.now()\n if time_now > reset_code.expiry:\n # current time is later than the expiry\n return JsonResponse(\n \"Unauthorized - Reset code has expired\", status=401, safe=False\n )\n\n if \"password\" not in request.DATA:\n # emit password field -> check validity of code\n return JsonResponse(\"OK - Code is valid\", status=200, safe=False)\n elif \"password_repeat\" not in request.DATA:\n # otherwise repeat must also be specified\n return JsonResponse(\n \"Bad request - Missing fields\", status=400, safe=False\n )\n\n password_invalid = validate_password(\n request.DATA[\"password\"], request.DATA[\"password_repeat\"]\n )\n\n if password_invalid:\n return 
JsonResponse(\n \"Bad request - \" + password_invalid, status=400, safe=False\n )\n\n # update the password\n try:\n user = User.objects.get(id=request.DATA[\"user_id\"])\n except User.DoesNotExist:\n return JsonResponse(\n \"Bad request - No user with that id\", status=400, safe=False\n )\n\n user.set_password(request.DATA[\"password\"])\n user.save()\n\n # code is single use\n reset_code.delete()\n\n return JsonResponse(\"OK - Password updated\", status=200, safe=False)\n\n return JsonResponse(\"Bad request - Missing fields\", status=400, safe=False)", "def password_reset(request):\n\n\tcontext_dict = {}\n\tif request.method == 'POST':\n\t\temail = request.POST.get('email')\n\t\tif email:\n\t\t\tuser = models.Teacher.objects.get(\n\t\t\t\tsoft_delete=False, user__email=email\n\t\t\t)\n\t\t\tif not user:\n\t\t\t\tcontext_dict[\"message\"] = \"Email ID does'nt exist, Enter Correct details\"\n\t\t\tmail = {\n\t\t\t\t'email': email,\n\t\t\t\t'domain': request.META['HTTP_HOST'],\n\t\t\t\t'site_name': 'Placement Portal',\n\t\t\t\t'uid': urlsafe_base64_encode(force_bytes(user.pk)),\n\t\t\t\t'user': user,\n\t\t\t\t'token': ''.join([random.choice(ascii_letters+digits) for i in range (128)]),\n\t\t\t\t'protocol': 'http',\n\t\t\t}\n\t\t\ttry:\n\t\t\t\treset_token = models.PasswordReset(\n\t\t\t\t\tuser=user,\n\t\t\t\t\ttoken=mail['token'],\n\t\t\t\t\ttoken_consumed=False,\n\t\t\t\t)\n\t\t\t\treset_token.save()\n\t\t\texcept Exception as e:\n\t\t\t\tprint (e)\n\t\t\tsubject_template_name = 'password_reset_email_subject.txt'\n\t\t\temail_template_name = 'password_reset_email.html'\n\t\t\tsubject = loader.render_to_string(subject_template_name, mail)\n\t\t\tsubject = ''.join(subject.splitlines())\n\t\t\temail_data = loader.render_to_string(email_template_name, mail)\n\t\t\tsend_mail(subject, email_data, DEFAULT_FROM_EMAIL, [email], fail_silently=False)\n\t\t\tcontext_dict[\"message\"] = \"Email has been sent to your registered Email ID with instructions.\"\n\treturn render(request, \"password_reset_form.html\", context_dict)", "def forgot_password():\n\n if not current_user.is_anonymous():\n return redirect(url_for(\"forum.index\"))\n\n form = ForgotPasswordForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n\n if user:\n token = user.make_reset_token()\n send_reset_token(user, token=token)\n\n flash((\"E-Mail sent! 
Please check your inbox.\"), \"info\")\n return redirect(url_for(\"auth.forgot_password\"))\n else:\n flash((\"You have entered an username or email that is not linked \\\n with your account\"), \"danger\")\n return render_template(\"auth/forgot_password.html\", form=form)", "def forgot_password():\n \n if 'username' in session: \n flash('You are already logged in, you can reset your password here.', 'info')\n return redirect(url_for('dashboard'))\n \n form = ForgotPasswordForm()\n \n if request.method == 'POST':\n if form.validate_on_submit(): \n user = mongo.db.user.find_one({'email':form.email.data})\n\n if user:\n flash('Please enter your security passphrase and create a new password', 'info')\n return redirect(url_for('reset_password')) \n \n flash('Email address not found!', 'danger')\n return render_template('pages/forgot.html', \n title='Forgot Password', \n form=form\n )\n \n return render_template('pages/forgot.html', title='Forgot Password', form=form)", "def login_confirm():\n token = bson.ObjectId(request.args[\"token\"])\n\n # Check whether that user exists\n conn.register([model.User])\n admindb = conn[current_app.config[\"CONFIGDB\"]]\n\n user = admindb[\"users\"].User.find_one({\"token\" : token})\n\n if user == None:\n flash(\"Confirmation link expired or invalid\", \"error\")\n return redirect('/home')\n\n # Remove the token\n del user[\"token\"]\n\n if user[\"password_status\"] == \"new\":\n flash(\"Success, Your email is confirmed, please continue by setting the password here\", \"success\")\n\n elif user[\"password_status\"] == \"reset-request\":\n flash(\"Success, Your request for password reset is verified , please reset the password here\", \"success\")\n\n user[\"password_status\"] = \"reset\"\n user.validate()\n user.save()\n\n # Todo: do user login\n do_user_login(user)\n\n return redirect('/login.reset')", "def post(self, request):\n import pdb\n pdb.set_trace()\n form = ConfirmPasswordForm(request.POST)\n token = request.GET.get('token')\n if not token:\n raise Http404('Tocken not found.')\n if not form.is_valid():\n import pdb\n pdb.set_trace()\n return render(request, 'user_registrations/set_password.html', {'form': form, 'token': token, 'errors': form.errors})\n token_obj = PasswordResetTokens.objects.filter(token=token)\n if not token_obj:\n raise Http404('Fake token supplied.')\n password_1 = form.cleaned_data.get('password_1')\n user = token_obj[0].user\n user.set_password(password_1)\n user.save()\n token_obj[0].delete()\n return HttpResponseRedirect(reverse('login'))", "def password_reset_confirm(request, uidb64, token):\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = User.objects.get(pk=uid)\n try:\n reset_form = ResetPasswordForm(instance=user)\n # urlsafe_base64_decode() decodes to bytestring on Python 3\n except (TypeError, ValueError, OverflowError, user.DoesNotExist):\n user = None\n if user is not None and default_token_generator.check_token(user, token):\n validlink = True\n title = ('Enter new password')\n if request.method == 'POST':\n if 'password-submit' in (request.POST):\n reset_form = ResetPasswordForm(request.POST,instance=user)\n password = request.POST.get(\"password_reset\", None)\n \n if reset_form.is_valid():\n user=reset_form.save(commit = False)\n user.save()\n return redirect('password_reset_complete')\n else:\n reset_form = ResetPasswordForm(instance=user)\n else:\n validlink = False\n reset_form = ResetPasswordForm(instance=user)\n title = ('Password reset unsuccessful')\n return redirect ('invalid_password_link')\n 
context = {\n 'reset_form': ResetPasswordForm,\n 'title': title,\n 'validlink': validlink,\n }\n return render(request, 'reset_confirm.html', context, {'reset_form': ResetPasswordForm})", "def verify_email(request):\n user = User.objects.get(username=request.user)\n if request.method == 'POST':\n otp = request.data.get('otp')\n if not otp:\n return Response({'message':\"We cannot find your otp\"}, status=status.HTTP_400_BAD_REQUEST)\n\n #Get token\n qs = ResetRequests.objects.filter(user=user, token=otp, use_case = 'account confirmation')\n if not qs.exists():\n return Response({'message':'Wrong Token.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Grab the last token\n token_request = qs.last()\n timer = token_request.created_at\n\n #Check token expiry\n if timezone.now() > timer + timezone.timedelta(minutes=10):\n return Response({'message':'Token Expired. Request another please.'}, status=status.HTTP_400_BAD_REQUEST)\n\n #Check whether token has been used.\n if token_request.consumed:\n return Response({\"message\":\"Pin has been used already\"}, status=status.HTTP_400_BAD_REQUEST)\n\n if int(otp) == int(token_request.token):\n #Set user as verified\n user.email_verified = True\n user.save()\n #Set token as consumed\n token_request.consumed = True\n token_request.save()\n\n #Send Confirmation Mail\n email_subject = \"SpendWise - Account Verified.\"\n email_msg = \"Your account has been verified. Welcome to the SpendWise Ecosystem\"\n try:\n sendEmail(user, email_subject, \"Account Verified\", information=email_msg)\n return Response({'message':'User account successfully verified.'}, status=status.HTTP_200_OK)\n except:\n return Response({'message':'We could not send a confirmation email'}, status=status.HTTP_200_OK)\n\n\n if request.method == 'GET':\n to = User.objects.get(username=request.user).email\n pin = random.randint(0, 1000000)\n #presumes this link is only reachable cos the user already has an email.\n to = user.email\n try:\n subject = \"Account Confirmation.\"\n message = f\"Your Account Confirmation code is {pin}\\n\\nExpires in 10 minutes.\"\n sendEmail(user, subject, \"Account Confirmation\", information=message, otp=pin)\n\n #Write to user's record\n ResetRequests.objects.create(\n user = user,\n token = pin,\n use_case = 'account confirmation'\n )\n #Add password reset request date here\n return Response({'message':'Token sent to registered email.',\n 'email' : to},\n status=status.HTTP_200_OK)\n except Exception as e:\n return Response({'message':'We could not send an email', 'error':e},\n status=status.HTTP_400_BAD_REQUEST)\n\n #Do the actual verification\n #Verified is alrady possibly True via sms. 
What happens now?", "def reset_password_api():\n\n # get the data for this query\n data = request.get_json()\n if not data:\n response = jsonify({\n 'success': False,\n 'message': 'Missing request body'\n })\n response.status_code = 422\n return response\n\n # confirm the password is not blank\n new_password_plain = data.get('password')\n if not new_password_plain:\n response = jsonify({\n 'success': False,\n 'message': 'Cannot have empty password'\n })\n response.status_code = 200\n return response\n\n # check if there is a PasswordResetLink with this secret_link\n secret_link = data.get('secret_link')\n max_age = datetime.datetime.now() - datetime.timedelta(days=PASSWORD_RESET_LINK_EXPIRATION_DAYS)\n reset_link_object = db.session.query(PasswordResetLink).filter(\n PasswordResetLink.secret_link == secret_link,\n PasswordResetLink.expired == False,\n PasswordResetLink.created_at > max_age\n ).one_or_none()\n if not reset_link_object:\n response = jsonify({\n 'success': False,\n 'message': 'This password reset link is no longer active. Use forgot password again to create a new one.'\n })\n response.status_code = 200\n return response\n\n # get the user associated with this PasswordResetLink\n user = db.session.query(User).filter(User.user_id == reset_link_object.user_id).one_or_none()\n if not user:\n response = jsonify({\n 'success': False,\n 'message': 'Not Authorized: invalid user'\n })\n response.status_code = 403\n return response\n\n # generate and set new password\n new_password = generate_password_hash(new_password_plain)\n user.password = new_password\n reset_link_object.expired = True\n db.session.add(user)\n db.session.add(reset_link_object)\n db.session.commit()\n\n # return authenticated token\n token = generate_auth_token(user_id=user.user_id)\n response = jsonify({\n 'success': True,\n 'token': token\n })\n response.status_code = 200\n return response", "def get(self, request, token):\n try:\n primary_key = signing.loads(token, max_age=self.token_expires, salt=self.salt)\n except signing.BadSignature:\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"invalid token\",\n 'message': \"sorry invalid token try again to recover password\"})\n user = get_user_data(primary_key)\n return render_to_response(\n 'registration/recovery_form.html',\n {'user': user, 'form': self.form})", "def login_reset():\n # Start with the currently logged in user\n\n if request.method == \"GET\":\n # In browser request that user wants to reset the password\n # Create a token\n # Send out an email\n #\n return flask.render_template('profile.html', name=session[\"user\"][\"label\"], email=session[\"user\"][\"email\"])\n\n if request.method == \"POST\":\n # In browser request that user wants to reset the password\n label = flask.request.form[\"label\"]\n passwd = flask.request.form[\"passwd\"]\n\n # Verify that the user is logged in or return\n if not session.has_key(\"user\"):\n return flask.Response('{\"error\" : \"User not logged in\" }')\n else:\n # Chagne the information in the session\n session[\"user\"][\"label\"] = label\n # Locate the record\n conn.register([model.User])\n dbobj = conn[current_app.config[\"CONFIGDB\"]]\n userdoc = dbobj[\"users\"].User.find_one({'_id' : ObjectId(session[\"user\"][\"id\"])})\n userdoc[\"passwd\"] = passwd\n userdoc[\"password_status\"] = \"ready\"\n userdoc[\"label\"] = label\n userdoc.validate()\n userdoc.save()\n\n return flask.Response('{\"success\" : \"\" }')", "def password_reset(request):\r\n if request.method != \"POST\":\r\n 
raise Http404\r\n\r\n # Add some rate limiting here by re-using the RateLimitMixin as a helper class\r\n limiter = BadRequestRateLimiter()\r\n if limiter.is_rate_limit_exceeded(request):\r\n AUDIT_LOG.warning(\"Rate limit exceeded in password_reset\")\r\n return HttpResponseForbidden()\r\n\r\n form = PasswordResetFormNoActive(request.POST)\r\n if form.is_valid():\r\n form.save(use_https=request.is_secure(),\r\n from_email=settings.DEFAULT_FROM_EMAIL,\r\n request=request,\r\n domain_override=request.get_host())\r\n else:\r\n # bad user? tick the rate limiter counter\r\n AUDIT_LOG.info(\"Bad password_reset user passed in.\")\r\n limiter.tick_bad_request_counter(request)\r\n\r\n return JsonResponse({\r\n 'success': True,\r\n 'value': render_to_string('registration/password_reset_done.html', {}),\r\n })", "def reset_password():\n form = ResetPassword()\n if form.validate_on_submit():\n user_email = form.email.data\n mail_exist = db.check_email(user_email)\n if mail_exist is not None:\n new_password = generate_password()\n new_password_hash = generate_password_hash(new_password)\n username = mail_exist['username']\n db.update_password_username(username, new_password_hash)\n flash('Your new password has been sent to your mailbox')\n redirect('login')\n # send_password_reset_email(user_email, new_password)\n return redirect(url_for('login'))\n else:\n flash('This email address is not registered')\n return redirect('reset_password')\n return render_template('resetpassword.html', form=form)", "def login_resetrequest():\n if request.method == \"GET\":\n # In browser request that user wants to reset the password\n return flask.render_template('reset-request.html', message=\"Please reset the password\")\n\n if request.method == \"POST\":\n # Create a token\n email = flask.request.form[\"email\"]\n\n # Find if an account with that name exists\n conn.register([model.User])\n admindb = conn[current_app.config[\"CONFIGDB\"]]\n\n userdoc = admindb[\"users\"].User.find_one({\"name\" : email, \"type\" : \"passwd\"})\n if userdoc == None:\n # user not found\n return flask.Response('{\"error\" : \"User not found\"}')\n\n # First reset the password\n name = userdoc[\"label\"]\n emailto = userdoc[\"name\"]\n\n # Create accout and a random tocken\n userdoc[\"token\"] = bson.ObjectId()\n userdoc[\"password_status\"] = \"reset-request\"\n\n # May only be useful for some\n if \"password_ready\" in userdoc:\n del userdoc[\"password_ready\"]\n\n userdoc.validate()\n userdoc.save()\n\n # Create email\n emailfrom = current_app.config[\"EMAIL_FROM\"] \n\n body = \"Hello \" + name + \",\\n\\n\"\n body = body + \"You recently requested a password reset for your account at https://slide-atlas.org.\"\n body = body + \"\\n To complete the request operation please follow the link below- \\n\"\n body = body + \"\\n \" + url_for('.login_confirm', _external=True) + \"?token=\" + str(userdoc[\"token\"]) + \" \\n\"\n body = body + \"\\nIf clicking on the link doesn't work, try copying and pasting it into your browser.\\n\"\n body = body + \"\\nThis link will work only once, and will let you create a new password. 
\\n\"\n body = body + \"\\nIf you did not request password reset, please disregard this message.\\n\"\n body = body + \"\\nThank you,\\nThe SlideAtlas Administration Team\\n\"\n\n # Create a text/plain message\n msg = MIMEText(body)\n\n # me == the sender's email address\n # you == the recipient's email address\n msg['Subject'] = 'Password reset confirmation for slide-atlas.org'\n msg['From'] = emailfrom\n msg['To'] = emailto\n print msg\n s = smtplib.SMTP(current_app.config[\"SMTP\"])\n try:\n out = s.sendmail(emailfrom, [emailto], msg.as_string())\n except:\n return flask.Response(\"{\\\"error\\\" : \\\"Error sending email\\\"}\")\n\n s.quit()\n return flask.Response(\"{\\\"success\\\" : \\\"\" + str(out) + \"\\\"}\")", "def forgot_password():\r\n form = ForgotPasswordForm(request.form)\r\n if form.validate_on_submit():\r\n user = model.user.User.query\\\r\n .filter_by(email_addr=form.email_addr.data)\\\r\n .first()\r\n if user and user.email_addr:\r\n msg = Message(subject='Account Recovery',\r\n recipients=[user.email_addr])\r\n if user.twitter_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Twitter')\r\n elif user.facebook_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Facebook')\r\n elif user.google_user_id:\r\n msg.body = render_template(\r\n '/account/email/forgot_password_openid.md',\r\n user=user, account_name='Google')\r\n else:\r\n userdict = {'user': user.name, 'password': user.passwd_hash}\r\n key = signer.signer.dumps(userdict, salt='password-reset')\r\n recovery_url = url_for('.reset_password',\r\n key=key, _external=True)\r\n msg.body = render_template(\r\n '/account/email/forgot_password.md',\r\n user=user, recovery_url=recovery_url)\r\n msg.html = markdown(msg.body)\r\n mail.send(msg)\r\n flash(gettext(\"We've send you email with account \"\r\n \"recovery instructions!\"),\r\n 'success')\r\n else:\r\n flash(gettext(\"We don't have this email in our records. 
\"\r\n \"You may have signed up with a different \"\r\n \"email or used Twitter, Facebook, or \"\r\n \"Google to sign-in\"), 'error')\r\n if request.method == 'POST' and not form.validate():\r\n flash(gettext('Something went wrong, please correct the errors on the '\r\n 'form'), 'error')\r\n return render_template('/account/password_forgot.html', form=form)", "def POST(self):\n session = web.ctx.session\n nav = get_nav_bar(session)\n data = web.input(reset_token = \"\", new_password=\"\")\n \n reset_password_colum = reset_password_form()\n \n # check each field is endered values.\n if not reset_password_colum.validates():\n return render.reset_password(nav, reset_password_form, \"All fields must be valid.\")\n \n try:\n # log ip information\n ip_addr = web.ctx[\"ip\"]\n accessed_path = web.ctx[\"fullpath\"]\n\n # query user's name (username) and token (extra secruity)\n token = data.reset_token\n username = search_for_user(token, ip_addr, accessed_path)\n #print(\"-\"*16)\n #print(username)\n \n #update token to null database\n result_update_token = update_token_to_null(username, token, ip_addr, accessed_path)\n print(\"-\" * 16 + \"updated!\")\n\n # generate new password\n new_salt = generate_salt()\n hashed_password = hashed_value(data.new_password, new_salt)\n hashed_password = new_salt + hashed_password\n\n # update password \n result_update_password = update_user_password(username, hashed_password, ip_addr, accessed_path )\n raise web.seeother(\"/\")\n except Exception as e:\n print(e)\n except:\n print(exit[0])\n return render.login(nav, reset_password_form, \"- Something went wrong!\")", "def reset_password():\n if current_user.is_authenticated:\n return redirect(url_for('main.home'))\n\n form = RequestResetForm()\n\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n send_reset_email(user) # located in utils.py\n flash('An email has been sent with instruction to reset your password', 'info')\n return redirect(url_for('users.login'))\n\n return render_template('reset_password_request.html', form=form)", "def forgot_passwd_check(request, uidb64=None, token=None):\n assert uidb64 is not None and token is not None\n dc1_settings = DefaultDc().settings\n sms_registration = dc1_settings.SMS_REGISTRATION_ENABLED\n\n if sms_registration:\n set_password_form = SMSSendPasswordResetForm\n else:\n set_password_form = PasswordResetForm\n\n if request.method == 'POST':\n try:\n user = User.objects.get(id=urlsafe_base64_decode(uidb64))\n profile = user.userprofile\n except (ValueError, OverflowError, User.DoesNotExist):\n profile = None\n\n if profile and profile.email_token == token:\n # Email address is verified, we cant compare to token as register token is different to reset one.\n profile.email_token = ''\n profile.email_verified = True\n # This may look strange - setting the phone_verified before the user logs in. 
It is not :) We are sending\n # new password to phone number in profile, after the user logs in we would set phone_verified to True anyway\n if sms_registration:\n profile.phone_verified = True\n profile.save()\n\n return password_reset_confirm(\n request,\n uidb64=uidb64,\n token=token,\n template_name='gui/accounts/forgot_check.html',\n set_password_form=set_password_form,\n post_reset_redirect=reverse('forgot_check_done'),\n current_app='gui',\n extra_context={\n 'sms_registration': sms_registration,\n }\n )", "def password_reset_confirm_wrapper(\r\n request,\r\n uidb36=None,\r\n token=None,\r\n):\r\n # cribbed from django.contrib.auth.views.password_reset_confirm\r\n try:\r\n uid_int = base36_to_int(uidb36)\r\n user = User.objects.get(id=uid_int)\r\n user.is_active = True\r\n user.save()\r\n except (ValueError, User.DoesNotExist):\r\n pass\r\n\r\n # tie in password strength enforcement as an optional level of\r\n # security protection\r\n err_msg = None\r\n\r\n if request.method == 'POST':\r\n password = request.POST['new_password1']\r\n if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):\r\n try:\r\n validate_password_length(password)\r\n validate_password_complexity(password)\r\n validate_password_dictionary(password)\r\n except ValidationError, err:\r\n err_msg = _('Password: ') + '; '.join(err.messages)\r\n\r\n # also, check the password reuse policy\r\n if not PasswordHistory.is_allowable_password_reuse(user, password):\r\n if user.is_staff:\r\n num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']\r\n else:\r\n num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']\r\n err_msg = _(\"You are re-using a password that you have used recently. You must \"\r\n \"have {0} distinct password(s) before reusing a previous password.\").format(num_distinct)\r\n\r\n # also, check to see if passwords are getting reset too frequent\r\n if PasswordHistory.is_password_reset_too_soon(user):\r\n num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']\r\n err_msg = _(\"You are resetting passwords too frequently. 
Due to security policies, \"\r\n \"{0} day(s) must elapse between password resets\").format(num_days)\r\n\r\n if err_msg:\r\n # We have an password reset attempt which violates some security policy, use the\r\n # existing Django template to communicate this back to the user\r\n context = {\r\n 'validlink': True,\r\n 'form': None,\r\n 'title': _('Password reset unsuccessful'),\r\n 'err_msg': err_msg,\r\n }\r\n return TemplateResponse(request, 'registration/password_reset_confirm.html', context)\r\n else:\r\n # we also want to pass settings.PLATFORM_NAME in as extra_context\r\n extra_context = {\"platform_name\": settings.PLATFORM_NAME}\r\n\r\n if request.method == 'POST':\r\n # remember what the old password hash is before we call down\r\n old_password_hash = user.password\r\n\r\n result = password_reset_confirm(\r\n request, uidb36=uidb36, token=token, extra_context=extra_context\r\n )\r\n\r\n # get the updated user\r\n updated_user = User.objects.get(id=uid_int)\r\n\r\n # did the password hash change, if so record it in the PasswordHistory\r\n if updated_user.password != old_password_hash:\r\n entry = PasswordHistory()\r\n entry.create(updated_user)\r\n\r\n return result\r\n else:\r\n return password_reset_confirm(\r\n request, uidb36=uidb36, token=token, extra_context=extra_context\r\n )", "def reset_password():\n body = request.get_json()\n reset_token = body.get('reset_token')\n password = body.get('password')\n\n if not reset_token or not password:\n return jsonify(msg.MISSING_PARAMETER), 400\n\n user_email = decode_token(reset_token)['identity']\n is_changed = views.UserManagement().change_password(email=user_email, password=password)\n if not is_changed:\n return jsonify(msg.NO_DATA), 404\n\n send_email('[Shodita] Password reset successful', sender='shodita@shodita.com', recipients=[user_email],\n text_body='Password reset was successful', html_body='<p>Password reset was successful</p>')\n\n return jsonify(msg.SUCCESS), 200", "def forgot_password():\n if request.method == 'POST':\n if 'username' in request.form:\n username = request.form['username']\n user = Users.query.get(username)\n if user:\n reset_slug = utils.encrypt(username)\n reset_url = request.host_url + 'reset_password' + '/' + reset_slug\n from_email = ('noreply@thescriptgroup.in', 'TSG Bot')\n to_email = [(user.email, user.name)]\n subject = 'Password reset for Hades account'\n content = f\"Hello {user.name}, please click <a href=\\\"{reset_url}\\\">here</a> to reset your password!\"\n utils.send_mail(from_email, to_email, subject, content)\n return redirect(url_for('login'))\n return render_template('forgot_password.html')", "def PasswordResetViaOTP(request):\n current_time = datetime.now(utc)\n request_data = json.loads(request.body)\n\n if not type(request_data) == dict:\n return StandardHttpResponse.login_bad_rsp([], 'Bad data')\n\n if not PasswordResetUtils().validate_request_data(request_data):\n return StandardHttpResponse.login_bad_rsp([], 'Missing data fields')\n\n if not request_data['password'] == request_data['confirm_password']:\n return StandardHttpResponse.login_bad_rsp([], 'Password Not Matched')\n\n result, response = MobileOtpService.validate_otp(otp=request_data['otp'],\n otp_ref=request_data['otp_ref'],\n o_type='ForgetPassword',\n current_time=current_time,\n return_obj=True)\n if not result:\n return StandardHttpResponse.login_bad_rsp([], response)\n if not response.mobile:\n return StandardHttpResponse.login_bad_rsp([], 'Invalid request. 
Please contact Admin.')\n user_profile_obj = UserModelQueries.get_user_profile_by_mobile(response.mobile)\n if not user_profile_obj:\n return StandardHttpResponse.login_bad_rsp([], 'Mobile not registered')\n user_obj = UserModelQueries.get_user_by_id(user_profile_obj.user_id)\n response.delete()\n user_obj.set_password(request_data['password'])\n user_obj.save()\n json_body = dict()\n json_body['username'] = user_obj.username\n json_body['password'] = request_data['password']\n json_body['grant_type'] = 'password'\n request._body = urllib.urlencode(json_body)\n json_body['redirect_uri'] = REDIRECT_URL\n request._post = json_body\n view = TokenView.as_view()\n request.META[\n \"HTTP_AUTHORIZATION\"] = \"Basic YlVhOFBCTTFCQTFGb3JPUlp5RVB0RmFBSUJZOGNhUWF5N2hTbTE4dDpla25ENkVzenlJZEZMeUM1RmhqSFlnUnVuMlk2alpqMTN0Mll1ZzZhVWNJc0ZYYk9VMnNyWDRjRmJLQkpwMnpVVVRZNnV6U0U5V3AyN3JJTmNPZ09FWmVDVlB4NXVEWXV0ZHBFMHhVcEROTTBGUlluRERMQzlTTnNNN3RRZUhEbA==\"\n request.META[\"CONTENT_TYPE\"] = \"application/x-www-form-urlencoded\"\n return StandardHttpResponse.login_rsp(json.loads(view(request).content), 'User Logged-In Successfully.')", "def GET(self):\n session = web.ctx.session\n nav = get_nav_bar(session)\n self.token = web.input().reset_token\n return render.reset_password(nav, reset_password_form, \"\")", "def password_reset(request):\n host = settings.TACC_USER_PORTAL_HOST\n return redirect(f\"{host}/password-reset?{urlencode(request.GET)}\")", "def password_reset(request):\n\tif not request.user.is_authenticated():\n\t\treturn django.contrib.auth.views.password_reset(request,\n template_name='usermgr/password_reset_form.html',\n email_template_name= 'usermgr/password_reset_email.html',\n post_reset_redirect='/usermgr/password_reset/done/')\n\telse:\n\t\treturn HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)", "def forgot():\n form = ForgotForm()\n\n if form.validate_on_submit():\n db.session.add(form.pw_reset)\n db.session.commit()\n\n form.pw_reset.send()\n flash('A password reset link has been sent to your email', 'alert-success')\n return redirect(url_for('default.home'))\n else:\n flash_form_errors(form)\n return render_template('forgot.html', form=form)", "def reset_password_request():\n form = ResetPasswordRequestForm()\n if form.validate_on_submit():\n try:\n user = User.query.filter_by(email=form.email.data).first_or_404()\n except Exception:\n flash('This Email ID is Not Registered', 'error')\n return render_template('password_reset_request.html',\n form=form), 400\n\n if user:\n send_password_reset_email(user)\n flash('Please check your email for a password reset link.',\n 'success')\n return render_template('post_pass_reset_request.html',\n title=\"Reset Password\")\n else:\n flash(\n 'Your email address must be confirmed \\\n before attempting a password reset.',\n 'error')\n return redirect(url_for('auth.login'))\n\n return render_template('password_reset_request.html', form=form), 400", "def reset_password():\r\n key = request.args.get('key')\r\n if key is None:\r\n abort(403)\r\n userdict = {}\r\n try:\r\n userdict = signer.signer.loads(key, max_age=3600, salt='password-reset')\r\n except BadData:\r\n abort(403)\r\n username = userdict.get('user')\r\n if not username or not userdict.get('password'):\r\n abort(403)\r\n user = model.user.User.query.filter_by(name=username).first_or_404()\r\n if user.passwd_hash != userdict.get('password'):\r\n abort(403)\r\n form = ChangePasswordForm(request.form)\r\n if form.validate_on_submit():\r\n user.set_password(form.new_password.data)\r\n 
db.session.add(user)\r\n db.session.commit()\r\n login_user(user)\r\n flash(gettext('You reset your password successfully!'), 'success')\r\n return redirect(url_for('.signin'))\r\n if request.method == 'POST' and not form.validate():\r\n flash(gettext('Please correct the errors'), 'error')\r\n return render_template('/account/password_reset.html', form=form)", "def account_api_password_reset(request):\n if request.method != 'POST':\n return render(request, 'agda/account/api_password_reset.html')\n profile = request.user\n modified = profile.has_usable_api_password()\n api_password = User.objects.make_random_password(settings.API_PASSWORD_LENGTH, settings.API_PASSWORD_CHARACTERS)\n profile.set_api_password(api_password)\n profile.save()\n profile.log_change(request.user, \"Generated new api password.\")\n return render(request, 'agda/account/api_password_reset.html', dict(api_password=api_password, modified=modified))", "def forgot_password():\n url = 'http://localhost:8080/' + 'user/reset/'\n body = request.get_json()\n email = body.get('email')\n if not email:\n return jsonify(msg.MISSING_PARAMETER), 400\n user_email = views.UserManagement().exists(email=email)\n\n if not user_email:\n return jsonify(msg.NO_DATA), 404\n expires = datetime.timedelta(hours=24)\n reset_token = create_access_token(identity=email, expires_delta=expires)\n\n send_email('[Shodita] Reset Your Password', sender='shodita@shodita.com', recipients=[email],\n text_body=render_template('email/reset_password.txt', url=url + reset_token),\n html_body=render_template('email/reset_password.html', url=url + reset_token))\n\n return jsonify(msg.SUCCESS), 200", "def post(self):\n try:\n body = request.get_json()\n bearer = request.headers.get('Authorization')\n base_url = request.url_root\n token = bearer.split()[1]\n password = body.get('password')\n\n if not token or not password:\n raise SchemaValidationError\n\n user_id = decode_token(token)['sub']['user_id']\n\n user = User.objects.get(id=user_id)\n\n user.modify(password=password)\n user.hash_password()\n user.save()\n\n return send_email('[Unboxit] Password reset successful',\n sender='contact@tsantos.dev',\n recipients=[user.email],\n text_body='Password Reset',\n html_body=render_template(\n 'components/reset_password_response.html',\n first_name=user.first_name,\n base_url=base_url))\n\n except SchemaValidationError:\n raise SchemaValidationError\n except ExpiredSignatureError:\n raise ExpiredTokenError\n except (DecodeError, InvalidTokenError):\n raise BadTokenError\n except Exception as e:\n raise InternalServerError", "def post(self):\n args = password_reset.parse_args()\n email = args.get('email')\n new_password = password_generator()\n\n validation_email = email_validation(email)\n if validation_email:\n return validation_email\n\n user = User.query.filter_by(email=email).first()\n if user:\n user.password = new_password\n user.save()\n response = {\n \"message\": \"Password has been reset\",\n \"status\": \"Reset password succesful!\",\n \"new_password\": new_password\n }\n return response, 200\n else:\n response = {\n 'message': 'User email does not exist, Please try again',\n 'status': 'Reset password failed!'\n }\n return response, 400", "def forgotPassword():\n if request.method == 'POST':\n if emailform():\n email = request.form['email1']\n\n #Confirm the user exist\n if hl.confirmUser(email):\n user = hl.getUser(\"Email\",email)\n refLink = \"http://\"+request.headers['Host']+hl.genUrl(user[\"Name\"],\"Password\")\n #Send email\n msg = \"\"\"\n Dear 
{},\n\n You are receiving this email because you have requested your password be reset. \n Use the following link to reset your password:\n\n {}\n\n If you did not request that your password be changed, please reply to this email immediately.\n\n Regards,\n Onegroup Admin Team\n \"\"\".format(user[\"Name\"],refLink)\n\n emailMessage(\"Password Reset\", [user[\"Email\"]], msg)\n return redirect(url_for('confirm', confirmed = 'Password reset email has been sent.'))\n else:\n flash(\"User doesn't exists\")\n else:\n flash(\"Emails don't match\")\n \n return render_template('emailsend.html')", "def request_password_reset_token():\n j = request.get_json(force=True)\n user_requested = j['user'].lower()\n\n # Disabled user accounts can not request for a new password.\n target_user = User.query.filter_by(mail=user_requested).first()\n\n if target_user is None:\n return Errors.UNKNOWN_USER.make_json_response(status.HTTP_400_BAD_REQUEST)\n\n if target_user.state == StateType.DEACTIVATED:\n return Errors.DEACTIVATED_USER.make_json_response(status.HTTP_400_BAD_REQUEST)\n\n target_user.generate_password_request_token()\n\n send_mail(target_user.mail, render_template(\"password/reset_password_mail.txt\",\n greeting=get_opening_greeting(target_user),\n wlink=\"{}/password/reset/{}\".format(\n app.config['BUZZN_BASE_URL'],\n target_user.password_reset_token\n )), 'Passwort zurücksetzen für Buzzn-App')\n\n db.session.commit()\n return '', status.HTTP_201_CREATED", "def ask_password_reset(request):\n output_data = {}\n\n # Here we do not send a JSON answer based on success or failure\n # in order to prevent attackers from knowing if email exists in db or not.\n\n if request.method == 'POST':\n\n email = request.POST.get('email')\n\n if not email:\n output_data['error_code'] = '1'\n output_data['error_details'] = errors_for_dev['1']\n return JsonResponse(\n output_data,\n status=status.HTTP_400_BAD_REQUEST\n )\n\n email = email.lower()\n\n try:\n user = User.objects.get(email=email)\n except exceptions.ObjectDoesNotExist:\n return JsonResponse(output_data)\n\n signer = TimestampSigner()\n timestamped_id = signer.sign(user.id)\n\n password_reset_url = \"%s%s\" % (\n settings.SITE_BASE_URL,\n reverse(set_new_password, args=(timestamped_id,))\n )\n\n send_password_reset_email(email, password_reset_url)\n\n return JsonResponse(output_data)\n\n else:\n\n output_data['error_code'] = '8'\n output_data['error_details'] = errors_for_dev['8']\n return JsonResponse(\n output_data,\n status=status.HTTP_400_BAD_REQUEST\n )", "def reset_password():\n if request.method == 'POST':\n email = request.json.get('email')\n new_password = request.json.get('new_password')\n if len(new_password.strip()) < 4:\n return make_response(jsonify(\n {'message': 'password too short'}\n )), 409\n user = User.query.filter_by(email=email).first()\n if user:\n user.password_hash = generate_password_hash(new_password)\n user.save_user()\n return make_response(jsonify(\n {\n 'message': 'password reset successful',\n 'your new password': new_password\n }\n )), 201\n return make_response(jsonify(\n {'message': 'Wrong email, please provide a valid email and try again'}\n )), 401\n return None", "def verify_reset_token(self, token):\n\n expired, invalid, data = self._verify_token(token)\n if data and data.get('id') == self.id and data.get('op') == 'reset':\n data = True\n else:\n data = False\n return expired, invalid, data", "def password_reset_confirm(request, uidb36=None, token=None,\n template_name='gallery/password_reset_confirm.html',\n 
token_generator=default_token_generator,\n set_password_form=SetPasswordForm,\n post_reset_redirect=None):\n assert uidb36 is not None and token is not None # checked by URLconf\n if post_reset_redirect is None:\n post_reset_redirect = reverse('django.contrib.auth.views.password_reset_complete')\n try:\n uid_int = base36_to_int(uidb36)\n except ValueError:\n raise HttpResponseNotFound\n\n user = get_object_or_404(authmodels.User, id=uid_int)\n context_instance = RequestContext(request)\n\n if token_generator.check_token(user, token):\n context_instance['validlink'] = True\n if request.method == 'POST':\n form = set_password_form(user, request.POST)\n if form.is_valid():\n # we can't use form.save b/c that will update the p/w on the\n # model object, we need to do it in LDAP\n if settings.USE_LDAP:\n ldapper = get_ldap_connection()\n dn = get_user_dn(user.username)\n new_password = request.POST.get('new_password1')\n ldapper.passwd_s(dn, None, new_password)\n ldapper.unbind_s()\n request.notifications.add(_('Password change successful.'))\n else:\n form.save()\n return HttpResponseRedirect(post_reset_redirect)\n else:\n form = set_password_form(None)\n else:\n context_instance['validlink'] = False\n form = None\n context_instance['form'] = form\n return render_to_response(template_name, context_instance=context_instance)", "def reset_pass(key):\n form = NewPasswordForm()\n form.key.data = key\n\n if form.validate_on_submit():\n form.user.set_password(form.password.data)\n db.session.delete(form.pw_reset)\n db.session.commit()\n\n flash('Your password has been successfully reset', 'alert-success')\n login_user(form.user)\n return redirect(url_for('default.home'))\n else:\n flash_form_errors(form)\n form.key.data = key\n # NOTE: This render_template is causing a 404\n return render_template('reset_pass.html', form=form, key=key)", "def reset_token(sender, instance, **kwargs):\n new_password = instance.password\n\n try:\n old_password = User.objects.get(pk=instance.pk).password\n except User.DoesNotExist:\n old_password = None\n\n if new_password != old_password:\n Token.objects.filter(user=instance).delete()", "def get(self, request, token):\n try:\n primary_key = signing.loads(token, max_age=self.token_expires, salt=self.salt)\n user = get_user_data(primary_key)\n if user:\n return render_to_response(\n 'registration/email.html',\n {\n 'in_browser': True,\n 'site': RequestSite(request),\n 'user': user,\n 'token': signing.dumps(user.pk, salt=self.salt),\n 'secure': request.is_secure(),\n })\n except signing.BadSignature:\n return render_to_response(\n 'registration/show_message.html',\n {\n 'title': \"invalid token\",\n 'message': \"sorry invalid token try again to recover password\"})", "def reset_password(request):\r\n params = request.params\r\n\r\n # now also load the password info\r\n current = params.get('current_password', None)\r\n new = params.get('new_password', None)\r\n\r\n # if we don't have any password info, try a json_body in case it's a json\r\n # POST\r\n if current is None and new is None:\r\n params = request.json_body\r\n current = params.get('current_password', None)\r\n new = params.get('new_password', None)\r\n\r\n user_acct = request.user\r\n\r\n if not UserMgr.acceptable_password(new):\r\n request.response.status_int = 406\r\n return _api_response(request, {\r\n 'username': user_acct.username,\r\n 'error': \"Come on, let's try a real password this time\"\r\n })\r\n\r\n # before we change the password, let's verify it\r\n if user_acct.validate_password(current):\r\n # 
we're good to change it\r\n user_acct.password = new\r\n return _api_response(request, {\r\n 'username': user_acct.username,\r\n 'message': \"Password changed\",\r\n })\r\n else:\r\n request.response.status_int = 403\r\n return _api_response(request, {\r\n 'username': user_acct.username,\r\n 'error': \"There was a typo somewhere. Please check your request\"\r\n })", "def user_forgotpassword(): \n data = user_obj.user_forgotpassword(request.forms) \n return data", "async def reset_post_handler(req: web.Request) -> web.Response:\n db = req.app[\"db\"]\n\n client = req[\"client\"]\n\n form_data = await req.post()\n\n password = form_data.get(\"password\", \"\")\n confirm = form_data.get(\"confirm\", \"\")\n verification_key = form_data.get(\"verification\", None)\n return_to = get_return_to_from_query(req)\n\n # Check that the hidden verification key matches the one attached to the logging-in session. Redirect to `return_to`\n # URL if verification fails (this will end up on login page with correct query parameter.\n if not await virtool.db.sessions.check_verification_key(db, client.session_id, verification_key, mode=\"reset\"):\n return web.Response(status=302, headers={\"Location\": return_to})\n\n user_id = await virtool.db.utils.get_one_field(db.sessions, \"reset_user_id\", client.session_id)\n\n if not user_id:\n return web.Response(status=302, headers={\"Location\": return_to})\n\n errors = list()\n\n # Re-render the reset page with an error message if the new password is invalid.\n if password != confirm:\n errors.append(\"Passwords do not match\")\n\n minimum_password_length = req.app[\"settings\"][\"minimum_password_length\"]\n\n if len(password) < minimum_password_length:\n errors.append(f\"Password must contain at least {minimum_password_length} characters\")\n\n if errors:\n reset_code = await virtool.db.sessions.set_reset_errors(db, client.session_id, errors)\n return web.Response(status=302, headers={\"Location\": f\"/reset?return_to={return_to}&code={reset_code}\"})\n\n # Unset all reset page errors.\n await virtool.db.sessions.set_reset_errors(db, client.session_id)\n\n # Update the user password and disable the `force_reset`.\n await virtool.db.users.edit(db, user_id, force_reset=False, password=password)\n\n # Authenticate and return a redirect response to the `return_to` path. 
This is identical to the process used for\n # successful login requests.\n return await auth_response(req, return_to, user_id, False)", "def send_password_reset_mail(email, token):\n print(\"reset password\")\n url = f\"{settings.SITE_URL}/reset-password?email={email}&token={token}\"\n SUBJECT = \"Reset Password Request\"\n # The HTML body of the email.\n body = \"\"\"\n <html>\n <head></head>\n <body>\n <p>Here is your password reset link:</p>\n <p><a href='{0}'>{1}</a></p>\n </body>\n </html>\n \"\"\".format(url, url)\n send_mail(SUBJECT, body, email)", "def password_reset(self, password, vtoken, welcomeEmailTemplate = ''):\n auth = 'appkey='+ self._lr_object._get_api_key()+ '&appsecret='+ self._lr_object._get_api_secret() + '&vtoken=' + vtoken\n payload = {'password': password}\n url = SECURE_API_URL + \"raas/v1/account/password/reset\" + \"?\" + auth\n return self._lr_object._post_json(url, payload)", "def post(self):\n try:\n url = request.host_url + 'reset/password/'\n body = request.get_json()\n base_url = request.url_root\n email = body.get('email')\n\n if not email:\n raise SchemaValidationError\n\n user = User.objects.get(email=email)\n if not user:\n raise EmailDoesNotExistsError\n\n expires = datetime.timedelta(minutes=60)\n payload = {\"user_id\": str(user.id)}\n reset_token = create_access_token(payload, expires_delta=expires)\n\n return send_email('[Unboxit] Reset Your Password',\n sender='contact@tsantos.dev',\n recipients=[user.email],\n text_body=render_template(\n 'components/reset_password.txt',\n url=url + reset_token),\n html_body=render_template(\n 'components/reset_password.html',\n url=url + reset_token,\n first_name=user.first_name,\n base_url=base_url))\n except SchemaValidationError:\n raise SchemaValidationError\n except DoesNotExist:\n raise EmailDoesNotExistsError\n except Exception as e:\n raise InternalServerError", "def GET_resetpassword(self, user, key):\r\n done = False\r\n if not key and request.referer:\r\n referer_path = request.referer.split(g.domain)[-1]\r\n done = referer_path.startswith(request.fullpath)\r\n elif not user:\r\n return self.abort404()\r\n return BoringPage(_(\"Reset password\"),\r\n content=ResetPassword(key=key, done=done)).render()", "def password_reset_token_created(sender, reset_password_token, *args, **kwargs):\n # send an e-mail to the user\n context = {\n 'current_user': reset_password_token.user,\n 'username': reset_password_token.user.username,\n 'email': reset_password_token.user.email,\n # ToDo: The URL can (and should) be constructed using pythons built-in `reverse` method.\n 'reset_password_url': \"http://some_url/reset/?token={token}\".format(token=reset_password_token.key)\n }\n\n # render email text\n email_html_message = render_to_string('email/user_reset_password.html', context)\n email_plaintext_message = render_to_string('email/user_reset_password.txt', context)\n\n msg = EmailMultiAlternatives(\n # title:\n \"Password Reset for {title}\".format(title=\"Some website title\"),\n # message:\n email_plaintext_message,\n # from:\n \"noreply@somehost.local\",\n # to:\n [reset_password_token.user.email]\n )\n msg.attach_alternative(email_html_message, \"text/html\")\n msg.send()", "def send_recovery_password_email(token: str, email: str) -> None:\n\n # TODO ...\n # Load html templates and get the content from it.\n # html_content = ...\n\n # You must have to send this as a anchor\n # to my-domain.com/reset-password?token=ad5a....\n link = f\"{SERVER_HOST}/reset-password?token={token}\"\n content = f\"\"\"\n <h1>Reset 
your password</h1>\n <p></p>\n <a href=\"{link}\" target=\"_blank\" rel=\"noopener noreferrer\">Press here</a>\n \"\"\"\n email = sender.create_email(\n to_list=[email],\n subject=f\"Recovery Password\",\n html_content=content,\n )\n sender.send_email(email_to_send=email)", "def reset_password_email(request):\n if request.method == 'POST' :\n try:\n print(request.POST)\n user = models.UserProfile.objects.get(email=request.POST.get('email',''))\n current_site=get_current_site(request)\n email_subject='Password Reset'\n message=render_to_string('reset_password.html',{\n 'user':user,\n 'domain':current_site.domain,\n 'uid':urlsafe_base64_encode(force_bytes(user.id)),\n 'token':account_activation_token.make_token(user),\n })\n to_email= user.email\n email= EmailMessage(email_subject,message,to=[to_email])\n email.send()\n return JsonResponse(\n {\n \"status\":\"The Reset password email has been sent.\"\n }\n )\n except(TypeError, ValueError, OverflowError, models.UserProfile.DoesNotExist):\n user = None\n return JsonResponse(\n {\n \"status\":\"No matching account found\"\n }\n )\n else :\n return JsonResponse(\n {\n \"status\":\"only post method is available\"\n }\n )", "def reset_password():\n pass", "def _request_reset(self, email):\n response = self.client.post(reverse('users.send_password_reset'),\n {'email': email})\n return response.context['token']", "def passwordCode(code):\n #Check if code exists and for the correct purpose. Else abort\n if (hl.checkCode(code,\"Password\")):\n user = hl.getUserFromCode(code)\n else:\n abort(404)\n\n if request.method == 'POST':\n #Get new password and handle\n passwordform(user)\n #Mark code as used\n hl.flagCode(code)\n #return\n return redirect(url_for('confirm', confirmed = 'Changed Password'))\n\n return render_template('password.html')", "def forgot_req(request):\n server = request.META['SERVER_NAME']\n recover_url = urljoin(full_url(request), 'recover')\n\n if request.POST and not request.user.is_authenticated():\n\ttry:\n\t username_or_email = request.POST['username']\n\texcept KeyError:\n\t pass\n\telse:\n\t if '@' in username_or_email:\n\t\tqs = User.objects.filter(email = username_or_email)\n\t else:\n\t\tqs = User.objects.filter(username = username_or_email)\n\n\t users = []\n\t user = None\n\n\t for user in qs:\n\t\tquery = 'salt=%s&user=%s' % (urlsafe_b64encode(urandom(8)),\\\n\t\t\t\t\t user.username)\n\t\turl = add_encrypted_query_string(recover_url, query,\n\t\t\t\t\t\t settings.SECRET_KEY)\n\n\t\turl = sign_query_string(settings.SECRET_KEY + user.password,\n\t\t\t\t\turl)\n\n\t\tusers.append(dict(username = user.username, url = url))\n\n\t template = get_template('registration/recover-password.txt')\n\t context = Context(dict(users = users, ApplianceName = server))\n\n\t if len(users) == 1:\n\t\tplural = ''\n\t else:\n\t\tplural = 's'\n\n\t if user:\n\t\tuser.email_user(subject = \"Your %s console account%s\" % (server, plural),\n\t\t\t\tfrom_email = FROM_EMAIL,\n\t\t\t\tmessage = template.render(context))\n\n\t return HttpResponseRedirect('sent')\n\n return render_to_response('registration/forgotten.html',\n\t\t\t dict(username=request.GET.get('username', ''),\n META=request.META, root=settings.ROOT_URL,\n media=settings.MEDIA_URL))", "def change_password_user():\n\n form = ChangePasswordForm(request.form)\n\n if form.validate_on_submit():\n\n if not request.form['old_password'] or request.form['old_password'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return 
render_template('user/change_password_user.html', title='Change Password', form=form)\n\n if not request.form['password'] or request.form['password'] == '' :\n flash(\"No null or empty values are allowed.\",\"warn\")\n return render_template('user/change_password_user.html', title='Change Password', form=form)\n\n if request.form['password'] != request.form['retype_password']:\n flash(\"Passwords are not the same!\",\"warn\")\n return render_template('user/change_password_user.html', title='Change Password', form=form)\n\n\n hashed_password = user_manager.hash_password(request.form['password'])\n\n # Modificamos el password del usuario\n current_user.password = hashed_password\n\n try:\n correct = True\n db.session.commit()\n except Exception as e:\n # Catch anything unknown\n print(e)\n correct = False\n finally:\n if not correct:\n # Cleanup and show error\n db.session.rollback()\n flash('Error modifying password of user, make sure username and email are unique','error')\n return render_template('user/change_password_user.html', title='Change Password', form=form)\n else:\n flash('Congratulations, update your password!','success')\n return redirect(url_for('user_ksat.show_user'))\n\n\n return render_template('user/change_password_user.html', title='Change Password', form=form)", "def verify_reset_password_token(token):\n ser = Serializer(current_app.config['SECRET_KEY'])\n try:\n data = ser.loads(token.encode('utf-8'))\n user_id = data.get('reset_password')\n return User.query.get(user_id)\n except (BadSignature, SignatureExpired):\n return None\n\n return None", "def test_reset_password_bad_token(self, reset_confirm):\r\n\r\n bad_reset_req = self.request_factory.get('/password_reset_confirm/NO-OP/')\r\n password_reset_confirm_wrapper(bad_reset_req, 'NO', 'OP')\r\n confirm_kwargs = reset_confirm.call_args[1]\r\n self.assertEquals(confirm_kwargs['uidb36'], 'NO')\r\n self.assertEquals(confirm_kwargs['token'], 'OP')\r\n self.user = User.objects.get(pk=self.user.pk)\r\n self.assertFalse(self.user.is_active)", "def expire_password_reset_tokens():\n users = get_users_with_password_reset_tokens()\n\n with closing(get_db()) as db:\n if users:\n for user in users:\n if user['password_reset_token']:\n execute_statement(\n f\"UPDATE user SET password_reset_token = '' WHERE id = {user['id']}\"\n )\n\n # create audit trail in db\n create_audit_log('127.0.0.1', 'Server', '127.0.0.1', action='Successfully expired password token for user {}'.format(\n user['email']), table='user', function='UPDATE')\n else:\n # create audit trail in db\n create_audit_log('127.0.0.1', 'Server', '127.0.0.1',\n action='No password tokens to expire today', table='user', function='')", "def test_wrong_token(self):\n token = str((jwt.encode(\n {\"email\": \"bagendadeogracious@gmail.com\"},\n settings.SECRET_KEY)).decode('utf-8')\n )\n self.client.post(self.registration_url, valid_user, format='json')\n response = self.client.patch(\n self.change_password_url+\"?token=\"+token+\"wrong\",\n {\"password\": \"bagenda1234\"},\n format='json'\n )\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(response.data['error'],\n \"verification link is invalid.\")", "def test_api_user_reset_password_post(self):\n pass", "def forgot_password_api():\n\n # get the data for this query\n data = request.get_json()\n if not data:\n response = jsonify({\n 'success': False,\n 'message': 'Missing request body'\n })\n response.status_code = 422\n return response\n\n user_email = 
data.get('email').strip().lower()\n\n # look for a user with this email\n user = db.session.query(User).filter(User.email == user_email).one_or_none()\n if not user:\n response = jsonify({\n 'success': False,\n 'message': 'No user with this email. Contact your system admin to create a user.'\n })\n response.status_code = 200\n return response\n\n # send this user a password reset email\n send_password_reset(user)\n response = jsonify({\n 'success': True\n })\n response.status_code = 200\n return response", "def recover_password_token(pw_token: bytes):\n current_time = int(time.time())\n for ut in range(current_time - 3700, current_time):\n if generate_password_reset_token(ut) == pw_token:\n return ut", "def password_reset_confirm(request, uidb64=None, token=None,\n template_name='registration/password_reset_confirm.html',\n token_generator=default_token_generator,\n set_password_form=SetPasswordForm,\n post_reset_redirect=None,\n current_app=None, extra_context=None):\n UserModel = get_user_model()\n assert uidb64 is not None and token is not None # checked by URLconf\n if post_reset_redirect is None:\n post_reset_redirect = reverse('session:password_reset_complete')\n else:\n post_reset_redirect = resolve_url(post_reset_redirect)\n try:\n # urlsafe_base64_decode() decodes to bytestring on Python 3\n uid = force_text(urlsafe_base64_decode(uidb64))\n user = UserModel._default_manager.get(pk=uid)\n except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):\n user = None\n\n if user is not None and token_generator.check_token(user, token):\n validlink = True\n title = _('Enter new password')\n if request.method == 'POST':\n form = set_password_form(user, request.POST)\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(post_reset_redirect)\n else:\n form = set_password_form(user)\n else:\n validlink = False\n form = None\n title = _('Password reset unsuccessful')\n context = {\n 'form': form,\n 'title': title,\n 'validlink': validlink,\n }\n if extra_context is not None:\n context.update(extra_context)\n\n if current_app is not None:\n request.current_app = current_app\n\n return TemplateResponse(request, template_name, context)", "def reset_password(token, new_password):\n app = current_app._get_current_object()\n serializer = Serializer(app.config[\"SECRET_KEY\"])\n try:\n data = serializer.loads(token.encode(\"utf-8\"))\n except:\n return False\n user = User.query.get(data.get(\"reset\"))\n if user is None:\n return False\n user.password = new_password\n db.session.add(user)\n return True", "def reset_request():\n if current_user.is_authenticated:\n return redirect('/home')\n form = RequestResetForm()\n if form.validate_on_submit():\n staff = Staff.query.filter_by(email=form.email.data).first()\n send_reset_email(staff)\n flash('An email has been sent with instructions to reset your password.', 'info')\n return redirect(url_for('login'))\n return render_template('reset_request.html', title='Reset Password',\n form=form)", "def password_req(request):\n next = request.POST.get('next',\n\t\t\t request.META.get('HTTP_REFERER',\n\t\t\t\t\t DEFAULT_REDIRECT))\n args = default_context(request, username=request.user.username, next=next)\n\n try:\n\tpassword = request.POST['password']\n\n\tpw1 = request.POST['pw1']\n\tpw2 = request.POST['pw2']\n except KeyError:\n\tpass\n else:\n\tif pw1 != pw2:\n\t args['mismatch'] = True\n\telif not request.user.check_password(password):\n\t args['error'] = True\n\telse:\n\t request.user.set_password(pw1)\n\t request.user.save()\n\t 
return HttpResponseRedirect(next)\n\n return render_to_response('registration/password.html', args)", "def get(self, request, **kwargs):\n token = kwargs.get('token')\n try:\n user_pk = loads(token, max_age=self.timeout_seconds)\n\n # Expired\n except SignatureExpired:\n return HttpResponseBadRequest()\n\n # token is wrong\n except BadSignature:\n return HttpResponseBadRequest()\n\n # token is no problem\n else:\n try:\n user = User.objects.get(pk=user_pk)\n except User.DoesNotExist:\n return HttpResponseBadRequest()\n else:\n if not user.is_active:\n # まだ仮登録で、他に問題なければ本登録とする\n user.is_active = True\n user.save()\n return super().get(request, **kwargs)\n\n return HttpResponseBadRequest()", "def post(self):\n try:\n identity = get_jwt_identity()\n body = request.get_json()\n if identity:\n user = User.objects.get(id=identity['user_id'])\n user.modify(password=body.get('password'))\n user.hash_password()\n user.save()\n res = make_response({\n \"response\": \"You have changed your password successfully.\",\n 'status': 200\n }, 200)\n return res\n except SchemaValidationError:\n raise SchemaValidationError\n except ExpiredSignatureError:\n raise ExpiredTokenError\n except (DecodeError, InvalidTokenError):\n raise BadTokenError\n except Exception as e:\n raise InternalServerError", "def pass_change(request):\n if request.method == \"POST\":\n form = PasswordChangeForm(request.user, request.POST)\n if form.is_valid():\n form.save()\n return home(request, \"Password Changed Successfully\")\n \n else:\n form = PasswordChangeForm(instance=request.user)\n \n ctx = _make_context(request, \"pass_form\", form)\n \n return TemplateResponse(request, \"users/index.html\", ctx)", "def change_Password(): \r\n try:\r\n\r\n UserName=request.args.get(\"UserName\")\r\n validate_otp=request.args.get(\"OTP\") \r\n NewPassword=request.args.get(\"NewPassword\")\r\n hashed_Password = hashlib.md5(NewPassword.encode()).hexdigest() \r\n user_details=otp_access(UserName)\r\n otp=user_details[0]['otp']\r\n with open('api.key', 'r') as apikey:\r\n key=apikey.read().replace('\\n', '')\r\n if request.headers.get('API_KEY') == key:\r\n if str(otp)==str(validate_otp):\r\n msg=update_Password(UserName,hashed_Password)\r\n #This function calling makes the user use OTP until Password gets changed after that validity of OTP will be expired.\r\n new_otp=randint(10000,100000)\r\n # This will checks the new generated OTP and old OTP\r\n if str(otp)==str(new_otp):\r\n new_otp=randint(10000,100000)\r\n update_otp(UserName,new_otp)\r\n else:\r\n update_otp(UserName,new_otp)\r\n else:\r\n msg=\"Something went wrong check the OTP or UserName!!!!\"\r\n else:\r\n msg=\"Enter correct API KEY for Authentication.\"\r\n except IndexError:\r\n msg=f\"{UserName} does not exist , kindly enter correct UserName.\"\r\n return msg", "def reset(request):\r\n rdict = request.matchdict\r\n params = request.params\r\n\r\n # This is an initial request to show the activation form.\r\n username = rdict.get('username', None)\r\n activation_key = rdict.get('reset_key', None)\r\n user = ActivationMgr.get_user(username, activation_key)\r\n new_username = None\r\n\r\n if user is None:\r\n # just 404 if we don't have an activation code for this user\r\n raise HTTPNotFound()\r\n\r\n if 'code' in params:\r\n # This is a posted form with the activation, attempt to unlock the\r\n # user's account.\r\n username = params.get('username', None)\r\n activation = params.get('code', None)\r\n password = params.get('new_password', None)\r\n new_username = 
params.get('new_username', None)\r\n error = None\r\n\r\n if new_username:\r\n new_username = new_username.lower()\r\n\r\n # Check whether username exists or not. During signup request , a\r\n # record of current user is created with username as his email id\r\n # which is already checked for uniqueness. So when new_username is\r\n # equal to username ie the email id then no need to check for\r\n # uniqueness , but if new_username is something else it has to be\r\n # verified\r\n\r\n if username != new_username and \\\r\n UserMgr.get(username=new_username) is not None:\r\n # Set an error message to the template.\r\n error = \"Username already exists.\"\r\n elif not UserMgr.acceptable_password(password):\r\n # Set an error message to the template.\r\n error = \"Come on, pick a real password please.\"\r\n else:\r\n res = ActivationMgr.activate_user(username, activation, password)\r\n if res:\r\n # success so respond nicely\r\n AuthLog.reactivate(username, success=True, code=activation)\r\n\r\n # if there's a new username and it's not the same as our\r\n # current username, update it\r\n if new_username and new_username != username:\r\n try:\r\n user = UserMgr.get(username=username)\r\n user.username = new_username\r\n except IntegrityError:\r\n error = 'There was an issue setting your new username'\r\n else:\r\n AuthLog.reactivate(username, success=False, code=activation)\r\n error = ('There was an issue attempting to activate'\r\n 'this account.')\r\n\r\n if error:\r\n return {\r\n 'message': error,\r\n 'user': user\r\n }\r\n else:\r\n # Log the user in and move along.\r\n headers = remember(request, user.id, max_age=60 * 60 * 24 * 30)\r\n user.last_login = datetime.utcnow()\r\n\r\n # log the successful login\r\n AuthLog.login(user.username, True)\r\n\r\n # we're always going to return a user to their own /recent after a\r\n # login\r\n return HTTPFound(\r\n location=request.route_url(\r\n 'user_bmark_recent',\r\n username=user.username),\r\n headers=headers)\r\n\r\n else:\r\n LOG.error(\"CHECKING\")\r\n LOG.error(username)\r\n\r\n if user is None:\r\n # just 404 if we don't have an activation code for this user\r\n raise HTTPNotFound()\r\n\r\n LOG.error(user.username)\r\n LOG.error(user.email)\r\n return {\r\n 'user': user,\r\n }", "def forgot_password(self, version):\n form=cgi.FieldStorage(\n fp=self.rfile,\n headers=self.headers,\n environ={'REQUEST_METHOD':'POST','CONTENT_TYPE':self.headers['Content-Type'],}\n )\n version=version.split('/')[0]\n host = self.headers['Host']\n\n data={'email':form['email'].value}\n user = UserServices()\n response_data = user.forgot(data,host,version)\n return response_data", "def post(self, request, *args, **kwargs):\n data = request.data\n serializer = self.serializer_class(data=data)\n serializer.is_valid(raise_exception=True)\n try:\n user = get_object_or_404(User, email=data['email'])\n current_site = get_current_site(request)\n token = password_rest_token.make_token(user),\n uidb64 = urlsafe_base64_encode(force_bytes(data['email'])).decode()\n body = json.dumps({\n 'message': 'Please use the url below to rest your password,\\\n This expires after an hour, Thank you.',\n 'domain': current_site.domain + f'/api/reset/{uidb64}/{token[0]}',\n })\n from_email = settings.DEFAULT_FROM_EMAIL\n to_email = data['email']\n subject = 'Confirm Your Article Account Password Reset'\n send_mail(subject, body, from_email, [\n to_email], fail_silently=False)\n response = {\n 'message': 'Please check your email to confirm rest password',\n 'status_code': 
status.HTTP_200_OK}\n except Exception as e:\n response = {'error': e, 'status_code': status.HTTP_400_BAD_REQUEST}\n return Response(response, content_type='text/json')", "def post(self):\n data = request.get_json()\n user = actions.get_user_by_email(data['email'])\n html = '<p>To reset your password </p>'\n subject = 'Request for changing password, ' + user['username']\n actions.send_email(data['email'], user['username'], user['password'], subject,\n '/reset_password/', html, False)\n pass", "def passwordless():\n if current_app.config['DRIBDAT_NOT_REGISTER'] or \\\n not current_app.config['MAIL_SERVER']:\n flash(\"Passwordless login currently not possible.\", 'warning')\n return redirect(url_for(\"auth.login\", local=1))\n form = EmailForm(request.form)\n if not (form.is_submitted() and form.validate()):\n flash_errors(form)\n return redirect(url_for('auth.forgot'))\n # Continue with user activation\n flash(\n \"If your account exists, you will shortly receive \"\n + \"an activation mail. Check your Spam folder if you do not. \"\n + \"Then click the link in that e-mail to log into this application.\",\n 'success')\n a_user = User.query.filter_by(email=form.email.data).first()\n if a_user:\n # Continue with reset\n user_activation(a_user)\n else:\n current_app.logger.warn('User not found: %s' % form.email.data)\n # Don't let people spy on your address\n return redirect(url_for(\"auth.login\"))", "def test_password_policy_on_password_reset(self):\r\n staff_email, _ = self._setup_user(is_staff=True, password='foofoo')\r\n\r\n success_msg = 'Your Password Reset is Complete'\r\n\r\n # try to reset password, it should fail\r\n user = User.objects.get(email=staff_email)\r\n token = default_token_generator.make_token(user)\r\n uidb36 = int_to_base36(user.id)\r\n\r\n # try to do a password reset with the same password as before\r\n resp = self.client.post('/password_reset_confirm/{0}-{1}/'.format(uidb36, token), {\r\n 'new_password1': 'foo',\r\n 'new_password2': 'foo',\r\n }, follow=True)\r\n\r\n self.assertNotIn(\r\n success_msg,\r\n resp.content\r\n )\r\n\r\n # try to reset password with a long enough password\r\n user = User.objects.get(email=staff_email)\r\n token = default_token_generator.make_token(user)\r\n uidb36 = int_to_base36(user.id)\r\n\r\n # try to do a password reset with the same password as before\r\n resp = self.client.post('/password_reset_confirm/{0}-{1}/'.format(uidb36, token), {\r\n 'new_password1': 'foofoo',\r\n 'new_password2': 'foofoo',\r\n }, follow=True)\r\n\r\n self.assertIn(\r\n success_msg,\r\n resp.content\r\n )", "def password():\n\n if request.method == 'POST':\n print 'Changing password'\n # query for user's hash of password\n pw_hash = datastore.get_user_by_user_id(engine, session['user_id'])['hash']\n\n # check all boxes filled, old password is correct, new and confirmation match\n if not request.form.get('old') or not check_password_hash(pw_hash, request.form.get('old')):\n flash('Incorrect old password!', 'danger')\n return render_template('password.html')\n elif not request.form.get('new') or not request.form.get('confirmation'):\n flash('Must confirm new password!', 'danger')\n return render_template('password.html')\n elif not request.form.get('new') == request.form.get('confirmation'):\n flash('New passwords don\\'t match!', 'danger')\n return render_template('password.html')\n\n # update hash in database\n datastore.update_password_hash(engine, session['user_id'], generate_password_hash(request.form.get('new')))\n\n # redirect to portfolio\n 
flash('Password changed!', 'info')\n print 'Password changed!'\n return redirect(url_for('index'))\n\n else:\n print 'Loading change password page'\n return render_template('password.html')", "def test_reset_password_good_token(self, reset_confirm):\r\n\r\n good_reset_req = self.request_factory.get('/password_reset_confirm/{0}-{1}/'.format(self.uidb36, self.token))\r\n password_reset_confirm_wrapper(good_reset_req, self.uidb36, self.token)\r\n confirm_kwargs = reset_confirm.call_args[1]\r\n self.assertEquals(confirm_kwargs['uidb36'], self.uidb36)\r\n self.assertEquals(confirm_kwargs['token'], self.token)\r\n self.user = User.objects.get(pk=self.user.pk)\r\n self.assertTrue(self.user.is_active)" ]
[ "0.77497625", "0.7739758", "0.7730872", "0.77258337", "0.76405317", "0.75555956", "0.74249434", "0.7417758", "0.7381122", "0.7303035", "0.7288763", "0.72742236", "0.7218797", "0.713395", "0.7124694", "0.70419747", "0.69283503", "0.6910953", "0.69020367", "0.6885189", "0.6881159", "0.6879662", "0.6868552", "0.68663836", "0.6863451", "0.68610513", "0.6849947", "0.67858964", "0.6755484", "0.67546755", "0.67495203", "0.6721817", "0.6707581", "0.6695456", "0.66656333", "0.66610855", "0.6658449", "0.66402173", "0.658016", "0.65776056", "0.65760136", "0.65478873", "0.65382713", "0.6524348", "0.6506304", "0.6500373", "0.64898205", "0.64834714", "0.643027", "0.6428508", "0.64283526", "0.64214057", "0.64075536", "0.6400923", "0.6378915", "0.6373376", "0.6365661", "0.6359789", "0.63568205", "0.63531446", "0.63511664", "0.6349402", "0.6273192", "0.62594426", "0.62567776", "0.62448025", "0.62388027", "0.623432", "0.62264556", "0.6206196", "0.61824006", "0.6170853", "0.61672735", "0.6162163", "0.61562794", "0.61349237", "0.61319715", "0.6127707", "0.6122237", "0.6074485", "0.6074117", "0.60452485", "0.60452133", "0.6030888", "0.6007962", "0.60009754", "0.5999346", "0.59950024", "0.59926355", "0.5975218", "0.59597665", "0.59593976", "0.594843", "0.5936191", "0.59296966", "0.5916617", "0.59151906", "0.58953863", "0.5888733", "0.58703095" ]
0.73993194
8
Everything important about the chip
def __init__(self, channel=None, bpe=None, reservoir=None, electrodes=None, fluid_handling_system=None, material_in_optical_path=None, thickness_in_optical_path=None):
        #self.material = material # deprecated so the channel class can hold this information
        self.channel = channel
        self.bpe = bpe
        self.electrodes = electrodes
        self.fluid_handling_system = fluid_handling_system
        self.material_in_optical_path = material_in_optical_path
        self.thickness_in_optical_path = thickness_in_optical_path
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n ChipData.ChipData.__init__(self)", "def read_chip_info(self):\n return [self.read_chip_type(), self.read_chip_revision()]", "def support(self):", "def show(self):\n # Disable IRQ to improve speed\n with NoIRQ():\n for chip in range(NB_CHIPS):\n self._select(chip)\n row = 0 if chip in (0, 1) else 1\n col = 0 if chip in (0, 2) else 1\n data = self.get_ht1632_data(row, col)\n green = (is_green(value) for value in data)\n red = (is_red(value) for value in data)\n self._write_data(green, red)", "def plugh():", "def run_all(self):\n self.formatter.section_start('Firmware info')\n self.analyse_firmware_id() # Always do this first!\n # If the chip has not panicked, the preserved\n # block is populated with random values, therefore\n # until the magic_value is implemented, do a try and except\n self.analyse_panic_state()\n self.analyse_slt() # Kind of pointless but why not.\n self.formatter.section_end()", "def presenetCar():", "def produce_features(self, chip):\n raise NotImplementedError(\"produce_features() not implemented\")", "def _init_hardware(self):\n return", "def probe(self):", "def bioinfo():\n\n pass", "def update(self):\n try:\n if not self._sysinfo:\n self._sysinfo = self.smartplug.sys_info\n self._mac = self.smartplug.mac\n self._model = self.smartplug.model\n if self.smartplug.context is None:\n self._alias = self.smartplug.alias\n self._device_id = self._mac\n else:\n self._alias = self._plug_from_context[\"alias\"]\n self._device_id = self.smartplug.context\n\n if self.smartplug.context is None:\n self._state = self.smartplug.state == self.smartplug.SWITCH_STATE_ON\n else:\n self._state = self._plug_from_context[\"state\"] == 1\n\n if self.smartplug.has_emeter:\n emeter_readings = self.smartplug.get_emeter_realtime()\n\n self._emeter_params[ATTR_CURRENT_POWER_W] = \"{:.2f}\".format(\n emeter_readings[\"power\"]\n )\n self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_readings[\"total\"]\n )\n self._emeter_params[ATTR_VOLTAGE] = \"{:.1f}\".format(\n emeter_readings[\"voltage\"]\n )\n self._emeter_params[ATTR_CURRENT_A] = \"{:.2f}\".format(\n emeter_readings[\"current\"]\n )\n\n emeter_statics = self.smartplug.get_emeter_daily()\n try:\n self._emeter_params[ATTR_TODAY_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_statics[int(time.strftime(\"%e\"))]\n )\n except KeyError:\n # Device returned no daily history\n pass\n\n self._available = True\n\n except (SmartDeviceException, OSError) as ex:\n if self._available:\n _LOGGER.warning(\n \"Could not read state for %s: %s\", self.smartplug.host, ex\n )\n self._available = False", "def info(self):", "def info(self):", "def __init__(self):\r\n # Check device ID.\r\n chip_id = self._read_byte(_BME280_REGISTER_CHIPID)\r\n if _BME280_CHIPID != chip_id:\r\n raise RuntimeError('Failed to find BME280! Chip ID 0x%x' % chip_id)\r\n self._write_register_byte(_BME280_REGISTER_SOFTRESET, 0xB6)\r\n time.sleep(0.5)\r\n self._read_coefficients()\r\n self.sea_level_pressure = 1013.25\r\n \"\"\"Pressure in hectoPascals at sea level. 
Used to calibrate `altitude`.\"\"\"\r\n # turn on humidity oversample 16x\r\n self._write_register_byte(_BME280_REGISTER_CTRL_HUM, 0x03)\r\n self._t_fine = None", "def __init__(self):\n self._device_info = None", "def serial(self):", "def _pending_chips(self):\n assert not (self._deleted and self._new)\n # NOTE: This is ce4 specific code and could be factored out.\n deliver_at = gametime.now()\n\n chips = []\n # If this is a DELETE, send an empty dict.\n if self._deleted:\n chips.append({\n 'action':DELETE,\n 'path':self._chip_path(),\n 'value':{},\n 'time':deliver_at\n })\n # If this is an ADD, add all fields and collections.\n elif self._new:\n chips.append({\n 'action':ADD,\n 'path':self._chip_path(),\n 'value':self.to_struct(),\n 'time':deliver_at\n })\n # If this is a MOD, add only the changed fields and id_field.\n elif len(self._changed_fields) > 0:\n chips.append({\n 'action':MOD,\n 'path':self._chip_path(),\n 'value':self.to_struct(fields=self._changed_fields),\n 'time':deliver_at})\n return chips", "def info(rom):\n rom = ROM(rom, detect=True)", "def manage_info():", "def _get_info_about_sensor(self):\n reg_id = 0xD0\n chip_id, chip_version = self.bus.read_i2c_block_data(self.address,\n reg_id,\n 2)\n return chip_id, chip_version", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def pick_up(self):", "def _read_cardiochip(self):\n cur_leadstatus = 0\n sample_count =0\n while self.connected:\n sample_count+=1\n #check for sync bytes\n readbyte = ord(self.ser.read(1))\n #print readbyte, SYNC_BYTE\n if readbyte != SYNC_BYTE:\n continue\n readbyte = ord(self.ser.read(1))\n if readbyte != SYNC_BYTE:\n continue\n\n #parse length byte\n while True:\n pLength = ord(self.ser.read(1))\n if pLength != SYNC_BYTE:\n break\n if pLength > 169:\n continue\n #print \"L: %i\" % pLength\n\n # collect payload bytes\n payload = self.ser.read(pLength)\n payload = [ord(x) for x in payload] #convert to int from string\n #print \"payload: \" + str(payload).strip('[]')\n # ones complement inverse of 8-bit payload sum\n checksum = sum(payload) & 0xFF\n checksum = ~checksum & 0xFF\n\n # catch and verify checksum byte\n chk = ord(self.ser.read(1))\n #print \"chk: \" + str(checksum)\n if chk != checksum:\n print \"checksum error, %i != %i\" % (chk, checksum)\n continue\n\n output = self._parseData(payload)\n\n lead_status = next(( d for d in output if 'leadoff' in d), None)\n if lead_status is not None:\n if cur_leadstatus != lead_status['leadoff']:\n #we have a change\n if lead_status['leadoff']==200:\n print \"LEAD ON\"\n elif lead_status['leadoff']==0:\n print \"LEAD OFF\"\n cur_leadstatus = lead_status['leadoff']\n\n # store the output data in a queue\n # first, create a tuple with the sample index and dict with the timestamp and ecg\n ecgdict = next(((i,d) for i,d in enumerate(output) if 'ecg_raw' in d), None)\n if ecgdict is not None and sample_count>self.Fs*2:\n #let's just ignore the first 2 seconds of crappy data\n ecgdict[1]['leadoff'] = cur_leadstatus\n #print ecgdict[1]\n self.ecg_buffer.put(ecgdict[1]) # this should save the ecg and timestamp keys\n\n return", "def CL(self):", "def use(self):", "def _get_information(self):\n pass", "def enable(self):", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self):\n self.Revision = '0'\n self.Serial = 
None\n try:\n with open('/proc/cpuinfo','r') as f:\n for line in f:\n splitLine = line.split(':')\n if len(splitLine) < 2:\n continue\n key = splitLine[0].strip()\n value = splitLine[1].strip()\n if key == 'Revision':\n self.Revision = value\n if key == 'Serial' and value != len(value) * '0':\n self.Serial = value\n except:\n exception (\"Error reading cpuinfo\")\n self.model = 'Unknown'\n if self.Revision == 'Beta':\n self.model = 'Raspberry Pi Model B (Beta)'\n if self.Revision in ('000d', '000e', '000f', '0002', '0003', '0004', '0005', '0006'):\n self.model = 'Raspberry Pi Model B'\n if self.Revision in ('0007', '0008', '0009'):\n self.model = 'Raspberry Pi Model A'\n if self.Revision in ('0010', '0013', '900032'):\n self.model = 'Raspberry Pi Model B +'\n if self.Revision in ('0011', '0014'):\n self.model = 'Raspberry Pi Compute Module'\n if self.Revision in ('0012', '0015'):\n self.model = 'Raspberry Pi Model A+'\n if self.Revision in ('a01040', 'a01041', 'a21041', 'a22042'):\n self.model = 'Raspberry Pi 2 Model B'\n if self.Revision in ('900092', '900093', '920093'):\n self.model = 'Raspberry Pi Zero'\n if self.Revision in ('9000c1',):\n self.model = 'Raspberry Pi Zero W'\n if self.Revision in ('a02082', 'a22082', 'a32082'):\n self.model = 'Raspberry Pi 3 Model B' \n if self.Revision in ('a020d3'):\n self.model = 'Raspberry Pi 3 Model B+'\n if self.Revision in ('a020a0'):\n self.model = 'Raspberry Pi Compute Module 3'\n if 'Rockchip' in CPU_HARDWARE:\n self.model = 'Tinker Board'\n self.manufacturer = 'Element14/Premier Farnell'\n if self.Revision in ('a01041', '900092', 'a02082', '0012', '0011', '0010', '000e', '0008', '0004', 'a020d3', 'a01040', 'a020a0'):\n self.manufacturer = 'Sony, UK'\n if self.Revision in ('a32082'):\n self.manufacturer = 'Sony, Japan'\n if self.Revision in ('0014', '0015', 'a21041', 'a22082', '920093'):\n self.manufacturer = 'Embest, China'\n if self.Revision in ('0005', '0009', '000f'):\n self.manufacturer = 'Qisda'\n if self.Revision in ('0006', '0007', '000d'):\n self.manufacturer = 'Egoman'\n if self.Revision == '0000':\n if 'Rockchip' in CPU_HARDWARE:\n self.manufacturer = 'ASUS'\n else:\n try:\n with open('/proc/device-tree/model', 'r') as model_file:\n for line in model_file:\n if 'BeagleBone' in line:\n index = line.index('BeagleBone')\n self.manufacturer = line[:index - 1].strip(' \\n\\t\\0')\n self.model = line[index:].strip(' \\n\\t\\0')\n break\n except:\n exception (\"Error reading model\")", "def addExtraDevices(self):\n \n # These tables were extracted from\n # pirates/src/piratesgui/GameOptions.py.\n \n ati_device_list = [ \n [\"ATI MOBILITY/RADEON X700\", 0x5653],\n [1, \"Radeon X1950 XTX Uber - Limited Edition\", 0x7248],\n [1, \"Radeon X1950 XTX Uber - Limited Edition Secondary\", 0x7268],\n [1, \"Radeon X800 CrossFire Edition\", 0x554D],\n [1, \"Radeon X800 CrossFire Edition Secondary\", 0x556D],\n [1, \"Radeon X850 CrossFire Edition\", 0x5D52],\n [1, \"Radeon X850 CrossFire Edition Secondary\", 0x5D72],\n [\"Radeon X550/X700 Series\", 0x564F],\n [\"ATI FireGL T2\", 0x4154],\n [\"ATI FireGL T2 Secondary\", 0x4174],\n [\"ATI FireGL V3100\", 0x5B64],\n [\"ATI FireGL V3100 Secondary\", 0x5B74],\n [\"ATI FireGL V3200\", 0x3E54],\n [\"ATI FireGL V3200 Secondary\", 0x3E74],\n [\"ATI FireGL V3300\", 0x7152],\n [\"ATI FireGL V3300 Secondary\", 0x7172],\n [\"ATI FireGL V3350\", 0x7153],\n [\"ATI FireGL V3350 Secondary\", 0x7173],\n [\"ATI FireGL V3400\", 0x71D2],\n [\"ATI FireGL V3400 Secondary\", 0x71F2],\n [\"ATI FireGL V5000\", 0x5E48],\n [\"ATI 
FireGL V5000 Secondary\", 0x5E68],\n [\"ATI FireGL V5100\", 0x5551],\n [\"ATI FireGL V5100 Secondary\", 0x5571],\n [\"ATI FireGL V5200\", 0x71DA],\n [\"ATI FireGL V5200 Secondary\", 0x71FA],\n [\"ATI FireGL V5300\", 0x7105],\n [\"ATI FireGL V5300 Secondary\", 0x7125],\n [\"ATI FireGL V7100\", 0x5550],\n [\"ATI FireGL V7100 Secondary\", 0x5570],\n [\"ATI FireGL V7200\", 0x5D50],\n [\"ATI FireGL V7200 \", 0x7104],\n [\"ATI FireGL V7200 Secondary\", 0x5D70],\n [\"ATI FireGL V7200 Secondary \", 0x7124],\n [\"ATI FireGL V7300\", 0x710E],\n [\"ATI FireGL V7300 Secondary\", 0x712E],\n [\"ATI FireGL V7350\", 0x710F],\n [\"ATI FireGL V7350 Secondary\", 0x712F],\n [\"ATI FireGL X1\", 0x4E47],\n [\"ATI FireGL X1 Secondary\", 0x4E67],\n [\"ATI FireGL X2-256/X2-256t\", 0x4E4B],\n [\"ATI FireGL X2-256/X2-256t Secondary\", 0x4E6B],\n [\"ATI FireGL X3-256\", 0x4A4D],\n [\"ATI FireGL X3-256 Secondary\", 0x4A6D],\n [\"ATI FireGL Z1\", 0x4147],\n [\"ATI FireGL Z1 Secondary\", 0x4167],\n [\"ATI FireMV 2200\", 0x5B65],\n [\"ATI FireMV 2200 Secondary\", 0x5B75],\n [\"ATI FireMV 2250\", 0x719B],\n [\"ATI FireMV 2250 Secondary\", 0x71BB],\n [\"ATI FireMV 2400\", 0x3151],\n [\"ATI FireMV 2400 Secondary\", 0x3171],\n [\"ATI FireStream 2U\", 0x724E],\n [\"ATI FireStream 2U Secondary\", 0x726E],\n [\"ATI MOBILITY FIRE GL 7800\", 0x4C58],\n [\"ATI MOBILITY FIRE GL T2/T2e\", 0x4E54],\n [\"ATI MOBILITY FireGL V3100\", 0x5464],\n [\"ATI MOBILITY FireGL V3200\", 0x3154],\n [\"ATI MOBILITY FireGL V5000\", 0x564A],\n [\"ATI MOBILITY FireGL V5000 \", 0x564B],\n [\"ATI MOBILITY FireGL V5100\", 0x5D49],\n [\"ATI MOBILITY FireGL V5200\", 0x71C4],\n [\"ATI MOBILITY FireGL V5250\", 0x71D4],\n [\"ATI MOBILITY FireGL V7100\", 0x7106],\n [\"ATI MOBILITY FireGL V7200\", 0x7103],\n [\"ATI MOBILITY RADEON\", 0x4C59],\n [\"ATI MOBILITY RADEON 7500\", 0x4C57],\n [\"ATI MOBILITY RADEON 9500\", 0x4E52],\n [\"ATI MOBILITY RADEON 9550\", 0x4E56],\n [\"ATI MOBILITY RADEON 9600/9700 Series\", 0x4E50],\n [\"ATI MOBILITY RADEON 9800\", 0x4A4E],\n [\"ATI Mobility Radeon HD 2300\", 0x7210],\n [\"ATI Mobility Radeon HD 2300 \", 0x7211],\n [\"ATI Mobility Radeon HD 2400\", 0x94C9],\n [\"ATI Mobility Radeon HD 2400 XT\", 0x94C8],\n [1, \"ATI Mobility Radeon HD 2600\", 0x9581],\n [1, \"ATI Mobility Radeon HD 2600 XT\", 0x9583],\n [\"ATI Mobility Radeon X1300\", 0x714A],\n [\"ATI Mobility Radeon X1300 \", 0x7149],\n [\"ATI Mobility Radeon X1300 \", 0x714B],\n [\"ATI Mobility Radeon X1300 \", 0x714C],\n [\"ATI Mobility Radeon X1350\", 0x718B],\n [\"ATI Mobility Radeon X1350 \", 0x718C],\n [\"ATI Mobility Radeon X1350 \", 0x7196],\n [\"ATI Mobility Radeon X1400\", 0x7145],\n [\"ATI Mobility Radeon X1450\", 0x7186],\n [\"ATI Mobility Radeon X1450 \", 0x718D],\n [\"ATI Mobility Radeon X1600\", 0x71C5],\n [\"ATI Mobility Radeon X1700\", 0x71D5],\n [\"ATI Mobility Radeon X1700 \", 0x71DE],\n [\"ATI Mobility Radeon X1700 XT\", 0x71D6],\n [1, \"ATI Mobility Radeon X1800\", 0x7102],\n [1, \"ATI Mobility Radeon X1800 XT\", 0x7101],\n [1, \"ATI Mobility Radeon X1900\", 0x7284],\n [1, \"ATI Mobility Radeon X2300\", 0x718A],\n [1, \"ATI Mobility Radeon X2300 \", 0x7188],\n [\"ATI MOBILITY RADEON X300\", 0x5461],\n [\"ATI MOBILITY RADEON X300 \", 0x5460],\n [\"ATI MOBILITY RADEON X300 \", 0x3152],\n [\"ATI MOBILITY RADEON X600\", 0x3150],\n [\"ATI MOBILITY RADEON X600 SE\", 0x5462],\n [\"ATI MOBILITY RADEON X700\", 0x5652],\n [\"ATI MOBILITY RADEON X700 \", 0x5653],\n [\"ATI MOBILITY RADEON X700 Secondary\", 0x5673],\n [1, \"ATI MOBILITY RADEON X800\", 0x5D4A],\n 
[1, \"ATI MOBILITY RADEON X800 XT\", 0x5D48],\n [\"ATI Radeon 9550/X1050 Series\", 0x4153],\n [\"ATI Radeon 9550/X1050 Series Secondary\", 0x4173],\n [\"ATI RADEON 9600 Series\", 0x4150],\n [\"ATI RADEON 9600 Series \", 0x4E51],\n [\"ATI RADEON 9600 Series \", 0x4151],\n [\"ATI RADEON 9600 Series \", 0x4155],\n [\"ATI RADEON 9600 Series \", 0x4152],\n [\"ATI RADEON 9600 Series Secondary\", 0x4E71],\n [\"ATI RADEON 9600 Series Secondary \", 0x4171],\n [\"ATI RADEON 9600 Series Secondary \", 0x4170],\n [\"ATI RADEON 9600 Series Secondary \", 0x4175],\n [\"ATI RADEON 9600 Series Secondary \", 0x4172],\n [1, \"ATI Radeon HD 2900 XT\", 0x9402],\n [1, \"ATI Radeon HD 2900 XT \", 0x9403],\n [1, \"ATI Radeon HD 2900 XT \", 0x9400],\n [1, \"ATI Radeon HD 2900 XT \", 0x9401],\n [\"ATI Radeon X1200 Series\", 0x791E],\n [\"ATI Radeon X1200 Series \", 0x791F],\n [1, \"ATI Radeon X1950 GT\", 0x7288],\n [1, \"ATI Radeon X1950 GT Secondary\", 0x72A8],\n [1, \"ATI RADEON X800 GT\", 0x554E],\n [1, \"ATI RADEON X800 GT Secondary\", 0x556E],\n [1, \"ATI RADEON X800 XL\", 0x554D],\n [1, \"ATI RADEON X800 XL Secondary\", 0x556D],\n [1, \"ATI RADEON X850 PRO\", 0x4B4B],\n [1, \"ATI RADEON X850 PRO Secondary\", 0x4B6B],\n [1, \"ATI RADEON X850 SE\", 0x4B4A],\n [1, \"ATI RADEON X850 SE Secondary\", 0x4B6A],\n [1, \"ATI RADEON X850 XT\", 0x4B49],\n [1, \"ATI RADEON X850 XT Platinum Edition\", 0x4B4C],\n [1, \"ATI RADEON X850 XT Platinum Edition Secondary\", 0x4B6C],\n [1, \"ATI RADEON X850 XT Secondary\", 0x4B69],\n [\"ATI Radeon Xpress 1200 Series\", 0x793F],\n [\"ATI Radeon Xpress 1200 Series \", 0x7941],\n [\"ATI Radeon Xpress 1200 Series \", 0x7942],\n [\"ATI Radeon Xpress Series\", 0x5A61],\n [\"ATI Radeon Xpress Series \", 0x5A63],\n [\"ATI Radeon Xpress Series \", 0x5A62],\n [\"ATI Radeon Xpress Series \", 0x5A41],\n [\"ATI Radeon Xpress Series \", 0x5A43],\n [\"ATI Radeon Xpress Series \", 0x5A42],\n [\"ATI Radeon Xpress Series \", 0x5954],\n [\"ATI Radeon Xpress Series \", 0x5854],\n [\"ATI Radeon Xpress Series \", 0x5955],\n [\"ATI Radeon Xpress Series \", 0x5974],\n [\"ATI Radeon Xpress Series \", 0x5874],\n [\"ATI Radeon Xpress Series \", 0x5975],\n [\"Radeon 9500\", 0x4144],\n [\"Radeon 9500 \", 0x4149],\n [\"Radeon 9500 PRO / 9700\", 0x4E45],\n [\"Radeon 9500 PRO / 9700 Secondary\", 0x4E65],\n [\"Radeon 9500 Secondary\", 0x4164],\n [\"Radeon 9500 Secondary \", 0x4169],\n [\"Radeon 9600 TX\", 0x4E46],\n [\"Radeon 9600 TX Secondary\", 0x4E66],\n [\"Radeon 9600TX\", 0x4146],\n [\"Radeon 9600TX Secondary\", 0x4166],\n [\"Radeon 9700 PRO\", 0x4E44],\n [\"Radeon 9700 PRO Secondary\", 0x4E64],\n [\"Radeon 9800\", 0x4E49],\n [\"Radeon 9800 PRO\", 0x4E48],\n [\"Radeon 9800 PRO Secondary\", 0x4E68],\n [\"Radeon 9800 SE\", 0x4148],\n [\"Radeon 9800 SE Secondary\", 0x4168],\n [\"Radeon 9800 Secondary\", 0x4E69],\n [\"Radeon 9800 XT\", 0x4E4A],\n [\"Radeon 9800 XT Secondary\", 0x4E6A],\n [\"Radeon X1300 / X1550 Series\", 0x7146],\n [\"Radeon X1300 / X1550 Series Secondary\", 0x7166],\n [\"Radeon X1300 Series\", 0x714E],\n [\"Radeon X1300 Series \", 0x715E],\n [\"Radeon X1300 Series \", 0x714D],\n [\"Radeon X1300 Series \", 0x71C3],\n [\"Radeon X1300 Series \", 0x718F],\n [\"Radeon X1300 Series Secondary\", 0x716E],\n [\"Radeon X1300 Series Secondary \", 0x717E],\n [\"Radeon X1300 Series Secondary \", 0x716D],\n [\"Radeon X1300 Series Secondary \", 0x71E3],\n [\"Radeon X1300 Series Secondary \", 0x71AF],\n [\"Radeon X1300/X1550 Series\", 0x7142],\n [\"Radeon X1300/X1550 Series \", 0x7180],\n [\"Radeon X1300/X1550 
Series \", 0x7183],\n [\"Radeon X1300/X1550 Series \", 0x7187],\n [\"Radeon X1300/X1550 Series Secondary\", 0x7162],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A0],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A3],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A7],\n [\"Radeon X1550 64-bit\", 0x7147],\n [\"Radeon X1550 64-bit \", 0x715F],\n [\"Radeon X1550 64-bit \", 0x719F],\n [\"Radeon X1550 64-bit Secondary\", 0x7167],\n [\"Radeon X1550 64-bit Secondary \", 0x717F],\n [\"Radeon X1550 Series\", 0x7143],\n [\"Radeon X1550 Series \", 0x7193],\n [\"Radeon X1550 Series Secondary\", 0x7163],\n [\"Radeon X1550 Series Secondary \", 0x71B3],\n [\"Radeon X1600 Pro / Radeon X1300 XT\", 0x71CE],\n [\"Radeon X1600 Pro / Radeon X1300 XT Secondary\", 0x71EE],\n [\"Radeon X1600 Series\", 0x7140],\n [\"Radeon X1600 Series \", 0x71C0],\n [\"Radeon X1600 Series \", 0x71C2],\n [\"Radeon X1600 Series \", 0x71C6],\n [\"Radeon X1600 Series \", 0x7181],\n [\"Radeon X1600 Series \", 0x71CD],\n [\"Radeon X1600 Series Secondary\", 0x7160],\n [\"Radeon X1600 Series Secondary \", 0x71E2],\n [\"Radeon X1600 Series Secondary \", 0x71E6],\n [\"Radeon X1600 Series Secondary \", 0x71A1],\n [\"Radeon X1600 Series Secondary \", 0x71ED],\n [\"Radeon X1600 Series Secondary \", 0x71E0],\n [\"Radeon X1650 Series\", 0x71C1],\n [\"Radeon X1650 Series \", 0x7293],\n [\"Radeon X1650 Series \", 0x7291],\n [\"Radeon X1650 Series \", 0x71C7],\n [\"Radeon X1650 Series Secondary\", 0x71E1],\n [\"Radeon X1650 Series Secondary \", 0x72B3],\n [\"Radeon X1650 Series Secondary \", 0x72B1],\n [\"Radeon X1650 Series Secondary \", 0x71E7],\n [1, \"Radeon X1800 Series\", 0x7100],\n [1, \"Radeon X1800 Series \", 0x7108],\n [1, \"Radeon X1800 Series \", 0x7109],\n [1, \"Radeon X1800 Series \", 0x710A],\n [1, \"Radeon X1800 Series \", 0x710B],\n [1, \"Radeon X1800 Series \", 0x710C],\n [1, \"Radeon X1800 Series Secondary\", 0x7120],\n [1, \"Radeon X1800 Series Secondary \", 0x7128],\n [1, \"Radeon X1800 Series Secondary \", 0x7129],\n [1, \"Radeon X1800 Series Secondary \", 0x712A],\n [1, \"Radeon X1800 Series Secondary \", 0x712B],\n [1, \"Radeon X1800 Series Secondary \", 0x712C],\n [1, \"Radeon X1900 Series\", 0x7243],\n [1, \"Radeon X1900 Series \", 0x7245],\n [1, \"Radeon X1900 Series \", 0x7246],\n [1, \"Radeon X1900 Series \", 0x7247],\n [1, \"Radeon X1900 Series \", 0x7248],\n [1, \"Radeon X1900 Series \", 0x7249],\n [1, \"Radeon X1900 Series \", 0x724A],\n [1, \"Radeon X1900 Series \", 0x724B],\n [1, \"Radeon X1900 Series \", 0x724C],\n [1, \"Radeon X1900 Series \", 0x724D],\n [1, \"Radeon X1900 Series \", 0x724F],\n [1, \"Radeon X1900 Series Secondary\", 0x7263],\n [1, \"Radeon X1900 Series Secondary \", 0x7265],\n [1, \"Radeon X1900 Series Secondary \", 0x7266],\n [1, \"Radeon X1900 Series Secondary \", 0x7267],\n [1, \"Radeon X1900 Series Secondary \", 0x7268],\n [1, \"Radeon X1900 Series Secondary \", 0x7269],\n [1, \"Radeon X1900 Series Secondary \", 0x726A],\n [1, \"Radeon X1900 Series Secondary \", 0x726B],\n [1, \"Radeon X1900 Series Secondary \", 0x726C],\n [1, \"Radeon X1900 Series Secondary \", 0x726D],\n [1, \"Radeon X1900 Series Secondary \", 0x726F],\n [1, \"Radeon X1950 Series\", 0x7280],\n [1, \"Radeon X1950 Series \", 0x7240],\n [1, \"Radeon X1950 Series \", 0x7244],\n [1, \"Radeon X1950 Series Secondary\", 0x72A0],\n [1, \"Radeon X1950 Series Secondary \", 0x7260],\n [1, \"Radeon X1950 Series Secondary \", 0x7264],\n [\"Radeon X300/X550/X1050 Series\", 0x5B60],\n [\"Radeon X300/X550/X1050 Series \", 
0x5B63],\n [\"Radeon X300/X550/X1050 Series Secondary\", 0x5B73],\n [\"Radeon X300/X550/X1050 Series Secondary \", 0x5B70],\n [\"Radeon X550/X700 Series \", 0x5657],\n [\"Radeon X550/X700 Series Secondary\", 0x5677],\n [\"Radeon X600 Series\", 0x5B62],\n [\"Radeon X600 Series Secondary\", 0x5B72],\n [\"Radeon X600/X550 Series\", 0x3E50],\n [\"Radeon X600/X550 Series Secondary\", 0x3E70],\n [\"Radeon X700\", 0x5E4D],\n [\"Radeon X700 PRO\", 0x5E4B],\n [\"Radeon X700 PRO Secondary\", 0x5E6B],\n [\"Radeon X700 SE\", 0x5E4C],\n [\"Radeon X700 SE Secondary\", 0x5E6C],\n [\"Radeon X700 Secondary\", 0x5E6D],\n [\"Radeon X700 XT\", 0x5E4A],\n [\"Radeon X700 XT Secondary\", 0x5E6A],\n [\"Radeon X700/X550 Series\", 0x5E4F],\n [\"Radeon X700/X550 Series Secondary\", 0x5E6F],\n [1, \"Radeon X800 GT\", 0x554B],\n [1, \"Radeon X800 GT Secondary\", 0x556B],\n [1, \"Radeon X800 GTO\", 0x5549],\n [1, \"Radeon X800 GTO \", 0x554F],\n [1, \"Radeon X800 GTO \", 0x5D4F],\n [1, \"Radeon X800 GTO Secondary\", 0x5569],\n [1, \"Radeon X800 GTO Secondary \", 0x556F],\n [1, \"Radeon X800 GTO Secondary \", 0x5D6F],\n [1, \"Radeon X800 PRO\", 0x4A49],\n [1, \"Radeon X800 PRO Secondary\", 0x4A69],\n [1, \"Radeon X800 SE\", 0x4A4F],\n [1, \"Radeon X800 SE Secondary\", 0x4A6F],\n [1, \"Radeon X800 Series\", 0x4A48],\n [1, \"Radeon X800 Series \", 0x4A4A],\n [1, \"Radeon X800 Series \", 0x4A4C],\n [1, \"Radeon X800 Series \", 0x5548],\n [1, \"Radeon X800 Series Secondary\", 0x4A68],\n [1, \"Radeon X800 Series Secondary \", 0x4A6A],\n [1, \"Radeon X800 Series Secondary \", 0x4A6C],\n [1, \"Radeon X800 Series Secondary \", 0x5568],\n [1, \"Radeon X800 VE\", 0x4A54],\n [1, \"Radeon X800 VE Secondary\", 0x4A74],\n [1, \"Radeon X800 XT\", 0x4A4B],\n [1, \"Radeon X800 XT \", 0x5D57],\n [1, \"Radeon X800 XT Platinum Edition\", 0x4A50],\n [1, \"Radeon X800 XT Platinum Edition \", 0x554A],\n [1, \"Radeon X800 XT Platinum Edition Secondary\", 0x4A70],\n [1, \"Radeon X800 XT Platinum Edition Secondary \", 0x556A],\n [1, \"Radeon X800 XT Secondary\", 0x4A6B],\n [1, \"Radeon X800 XT Secondary \", 0x5D77],\n [1, \"Radeon X850 XT\", 0x5D52],\n [1, \"Radeon X850 XT Platinum Edition\", 0x5D4D],\n [1, \"Radeon X850 XT Platinum Edition Secondary\", 0x5D6D],\n [1, \"Radeon X850 XT Secondary\", 0x5D72],\n ]\n vendorId = 0x1002\n for entry in ati_device_list:\n if len(entry) == 3:\n flag, deviceName, deviceId = entry\n else:\n deviceName, deviceId = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()\n \n nvidia_device_list = [\n [0x014F, \"GeForce 6200\"],\n [0x00F3, \"GeForce 6200\"],\n [0x0221, \"GeForce 6200\"],\n [0x0163, \"GeForce 6200 LE\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0161, \"GeForce 6200 TurboCache(TM)\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0160, \"GeForce 6500\"],\n [1, 0x0141, \"GeForce 6600\"],\n [1, 0x00F2, \"GeForce 6600\"],\n [1, 0x0140, \"GeForce 6600 GT\"],\n [1, 0x00F1, \"GeForce 6600 GT\"],\n [1, 0x0142, \"GeForce 6600 LE\"],\n [1, 0x00F4, \"GeForce 6600 LE\"],\n [1, 0x0143, \"GeForce 6600 VE\"],\n [1, 0x0147, \"GeForce 6700 XL\"],\n [1, 0x0041, \"GeForce 6800\"],\n [1, 0x00C1, \"GeForce 6800\"],\n [1, 0x0047, \"GeForce 6800 GS\"],\n [1, 0x00F6, \"GeForce 6800 GS\"],\n [1, 0x00C0, \"GeForce 6800 GS\"],\n [1, 0x0045, \"GeForce 6800 GT\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x00C2, \"GeForce 6800 LE\"],\n [1, 0x0040, \"GeForce 6800 Ultra\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x0043, \"GeForce 6800 XE\"],\n [1, 0x0048, \"GeForce 6800 XT\"],\n 
[1, 0x0218, \"GeForce 6800 XT\"],\n [1, 0x00C3, \"GeForce 6800 XT\"],\n [0x01DF, \"GeForce 7300 GS\"],\n [0x0393, \"GeForce 7300 GT\"],\n [0x01D1, \"GeForce 7300 LE\"],\n [0x01D3, \"GeForce 7300 SE\"],\n [0x01DD, \"GeForce 7500 LE\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x02E1, \"GeForce 7600 GS\"],\n [1, 0x0391, \"GeForce 7600 GT\"],\n [1, 0x0394, \"GeForce 7600 LE\"],\n [1, 0x00F5, \"GeForce 7800 GS\"],\n [1, 0x0092, \"GeForce 7800 GT\"],\n [1, 0x0091, \"GeForce 7800 GTX\"],\n [1, 0x0291, \"GeForce 7900 GT/GTO\"],\n [1, 0x0290, \"GeForce 7900 GTX\"],\n [1, 0x0293, \"GeForce 7900 GX2\"],\n [1, 0x0294, \"GeForce 7950 GX2\"],\n [0x0322, \"GeForce FX 5200\"],\n [0x0321, \"GeForce FX 5200 Ultra\"],\n [0x0323, \"GeForce FX 5200LE\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0312, \"GeForce FX 5600\"],\n [0x0311, \"GeForce FX 5600 Ultra\"],\n [0x0314, \"GeForce FX 5600XT\"],\n [0x0342, \"GeForce FX 5700\"],\n [0x0341, \"GeForce FX 5700 Ultra\"],\n [0x0343, \"GeForce FX 5700LE\"],\n [0x0344, \"GeForce FX 5700VE\"],\n [0x0302, \"GeForce FX 5800\"],\n [0x0301, \"GeForce FX 5800 Ultra\"],\n [0x0331, \"GeForce FX 5900\"],\n [0x0330, \"GeForce FX 5900 Ultra\"],\n [0x0333, \"GeForce FX 5950 Ultra\"],\n [0x0324, \"GeForce FX Go5200 64M\"],\n [0x031A, \"GeForce FX Go5600\"],\n [0x0347, \"GeForce FX Go5700\"],\n [0x0167, \"GeForce Go 6200/6400\"],\n [0x0168, \"GeForce Go 6200/6400\"],\n [1, 0x0148, \"GeForce Go 6600\"],\n [1, 0x00c8, \"GeForce Go 6800\"],\n [1, 0x00c9, \"GeForce Go 6800 Ultra\"],\n [1, 0x0098, \"GeForce Go 7800\"],\n [1, 0x0099, \"GeForce Go 7800 GTX\"],\n [1, 0x0298, \"GeForce Go 7900 GS\"],\n [1, 0x0299, \"GeForce Go 7900 GTX\"],\n [0x0185, \"GeForce MX 4000\"],\n [0x00FA, \"GeForce PCX 5750\"],\n [0x00FB, \"GeForce PCX 5900\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0111, \"GeForce2 MX200\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0200, \"GeForce3\"],\n [0x0201, \"GeForce3 Ti200\"],\n [0x0202, \"GeForce3 Ti500\"],\n [0x0172, \"GeForce4 MX 420\"],\n [0x0171, \"GeForce4 MX 440\"],\n [0x0181, \"GeForce4 MX 440 with AGP8X\"],\n [0x0173, \"GeForce4 MX 440-SE\"],\n [0x0170, \"GeForce4 MX 460\"],\n [0x0253, \"GeForce4 Ti 4200\"],\n [0x0281, \"GeForce4 Ti 4200 with AGP8X\"],\n [0x0251, \"GeForce4 Ti 4400\"],\n [0x0250, \"GeForce4 Ti 4600\"],\n [0x0280, \"GeForce4 Ti 4800\"],\n [0x0282, \"GeForce4 Ti 4800SE\"],\n [0x0203, \"Quadro DCC\"],\n [0x0309, \"Quadro FX 1000\"],\n [0x034E, \"Quadro FX 1100\"],\n [0x00FE, \"Quadro FX 1300\"],\n [0x00CE, \"Quadro FX 1400\"],\n [0x0308, \"Quadro FX 2000\"],\n [0x0338, \"Quadro FX 3000\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x004E, \"Quadro FX 4000\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x009D, \"Quadro FX 4500\"],\n [1, 0x029F, \"Quadro FX 4500 X2\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0x014E, \"Quadro FX 540\"],\n [0x014C, \"Quadro FX 540 MXM\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0X033F, \"Quadro FX 700\"],\n [0x034C, \"Quadro FX Go1000\"],\n [0x00CC, \"Quadro FX Go1400\"],\n [0x031C, \"Quadro FX Go700\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x032A, \"Quadro NVS 280 PCI\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [0x0165, \"Quadro NVS 285\"],\n [0x017A, \"Quadro NVS\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x0113, \"Quadro2 MXR/EX\"],\n [0x017A, \"Quadro NVS\"],\n [0x018B, \"Quadro4 380 XGL\"],\n [0x0178, 
\"Quadro4 550 XGL\"],\n [0x0188, \"Quadro4 580 XGL\"],\n [0x025B, \"Quadro4 700 XGL\"],\n [0x0259, \"Quadro4 750 XGL\"],\n [0x0258, \"Quadro4 900 XGL\"],\n [0x0288, \"Quadro4 980 XGL\"],\n [0x028C, \"Quadro4 Go700\"],\n [1, 0x0295, \"NVIDIA GeForce 7950 GT\"],\n [0x03D0, \"NVIDIA GeForce 6100 nForce 430\"],\n [0x03D1, \"NVIDIA GeForce 6100 nForce 405\"],\n [0x03D2, \"NVIDIA GeForce 6100 nForce 400\"],\n [0x0241, \"NVIDIA GeForce 6150 LE\"],\n [0x0242, \"NVIDIA GeForce 6100\"],\n [0x0245, \"NVIDIA Quadro NVS 210S / NVIDIA GeForce 6150LE\"],\n [1, 0x029C, \"NVIDIA Quadro FX 5500\"],\n [1, 0x0191, \"NVIDIA GeForce 8800 GTX\"],\n [1, 0x0193, \"NVIDIA GeForce 8800 GTS\"],\n [1, 0x0400, \"NVIDIA GeForce 8600 GTS\"],\n [1, 0x0402, \"NVIDIA GeForce 8600 GT\"],\n [0x0421, \"NVIDIA GeForce 8500 GT\"],\n [0x0422, \"NVIDIA GeForce 8400 GS\"],\n [0x0423, \"NVIDIA GeForce 8300 GS\"],\n ]\n vendorId = 0x10de\n for entry in nvidia_device_list:\n if len(entry) == 3:\n flag, deviceId, deviceName = entry\n else:\n deviceId, deviceName = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()", "def _data(self, data):\n# \"\"\"Send data to spi bus of display chip, most DC pin need set to HIGH \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 1 )\n# self._spi.writebytes( data )\n raise NotImplementedError", "def __str__(self):\n s = \"Filename : %s\\n\" % self.fname\n s += \"Data size : %d x %d x %d\\n\" % (self._size[::-1])\n s += \"CCD Chip Size : %d x %d\\n\" % self._chipSize[::-1]\n s += \"File date : %s\\n\" % time.asctime(self._filedate)\n s += \"Exposure Time : %f\\n\" % self.Exposure\n s += \"Num ROI : %d\\n\" % self.NumROI\n s += \"Num ROI Exp : %d\\n\" % self.NumROIExperiment\n s += \"Contoller Ver.: %d\\n\" % self.ControllerVersion\n s += \"Logic Output : %d\\n\" % self.LogicOutput\n #self.AppHiCapLowNoise = self._readInt(4)\n s += \"Timing Mode : %d\\n\" % self.TimingMode\n s += \"Det. Temp : %d\\n\" % self.DetTemperature\n s += \"Det. Type : %d\\n\" % self.DetectorType\n s += \"Trigger Diode : %d\\n\" % self.TriggerDiode\n s += \"Delay Time : %d\\n\" % self.DelayTime\n s += \"Shutter Cont. : %d\\n\" % self.ShutterControl\n s += \"Absorb Live : %d\\n\" % self.AbsorbLive\n s += \"Absorb Mode : %d\\n\" % self.AbsorbMode\n s += \"Virtual Chip : %d\\n\" % self.CanDoVirtualChip\n s += \"Thresh. Min L : %d\\n\" % self.ThresholdMinLive\n s += \"Thresh. Min : %d\\n\" % self.ThresholdMin\n s += \"Thresh. Max L : %d\\n\" % self.ThresholdMaxLive\n s += \"Thresh. Max : %d\\n\" % self.ThresholdMax\n s += \"Geometric Op : %d\\n\" % self.GeometricOps\n s += \"ADC Offset : %d\\n\" % self.ADCOffset\n s += \"ADC Rate : %d\\n\" % self.ADCRate\n s += \"ADC Type : %d\\n\" % self.ADCType\n s += \"ADC Resol. : %d\\n\" % self.ADCRes\n s += \"ADC Bit. Adj. 
: %d\\n\" % self.ADCBitAdj\n s += \"ADC Gain : %d\\n\" % self.Gain\n \n i = 0\n for roi in self.allROI:\n s += \"ROI %-4d : %-5d %-5d %-5d %-5d %-5d %-5d\\n\" % (i,roi[0], roi[1], roi[2],\n roi[3], roi[4], roi[5])\n i += 1\n \n s += \"\\nComments :\\n\"\n i = 0\n for c in self._comments:\n s += \"%-3d : \" % i\n i += 1\n s += c\n s += \"\\n\"\n return s", "def __init__(self, hdw=['Soundcard'], devicename='dev1'):\n self.debugFlag = False\n self.task = None # NI Task\n self.required_hardware = hdw # Require specific hardware \n self.hardware = [] # list of hardware actually found on this system\n self.find_hardware(device_info={'devicename': devicename}) # population the self.hardware list", "def degibber(self):", "def update_firmware(self) -> str:", "def identifyChip(chipType):\n with open('../illumina_files/illumina_dict.pickle', \"rb\") as f:\n chipDict = pickle.load(f)\n\n values = chipDict[chipType]\n\n print('BPM: ' + values[0] + '\\n')\n print('EGT: ' + values[1] + '\\n')\n print('CSV: ' + values[2] + '\\n')\n\n return values[0], values[1], values[2]", "def info() -> None:", "def __init__(self, address=0x76):\n self.address = address\n self.bus = self._initialize_bus()\n\n self.chip_id, self.chip_version = self._get_info_about_sensor()", "async def identify(self):\n await self.send({\n \"op\": 2,\n \"d\" : {\n \"token\" : self.client.token,\n \"properties\": {\n \"$os\" : platform,\n \"$browser\": \"SpeedCord\",\n \"$device\" : \"SpeedCord\"\n },\n \"intents\" : self.client.intents,\n \"shard\" : (self.id, self.client.shard_count)\n }\n })", "def bootloader() -> NoReturn:", "def __set_chips(self):\n\n # Scan filesystem\n root_files = [root_file for root_file in os.walk(self.dataset_path)]\n\n # Decode truth.txt file\n truth_files = [os.path.join(walked[0], 'truth.txt') for walked in root_files if 'truth.txt' in walked[2]]\n if len(truth_files) == 0:\n raise IOError(\"No truth file found.\")\n elif len(truth_files) > 1:\n raise IOError(\"Too many truth files available.\")\n\n truth_data = self.__decode_truth_file(truth_files.pop())\n if len(truth_data) < 1:\n raise IOError(\"No truth loaded\")\n if self.__debug:\n print(\"{} truth records loaded.\".format(len(truth_data)))\n\n # Index all image chips\n file_paths = [[os.path.join(walked[0], wfile) for wfile in walked[2]] for walked in root_files]\n chip_idx = dict(filter(lambda t: t is not None, map(self.__index_chip, itertools.chain(*file_paths))))\n\n if len(chip_idx) != len(truth_data):\n raise IOError(\"Number of truth records not equal to number of chips.\")\n if self.__debug:\n print(\"{} image chips loaded.\".format(len(chip_idx)))\n\n # Create and store chips\n self.chips = {meta['file']: self.__create_chip(meta, truth_data[idx]) for idx, meta in chip_idx.items()}\n if self.__debug:\n print(\"{} chip.Chips loaded.\".format(len(self.chips)))", "def get_info(self):\n return \"TODO !\"", "def getInfo():", "def _default_setup(self):\n self._n_configs = 1\n self._sn_size = 100\n self._nt = 10000\n self._active_brdch = np.zeros(\n (), dtype=[(\"SIS 3302\", bool, (4, 8)), (\"SIS 3305\", bool, (2, 8))]\n )\n self._active_brdch[\"SIS 3302\"][0][0] = True\n self._active_brdch[\"SIS 3305\"][0][0] = True\n self._config_names = []\n self._active_config = (\"config01\",)\n self._sis3305_mode = 0", "def do_Device (self, line):", "def updateInterface(self):\n p = self.cxn[self.selectedADR].packet()\n p.magnetv().pscurrent().psvoltage()\n p.time()\n p.temperatures()\n p.get_state_var('CompressorStatus')\n p.get_instrument_state()\n state = 
yield p.send()\n # change instrument statuses\n for name,status in state['get_instrument_state']:\n if status[0] == False: color = 'red3'\n elif status[1] == False: color = 'orange3'\n elif status[1] == True: color = 'green3'\n else: color = 'gray70'\n self.instrumentStatuses[name].config(bg=color)\n # change compressor button\n if state['get_state_var'] == True:\n self.compressorButton.configure(text='Stop Compressor',\n command=self.stopCompressor,\n state=Tkinter.NORMAL)\n elif state['get_state_var'] == False:\n self.compressorButton.configure(text='Start Compressor',\n command=self.startCompressor,\n state=Tkinter.NORMAL)\n else: self.compressorButton.configure(state=Tkinter.DISABLED)\n # update current, voltage fields\n temps = {}\n stages = ('T_60K','T_3K','T_GGG','T_FAA')\n for i in range(len(stages)):\n temps[stages[i]] = state['temperatures'][i]\n #if temps[stages[i]] == 'nan': temps[stages[i]] = numpy.nan\n if numpy.isnan(state['magnetv']['V']):\n emf = 'ERR'\n else:\n emf = \"{0:.3f}\".format(state['magnetv']['V'])\n if numpy.isnan(state['pscurrent']['A']):\n psI = 'PS OFF'\n else:\n psI = \"{0:.3f}\".format(state['pscurrent']['A'])\n if numpy.isnan(state['psvoltage']['V']):\n psV = 'PS OFF'\n else:\n psV = \"{0:.3f}\".format(state['psvoltage']['V'])\n self.currentBackEMF.set( emf )\n self.currentI.set( psI )\n self.currentV.set( psV )\n # update plot:\n # change data to plot\n self.stage60K.set_xdata(numpy.append(self.stage60K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage60K.set_ydata(numpy.append(self.stage60K.get_ydata(),temps['T_60K']['K']))\n self.stage03K.set_xdata(numpy.append(self.stage03K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage03K.set_ydata(numpy.append(self.stage03K.get_ydata(),temps['T_3K']['K']))\n self.stageGGG.set_xdata(numpy.append(self.stageGGG.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageGGG.set_ydata(numpy.append(self.stageGGG.get_ydata(),temps['T_GGG']['K']))\n self.stageFAA.set_xdata(numpy.append(self.stageFAA.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageFAA.set_ydata(numpy.append(self.stageFAA.get_ydata(),temps['T_FAA']['K']))\n #update plot\n self.updatePlot()\n # update legend\n labelOrder = ['T_60K','T_3K','T_GGG','T_FAA']\n lines = [self.stage60K,self.stage03K,self.stageGGG,self.stageFAA]\n labels = [l.strip('T_')+' ['+\"{0:.3f}\".format(temps[l]['K'])+'K]' for l in labelOrder]\n labels = [s.replace('1.#QOK','OoR') for s in labels]\n # legend on top (if not using this, delete \\n in title)\n self.ax.legend(lines,labels,bbox_to_anchor=(0., 1.02, 1., .102), loc=3,\n ncol=4, mode=\"expand\", borderaxespad=0.)", "def _initialize_data(self):\n self.reset_count = 0\n self._idn_no_firmware = \"KEPCO,BOP 50-20,E1234,\"\n self._firmware = 2.6\n self._init_data()", "def on(self):", "def cx():", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_hardware_sound_systems'] = True", "def __init__(self, sensor, temperature_resolution, humidity_resolution):\n self.sensor = sensor\n self.sensor.turnHeaterOn() \n time.sleep(1.0) # Burn off condensed stuff.\n self.sensor.turnHeaterOff() \n self.update()\n # Main Program\n #print \"------------\"\n #print \"Manfacturer ID=0x%X\"% self.sensor.readManufacturerID() \n #print \"Device ID=0x%X\"% self.sensor.readDeviceID() \n #print \"Serial Number ID=0x%X\"% self.sensor.readSerialNumber() \n \n # change temperature resolution\n self.sensor.setTemperatureResolution(temperature_resolution)\n 
self.sensor.setHumidityResolution(humidity_resolution)", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def init_IR_codes():\n IR_codes.update( {b'FF629D' : say_temp} ) # Say temperature status\n IR_codes.update( {b'84FF9375' : say_temp} ) # Say temperature status\n #IR_codes.update( {b'FFA857' : volume_inc} ) # increase volume\n #IR_codes.update( {b'FFE01F' : volume_dec} ) # reduce volume\n IR_codes.update( {b'FF906F' : toSecureMode} ) # Will be noBodyHome\n IR_codes.update( {b'FFC23D' : ultra.switch} ) # On/off radio\n IR_codes.update( {b'BF09C35C' : ultra.switch} ) # On/off radio (big)\n #IR_codes.update( {b'8BE68656' : holeNightLightAuto} )\n #IR_codes.update( {b'B21F28AE' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'A6B1096A' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'24014B0' : noolite_hole_set_off} )\n IR_codes.update( {b'8FC212DB' : noolite_hole_set_on} )\n IR_codes.update( {b'7960556F' : noolite_hole_set_auto} )\n #IR_codes.update( {b'FF10EF' : holeNightLightAuto} )\n #IR_codes.update( {b'FF38C7' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'FF5AA5' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'FF30CF' : noolite_hole_set_off} )\n IR_codes.update( {b'FF18E7' : noolite_hole_set_on} )\n IR_codes.update( {b'FF7A85' : noolite_hole_set_auto} )", "def __init__(self):\n i2c.Pn532_i2c.__init__(self)\n self._uid = False", "def __init__(self, device):\n self.device = device\n self.io = serial.Serial(device, 57600, timeout=1)\n self.keys = ['time', 'centroid_x', 'centroid_y', 'centroid_r',\n 'level_1', 'level_2', 'level_3',\n 'width_1', 'width_2', 'width_3',\n 'height_1', 'height_2', 'height_3',\n 'power']", "def get_coulomb_info(self):\n return", "def about( cls, ):\n url = r\"http://www.opencircuits.com/Python_Smart_Terminal\"\n __, mem_msg = cls.show_process_memory( )\n msg = ( f\"{cls.controller.app_name} version:{cls.controller.version} \\nmode: {cls.parameters.mode}\"\n f\"\\n by Russ Hensel\"\n f\"\\nMemory in use {mem_msg} \\nCheck <Help> or \\n{url} \\nfor more info.\" )\n messagebox.showinfo( \"About\", msg )", "def healthcare():", "def test_card_info_lookup(self):\n pass", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_rgb_dmd'] = True", "def dicom_cli():", "def state_information(self) -> Dict[str, Any]:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def __init__(self, starting_point=-1):\n self.i_read = starting_point\n self.data = [['fake_chip_id', 'fake_version'],\n [96, 110, 203, 104, 50, 0, 29, 145, 59, 215, 208, 11,\n 232, 38, 42, 255, 249, 255, 172, 38, 10, 216, 189, 16],\n [75],\n [129, 1, 0, 16, 44, 3, 30],\n [76, 60, 128, 129, 49, 128, 94, 120]]", "def sth():", "def calc_misc(self, model):\n\n part_family = model.part_family.lower()\n\n # Legacy Demod Registers\n # FIXME: calculate these\n\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RATESELMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RXFRCDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL2_SQITHRESH, 0)\n self._reg_write(model.vars.MODEM_CTRL2_TXPINMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATDENS, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATLEVEL, 6)\n self._reg_write(model.vars.MODEM_CTRL4_OFFSETPHASESCALING, 0)\n 
self._reg_write(model.vars.MODEM_CTRL4_PHASECLICKFILT, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTAVG, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTDEB, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTGAIN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTRST, 0)\n self._reg_write(model.vars.MODEM_CTRL4_SOFTDSSSMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL5_BBSS, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DEMODRAWDATASEL2, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL5_POEPER, 0)\n self._reg_write(model.vars.MODEM_CTRL5_RESYNCLIMIT, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CODINGB, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CPLXCORREN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DEMODRESTARTALL, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DSSS3SYMBOLSYNCEN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_PREBASES, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONSHORTRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKINV, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKRAMPEN, 0)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_VMIDCTRL, 1)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_MUTEDLY, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0)\n\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGEN, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGLUTSIZE, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGSTEP, 0)\n self._reg_write(model.vars.MODEM_PRE_DSSSPRE, 0)\n self._reg_write(model.vars.MODEM_PRE_PRESYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_PRE_SYNCSYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_TIMING_FASTRESYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQINVEN, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQSYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TSAGCDEL, 0)\n\n # Long Range registers\n # FIXME: calculate these\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDLRDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDPUGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSPIKETHADD, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSS, 0)\n self._reg_write(model.vars.MODEM_LRFRC_CI500, 1)\n self._reg_write(model.vars.MODEM_LRFRC_FRCACKTIMETHD, 0)\n self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1)\n\n # DSA registers\n # FIXME: what do we need to calculate here?\n self._reg_write(model.vars.MODEM_DSACTRL_AGCBAUDEN, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_AMPJUPTHD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD0, 2)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD1, 4)\n self._reg_write(model.vars.MODEM_DSACTRL_DSARSTON, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_FREQAVGSYM, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_GAINREDUCDLY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_LOWDUTY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_RESTORE, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_SCHPRD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_TRANRSTDSA, 0)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMAXTHD, 0x78)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMINTHD, 12)\n self._reg_write(model.vars.MODEM_DSATHD0_SPIKETHD, 0x64)\n self._reg_write(model.vars.MODEM_DSATHD0_UNMODTHD, 4)\n self._reg_write(model.vars.MODEM_DSATHD1_AMPFLTBYP, 1)\n 
self._reg_write(model.vars.MODEM_DSATHD1_DSARSTCNT, 2)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQLATDLY, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQSCALE, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_POWABSTHD, 0x1388)\n self._reg_write(model.vars.MODEM_DSATHD1_POWRELTHD, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRDETDIS, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRFLTBYP, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_RSSIJMPTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_FDADJTHD, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_FREQESTTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_INTERFERDET, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_JUMPDETEN, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETFORCE, 0)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETPASSTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_POWABSTHDLOG, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMAXTHDLO, 120)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMINTHDLO, 12)\n self._reg_write(model.vars.MODEM_DSATHD3_SPIKETHDLO, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_UNMODTHDLO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD0LO, 2)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD1LO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_POWABSTHDLO, 5000)\n self._reg_write(model.vars.MODEM_DSATHD4_SWTHD, 0)\n\n # FIXME: check with Subrata on how to calculate these\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_ANTWAIT, 20)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIP2ANT, 1)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPCORRTHD, 100)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPRSSITHD, 0)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR0,1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR1, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR2, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR3, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSIANDDIVTHD, 20)\n self._reg_write(model.vars.MODEM_PHANTDECSION_CORRANDDIVTHD, 100)\n\n # FIXME: figure out how these AGC registers need to be calculated\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTPER, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_NEGSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_POSSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_STEPPER, 0)\n\n # Antenna Diversity Registers\n # FIXME: check with Amey if we need to calculate these\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30)\n self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0)\n self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0)\n self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0)\n #self._reg_write(model.vars.MODEM_CTRL3_ANTDIVMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL3_ANTDIVREPEATDIS, 0)\n\n # Coherent Demod Registers\n #FIXME: Check with Yan/Per on how to calculate these\n self._reg_write(model.vars.MODEM_COH2_DSAPEAKCHPWRTH, 0)\n 
self._reg_write(model.vars.MODEM_COH3_COHDSADETDIS, 0)\n self._reg_write(model.vars.MODEM_COH3_DSAPEAKCHPWREN, 0)\n self._reg_write(model.vars.MODEM_COH3_LOGICBASEDCOHDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_COH3_ONEPEAKQUALEN, 0)\n self._reg_write(model.vars.MODEM_COH3_PEAKCHKTIMOUT, 0)\n\n # Clock-gating register\n self._reg_write(model.vars.MODEM_AUTOCG_AUTOCGEN, 0) #We calculate MODEM_CGCLKSTOP_FORCEOFF in calculator instead\n self._reg_write(model.vars.FRC_AUTOCG_AUTOCGEN, 7)\n\n # Shaping filter coefficients\n #FIXME: check with Yan on how to calculate these\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF40, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF41, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF42, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF43, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF44, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF45, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF46, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF47, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF48, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF49, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF50, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF51, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF52, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF53, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF54, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF55, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF56, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF57, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF58, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF59, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF60, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF61, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF62, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF63, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF10, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF11, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF9, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF12, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF13, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF14, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF15, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF16, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF17, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF18, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF19, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF20, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF21, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF22, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF23, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF24, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF25, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF26, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF27, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF28, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF29, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF30, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF31, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF32, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF33, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF34, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF35, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF36, 0)\n 
self._reg_write(model.vars.MODEM_SHAPING9_COEFF37, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF38, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF39, 0)\n\n # Modem Registers with fixed value\n self._reg_write(model.vars.MODEM_AFC_AFCTXMODE, 0)\n# self._reg_write(model.vars.MODEM_AFC_AFCGEAR, 3)\n self._reg_write(model.vars.MODEM_CTRL0_DEMODRAWDATASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DMASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL3_PRSDINEN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_CLKUNDIVREQ, 0)\n self._reg_write(model.vars.MODEM_CTRL3_RAMTESTEN, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_CLKWIDTH, 1)\n self._reg_write(model.vars.MODEM_DIRECTMODE_DMENABLE, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCASYNC, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCPREAM, 3)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPAPOWER, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPASELSLICE, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_MANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_CTRL0_OOKASYNCPIN, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DETDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DUALCORROPTDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_FRAMEDETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL1_SYNC1INV, 0)\n\n # FRC Registers with fixed value\n self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0)\n self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 0)\n self._reg_write(model.vars.FRC_CTRL_RATESELECT, 0)\n self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0)\n self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0)\n self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0)\n self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0)\n\n # Added new reg-fields related to 15.4 subG OQPSK phys\n self._reg_write(model.vars.MODEM_COH3_COHDSACMPLX, 0)\n self._reg_write(model.vars.MODEM_SYNCPROPERTIES_STATICSYNCTHRESH, 0)\n\n # Added new reg-fields related to Internal Long Range\n self._reg_write(model.vars.MODEM_PRE_PREWNDERRORS, 0)\n self._reg_write(model.vars.MODEM_CTRL3_TIMINGBASESGAIN, 0)\n\n #AGC default settings\n self._reg_write(model.vars.AGC_CTRL0_CFLOOPNFADJ, 0)\n self._reg_write(model.vars.AGC_CTRL6_DUALRFPKDDEC, 240296)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD1, 40)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD1, 10)\n self._reg_write(model.vars.AGC_CTRL6_ENDUALRFPKD, 1)\n\n self._reg_write(model.vars.MODEM_SQ_SQEN , 0)\n self._reg_write(model.vars.MODEM_SQ_SQTIMOUT , 0)\n 
self._reg_write(model.vars.MODEM_SQEXT_SQSTG2TIMOUT , 0)\n self._reg_write(model.vars.MODEM_SQEXT_SQSTG3TIMOUT , 0)\n\n # reg-fields to modify sync detection reset behavior PGOCELOT-5282\n self._reg_write(model.vars.MODEM_FRMSCHTIME_PMRSTSYCNEN, 0)\n self._reg_write(model.vars.MODEM_FRMSCHTIME_DSARSTSYCNEN, 0)\n\n #RAC settings\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, part_family)\n self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP, part_family)", "def _command(self, commands):\n# \"\"\"Send command to spi bus of display chip, most DC pin need set to LOW \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 0 )\n# self._spi.writebytes( commands )\n raise NotImplementedError", "def setup(instname):\n global reducer, inst_name,van_mass,bleed_switch,rate,pixels\n # debugging (allows to reload changed DirectEnergyConversion package from Mantid)\n\n if instname=='MAR' or instname=='mar':\n print 'setup mari'\n inst_name='MAR'\n reducer = DRC.setup_reducer('MARI')\n bleed_switch=False\n rate=0.0\n pixels=0\n elif instname=='MER' or instname=='mer':\n print 'setup merlin'\n inst_name='MER'\n reducer = DRC.setup_reducer('MERLIN')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='MAP' or instname=='map':\n print 'setup maps'\n inst_name='MAP'\n reducer = DRC.setup_reducer('MAPS')\n bleed_switch=False\n rate=0.0\n pixels=0.0\n elif instname=='LET' or instname=='let':\n print 'setup let'\n inst_name='LET'\n reducer = DRC.setup_reducer('LET')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='ARCS' or instname=='arcs':\n print 'setup Arcs'\n inst_name='ARC'\n reducer = DRC.setup_reducer('ARCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='SEQ' or instname=='seq':\n print 'setup Sequoia'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('SEQUOIA')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='CNCS' or instname=='cncs':\n print 'setup cncs'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('CNCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='HYSPEC' or instname=='hyspec':\n print 'setup hyspec'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('HYSPEC')\n bleed_switch=False\n rate=0.01\n pixels=80\n else:\n print 'Instrument name not defined'\n return \n van_mass=reducer.get_default_parameter('vanadium-mass')", "def setup(self):\n header_print(self.data['intro'])\n header_print(self.data['help'])\n random.shuffle(self.data['draw'])\n random.shuffle(self.data['locations'])\n random.shuffle(self.data['events'])\n random.shuffle(self.data['aces'])\n random.shuffle(self.data['personalities'])\n self.stats = {\n 'round': 0,\n 'powers': {\n 'MOONS': 6,\n 'SUNS': 6,\n 'WAVES': 6,\n 'LEAVES': 6,\n 'WYRMS': 6,\n 'KNOTS': 6,\n },\n 'hand': self.data['draw'][:],\n 'discard': [],\n 'active': [],\n 'opponent': {},\n }", "def basic(self):\n pass", "def info(self) -> dict:", "def fetch_stick(self):\r\n print(\"There you go, sir!\\n\")", "def init_devices(self):\n self.hp_nb = int(self.rs_nb* self.hp_proportion/(1- self.hp_proportion))\n self.defense_cost = self.hp_nb 
* self.hp_unit_cost\n rs_devices = [True for i in range(self.rs_nb)] #rs --> True\n hp_devices = [False for i in range(self.hp_nb)] #hp --> False\n self.devices = rs_devices + hp_devices\n shuffle(self.devices)", "def load_device():", "def __init__(self, dev):\n self.dev = dev\n self.dev.cla = 0x80", "def take_control(self):\n pass", "def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)", "def info(self):\n\n print(\"pixellisation:\", self.pixel)\n print(\"number of components:\", self.ncomp)\n print(\"number of pixels:\", self.data.shape[:] if self.ncomp == 1 else self.data.shape[1:])\n print(\"nside:\", self.nside)\n print(\"geometry:\", self.geometry)\n print(\"coordinates:\", self.coordinate)", "def info():\n print(\"Made using the OOP RPG game creator (c) Claire.\\n\")", "def protocolInfoLaser(self, fh, inputs, derivative):\n #global summary\n try:\n nspikes = len(inputs)\n self.devicemode = 'Laser'\n #print inputs\n # print 'FH parent info: ', fh.parent().info()\n print('1')\n reps = fh.parent().info()['protocol']['conf']['repetitions'] # fh.info()[('protocol', 'repetitions')]\n print('2')\n print(list(fh.info().keys()))\n print(fh.info())\n try:\n pulseDurIndex = fh.info()['Laser-Blue', 'Shutter.duration']\n except:\n try:\n pulseDurIndex = fh.info()['Laser-UV', 'Shutter.duration']\n except:\n raise ValueError(\" No key for Laser-Blue or Laser-UV in data set\")\n # fh.info()[('Laser-Blue', 'Command.PulseTrain_length')]\n # print 'pulsedurindex: ', pulseDurIndex\n fn = fh.shortName()\n # find date string in the path, and return path to current data set\n # allows us to identify the data set by date, slice, cell, protocol, etc.\n dm = re.compile(r'(\\d{4,4})\\.(\\d{2,2})\\.(\\d{2,2})*')\n dsearch = dm.search(fh.name())\n expname = fh.name()[dsearch.start():] # pull full path for experiment here, but leave out everything above the date\n print('3')\n pulseDur = fh.parent().info()['sequenceParams'][('Laser-Blue','Shutter.duration')] # [pulseDurIndex]\n print('4')\n pulseDur = pulseDur[pulseDurIndex]\n print('5')\n pulseTrainCommandShutter = fh.parent().info()['devices']['Laser-Blue']['channels']['Shutter']\n print('6')\n pulseTrainFcn = pulseTrainCommandShutter['waveGeneratorWidget']['function']\n r = 
re.compile('(?P<type>pulse)\\((?P<delay>\\d+),\\s(?P<param>\\w+),\\s(?P<value>\\d+)\\)')\n s = r.match(pulseTrainFcn)\n print('6.5')\n startTime = float(s.group('delay'))*1e-3 # pulseTrainFcn['start']['value'] # retrieve start time\n print('7')\n rep = 0 # fh.info()[('protocol', 'repetitions')]\n ipi = 1 # pulseTrainInfo['interpulse_length']['value'] # retrieve interpulse interval\n npulses = 1 # pulseTrainInfo['pulse_number']['value'] # retrieve number of pulses in train\n spikeTimes = [t['time'] for t in inputs]\n # figure max of derivative of the data after each stimulus pulse. 5 msec window.\n t = derivative.xvals(\"Time\")\n slopes = np.zeros(npulses)\n print('8')\n for n in range(npulses):\n t0 = startTime + n * ipi\n t1 = t0 + 3e-3\n x = np.where((t > t0) & (t <= t1))\n print('n, x: ', n, x)\n slopes[n] = np.max(derivative[x])\n\n res = OrderedDict([('Experiment: ', expname), ('File: ', fn), ('startTime', startTime),\n ('NPulses', npulses), ('IPI', ipi), ('PulseDur', pulseDur), ('Reps', reps),\n ('thisRep', rep),\n ('NSpikes', nspikes), ('SpikeTimes', spikeTimes), ('Slopes', slopes)])\n self.summary.append(res)\n except:\n raise Exception('Laser stuff failed')\n return res", "def _connect(self):\n\n log.info(\"Loading HVI\")\n\n self._hvi = sd1.SD_HVI()\n hvi_file = pkg_resources.resource_filename(\"qtrl.keysight\", 'sequencer.hvi')\n log.info(hvi_file)\n self._hvi.open(hvi_file)\n # for some unknown reason, this has to be run twice before it will not error\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2)\n\n assert self._hvi.open(hvi_file) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2) >= 0, 'Failed to load HVI'\n\n assert self._hvi.compile() >= 0, 'Failed to load HVI'\n\n assert self._hvi.load() >= 0, 'Failed to load HVI'\n self._hvi.reset()\n\n cur_chan = 0\n for i, card in enumerate(self.cards):\n if card.connection is not None:\n self.close()\n\n card_cxn = sd1.SD_AOU()\n assert card_cxn.openWithSlot(\"\", card.chassis, card.slot) > 0, 'Failed to connect to slot'\n\n self.cards[i] = card._replace(connection=card_cxn)\n # self.cards[i].connection.triggerIOconfig(sd1.SD_TriggerDirections.AOU_TRG_IN)\n\n for channel in range(card.channels):\n self.channels[cur_chan] = KeysightChannel(channel=channel,\n chassis=card.chassis,\n slot=card.slot,\n model=card.model,\n type=card.type,\n connection=card_cxn)\n\n self.channels[cur_chan].connection.channelWaveShape(channel+1, sd1.SD_Waveshapes.AOU_AWG)\n\n self.channels[cur_chan].connection.clockResetPhase(3, 0)\n\n # ext trig config, 0 is external source, 3 is rising edge\n # self.channels[cur_chan].connection.AWGtriggerExternalConfig(channel+1, 0, 3)\n\n cur_chan += 1\n self._hvi.start()\n self._hvi.stop()\n\n self._n_channels = cur_chan", "def SPIchiperase(self):\n self.writecmd(0x01,0x81);", "def __init__(self):\n self.ram = [0] * 256\n self.reg = [0] * 8\n self.pc = 0", "def c(self):\n pass", "def c(self):\n pass", "def ExtraInfo(self) -> object:", "def __init__(self):\n self.ser = serial.Serial('/dev/ttyUSB3',9600)\n collect_readings = False\n self.colours = []\n self.max_readings = 50 #maximum 
number of readings to use", "def get_info(self):\n\t\tret = 'Flash info\\n'\n\t\tret += '\\tGPNVM bits: ' + str(self.read_gpnvm()) + '\\n'\n\t\tret += '\\tUnique identifier area: ' + self.read_unique_identifier_area().decode('ascii', 'replace') + '\\n'\n\t\tret += '\\tDescriptor: ' + str(self.read_descriptor()) + '\\n'\n\t\treturn ret", "def refresh_description(self):\n # AIN\n code_ain = (self.CODE >> 12) & 0b0111\n # DICT_AIN = [[0, 1], [0, 3], [1, 3], [2, 3], [0, 4], [1, 4], [2, 4], [3, 4]]\n ind_p, ind_n = DICT_AIN[code_ain]\n self.AINP = \"AIN\" + str(ind_p)\n self.AINN = \"AIN\" + str(ind_n)\n if (ind_n == 4): self.AINN = \"GND\"\n\n # FSR\n code_fsr = (self.CODE >> 9) & 0b0000111\n # DICT_FSR = [\"6.144V\", \"4.096V\", \"2.048V\", \"1.024V\", \"0.512V\", \"0.256V\"]\n self.FSR = DICT_FSR[code_fsr]\n\n # MODE\n\n # rate\n code_rate = (self.CODE >> 5) & 0b00000000111\n # DICT_RATE = [\"8 SPS\", \"16 SPS\", \"32 SPS\", \"64 SPS\", \"128 SPS\", \"250 SPS\", \"475 SPS\", \"860 SPS\"]\n self.RATE = DICT_RATE[code_rate]", "def reckon(self):", "def polyChipOff(*args, attraction: Union[float, bool]=0.0, caching: bool=True,\n constructionHistory: bool=True, duplicate: bool=True, gain: Union[float,\n List[float], bool]=1.0, gravity: Union[List[float, float, float], bool]=None,\n gravityX: Union[float, bool]=0.0, gravityY: Union[float, bool]=0.0, gravityZ:\n Union[float, bool]=0.0, keepFacesTogether: bool=True, keepFacetTogether:\n bool=True, localCenter: Union[int, bool]=0, localDirection: Union[List[float,\n float, float], bool]=None, localDirectionX: Union[float, bool]=0.0,\n localDirectionY: Union[float, bool]=0.0, localDirectionZ: Union[float,\n bool]=0.0, localRotate: Union[List[float, float, float], bool]=None,\n localRotateX: Union[float, bool]=0.0, localRotateY: Union[float, bool]=0.0,\n localRotateZ: Union[float, bool]=0.0, localScale: Union[List[float, float,\n float], bool]=None, localScaleX: Union[float, bool]=0.0, localScaleY:\n Union[float, bool]=0.0, localScaleZ: Union[float, bool]=0.0, localTranslate:\n Union[List[float, float, float], bool]=None, localTranslateX: Union[float,\n bool]=0.0, localTranslateY: Union[float, bool]=0.0, localTranslateZ:\n Union[float, bool]=0.0, magnX: Union[float, bool]=0.0, magnY: Union[float,\n bool]=0.0, magnZ: Union[float, bool]=0.0, magnet: Union[List[float, float,\n float], bool]=None, name: AnyStr=\"\", nodeState: Union[int, bool]=0, offset:\n Union[float, bool]=0.0, pivot: Union[List[float, float, float], bool]=None,\n pivotX: Union[float, bool]=0.0, pivotY: Union[float, bool]=0.0, pivotZ:\n Union[float, bool]=0.0, random: Union[float, bool]=0.0, scale: Union[List[float,\n float, float], bool]=None, scaleX: Union[float, bool]=0.0, scaleY: Union[float,\n bool]=0.0, scaleZ: Union[float, bool]=0.0, translate: Union[List[float, float,\n float], bool]=None, translateX: Union[float, bool]=0.0, translateY: Union[float,\n bool]=0.0, translateZ: Union[float, bool]=0.0, weight: Union[float, bool]=0.0,\n worldSpace: bool=True, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def data(self):", "def __init__(self):\n self.bytes = bytearray(3)\n MCP4725.__init__(self)", "def __init__(self, busRestriction=0, devAddressRestriction=0, serialNumber=\"\"):\n self.handle = libcaer.caerDeviceOpen(1, libcaer.CAER_DEVICE_DAVIS, busRestriction, devAddressRestriction, serialNumber)\n self.info = libcaer.caerDavisInfoGet(self.handle)\n\n print(\"device ID: \" + str(libcaer.caer_davis_info_deviceID_get(self.info)))\n\n if 
(libcaer.caer_davis_info_deviceIsMaster_get(self.info)):\n print(\"device is Master\")\n else:\n print(\"device is Slave\")\n\n print(\"device Serial Number: \" + str(libcaer.caer_davis_info_deviceSerialNumber_get(self.info)))\n print(libcaer.caer_davis_info_deviceString_get(self.info))\n\n self.dvsSizeX = libcaer.caer_davis_info_dvsSizeX_get(self.info)\n self.dvsSizeY = libcaer.caer_davis_info_dvsSizeY_get(self.info)\n\n self.apsSizeX = libcaer.caer_davis_info_apsSizeX_get(self.info)\n self.apsSizeY = libcaer.caer_davis_info_apsSizeY_get(self.info)\n\n # init default biases\n ret = libcaer.caerDeviceSendDefaultConfig(self.handle)\n if(ret == True):\n print(\"Default biases loaded\")\n else:\n print(\"Error while loading default biases\")\n raise Exception\n\n # set blocking data exchange\n ret = libcaer.caerDeviceConfigSet(self.handle, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING, True)\n if(ret == True):\n print(\"Data exchange set to blocking mode\")\n else:\n print(\"Error in communicating with the device, please check your setup\")\n raise Exception\n\n # start data transfer from device\n ret = libcaer.caerDeviceDataStart(self.handle, None, None, None, None, None)\n if(ret == True):\n print(\"Data transfer started\")\n else:\n print(\"Error in starting data transfer\")\n raise Exception", "def __init__(self, make, model, year):\r\n super().__init__(make, model, year)\r\n self.battery_size = 70\r\n # self.autopilot = autopilot\r", "def moi(self):\n\n pass", "def enable(self) -> None:" ]
[ "0.67811126", "0.6066584", "0.5964433", "0.59339035", "0.589296", "0.58123773", "0.58010674", "0.57554996", "0.5728705", "0.56449646", "0.56439716", "0.56414634", "0.56379575", "0.56379575", "0.5594598", "0.55358076", "0.5497361", "0.54925936", "0.5483349", "0.5473763", "0.5462485", "0.5460163", "0.54515415", "0.5441278", "0.5419607", "0.54022926", "0.53795564", "0.53726125", "0.5338211", "0.5338211", "0.5337915", "0.5328013", "0.5326133", "0.53258103", "0.52956903", "0.5283403", "0.528021", "0.5279774", "0.5276576", "0.52745867", "0.52732724", "0.52700603", "0.5265963", "0.5249531", "0.52298385", "0.52277905", "0.5222615", "0.52194977", "0.5206727", "0.5206362", "0.5201322", "0.51996315", "0.51887923", "0.51804155", "0.51804155", "0.51804155", "0.51804155", "0.51774514", "0.5176834", "0.516562", "0.5158818", "0.5148516", "0.5147972", "0.51450545", "0.51415676", "0.5134778", "0.5133039", "0.5128874", "0.51275474", "0.51258636", "0.5125459", "0.5123773", "0.5122468", "0.5119849", "0.51123893", "0.5109508", "0.5105934", "0.51058424", "0.50912714", "0.5090566", "0.50820786", "0.5077213", "0.5076216", "0.5076194", "0.50725013", "0.5070938", "0.5069573", "0.5067492", "0.5067492", "0.5065867", "0.5065824", "0.506569", "0.50642616", "0.5063172", "0.5062251", "0.5059142", "0.505062", "0.50505215", "0.50487983", "0.5045563", "0.5045133" ]
0.0
-1
Everything important about the chip
def __init__(self, length=None, width=None, height=None, material_bottom_wall_surface=None, material_top_wall_surface=None, material_fluid=None):
    self.length = length
    self.width = width
    self.height = height
    self.material_bottom_wall_surface = material_bottom_wall_surface # material should only hold relevant electrokinetic data
    self.material_top_wall_surface = material_top_wall_surface # material should only hold relevant elect
    self.material_fluid = material_fluid # could be a mixture of liquid materials + fluorescent particles
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n ChipData.ChipData.__init__(self)", "def read_chip_info(self):\n return [self.read_chip_type(), self.read_chip_revision()]", "def support(self):", "def show(self):\n # Disable IRQ to improve speed\n with NoIRQ():\n for chip in range(NB_CHIPS):\n self._select(chip)\n row = 0 if chip in (0, 1) else 1\n col = 0 if chip in (0, 2) else 1\n data = self.get_ht1632_data(row, col)\n green = (is_green(value) for value in data)\n red = (is_red(value) for value in data)\n self._write_data(green, red)", "def plugh():", "def run_all(self):\n self.formatter.section_start('Firmware info')\n self.analyse_firmware_id() # Always do this first!\n # If the chip has not panicked, the preserved\n # block is populated with random values, therefore\n # until the magic_value is implemented, do a try and except\n self.analyse_panic_state()\n self.analyse_slt() # Kind of pointless but why not.\n self.formatter.section_end()", "def presenetCar():", "def produce_features(self, chip):\n raise NotImplementedError(\"produce_features() not implemented\")", "def _init_hardware(self):\n return", "def probe(self):", "def bioinfo():\n\n pass", "def update(self):\n try:\n if not self._sysinfo:\n self._sysinfo = self.smartplug.sys_info\n self._mac = self.smartplug.mac\n self._model = self.smartplug.model\n if self.smartplug.context is None:\n self._alias = self.smartplug.alias\n self._device_id = self._mac\n else:\n self._alias = self._plug_from_context[\"alias\"]\n self._device_id = self.smartplug.context\n\n if self.smartplug.context is None:\n self._state = self.smartplug.state == self.smartplug.SWITCH_STATE_ON\n else:\n self._state = self._plug_from_context[\"state\"] == 1\n\n if self.smartplug.has_emeter:\n emeter_readings = self.smartplug.get_emeter_realtime()\n\n self._emeter_params[ATTR_CURRENT_POWER_W] = \"{:.2f}\".format(\n emeter_readings[\"power\"]\n )\n self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_readings[\"total\"]\n )\n self._emeter_params[ATTR_VOLTAGE] = \"{:.1f}\".format(\n emeter_readings[\"voltage\"]\n )\n self._emeter_params[ATTR_CURRENT_A] = \"{:.2f}\".format(\n emeter_readings[\"current\"]\n )\n\n emeter_statics = self.smartplug.get_emeter_daily()\n try:\n self._emeter_params[ATTR_TODAY_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_statics[int(time.strftime(\"%e\"))]\n )\n except KeyError:\n # Device returned no daily history\n pass\n\n self._available = True\n\n except (SmartDeviceException, OSError) as ex:\n if self._available:\n _LOGGER.warning(\n \"Could not read state for %s: %s\", self.smartplug.host, ex\n )\n self._available = False", "def info(self):", "def info(self):", "def __init__(self):\r\n # Check device ID.\r\n chip_id = self._read_byte(_BME280_REGISTER_CHIPID)\r\n if _BME280_CHIPID != chip_id:\r\n raise RuntimeError('Failed to find BME280! Chip ID 0x%x' % chip_id)\r\n self._write_register_byte(_BME280_REGISTER_SOFTRESET, 0xB6)\r\n time.sleep(0.5)\r\n self._read_coefficients()\r\n self.sea_level_pressure = 1013.25\r\n \"\"\"Pressure in hectoPascals at sea level. 
Used to calibrate `altitude`.\"\"\"\r\n # turn on humidity oversample 16x\r\n self._write_register_byte(_BME280_REGISTER_CTRL_HUM, 0x03)\r\n self._t_fine = None", "def __init__(self):\n self._device_info = None", "def serial(self):", "def _pending_chips(self):\n assert not (self._deleted and self._new)\n # NOTE: This is ce4 specific code and could be factored out.\n deliver_at = gametime.now()\n\n chips = []\n # If this is a DELETE, send an empty dict.\n if self._deleted:\n chips.append({\n 'action':DELETE,\n 'path':self._chip_path(),\n 'value':{},\n 'time':deliver_at\n })\n # If this is an ADD, add all fields and collections.\n elif self._new:\n chips.append({\n 'action':ADD,\n 'path':self._chip_path(),\n 'value':self.to_struct(),\n 'time':deliver_at\n })\n # If this is a MOD, add only the changed fields and id_field.\n elif len(self._changed_fields) > 0:\n chips.append({\n 'action':MOD,\n 'path':self._chip_path(),\n 'value':self.to_struct(fields=self._changed_fields),\n 'time':deliver_at})\n return chips", "def info(rom):\n rom = ROM(rom, detect=True)", "def manage_info():", "def _get_info_about_sensor(self):\n reg_id = 0xD0\n chip_id, chip_version = self.bus.read_i2c_block_data(self.address,\n reg_id,\n 2)\n return chip_id, chip_version", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def pick_up(self):", "def _read_cardiochip(self):\n cur_leadstatus = 0\n sample_count =0\n while self.connected:\n sample_count+=1\n #check for sync bytes\n readbyte = ord(self.ser.read(1))\n #print readbyte, SYNC_BYTE\n if readbyte != SYNC_BYTE:\n continue\n readbyte = ord(self.ser.read(1))\n if readbyte != SYNC_BYTE:\n continue\n\n #parse length byte\n while True:\n pLength = ord(self.ser.read(1))\n if pLength != SYNC_BYTE:\n break\n if pLength > 169:\n continue\n #print \"L: %i\" % pLength\n\n # collect payload bytes\n payload = self.ser.read(pLength)\n payload = [ord(x) for x in payload] #convert to int from string\n #print \"payload: \" + str(payload).strip('[]')\n # ones complement inverse of 8-bit payload sum\n checksum = sum(payload) & 0xFF\n checksum = ~checksum & 0xFF\n\n # catch and verify checksum byte\n chk = ord(self.ser.read(1))\n #print \"chk: \" + str(checksum)\n if chk != checksum:\n print \"checksum error, %i != %i\" % (chk, checksum)\n continue\n\n output = self._parseData(payload)\n\n lead_status = next(( d for d in output if 'leadoff' in d), None)\n if lead_status is not None:\n if cur_leadstatus != lead_status['leadoff']:\n #we have a change\n if lead_status['leadoff']==200:\n print \"LEAD ON\"\n elif lead_status['leadoff']==0:\n print \"LEAD OFF\"\n cur_leadstatus = lead_status['leadoff']\n\n # store the output data in a queue\n # first, create a tuple with the sample index and dict with the timestamp and ecg\n ecgdict = next(((i,d) for i,d in enumerate(output) if 'ecg_raw' in d), None)\n if ecgdict is not None and sample_count>self.Fs*2:\n #let's just ignore the first 2 seconds of crappy data\n ecgdict[1]['leadoff'] = cur_leadstatus\n #print ecgdict[1]\n self.ecg_buffer.put(ecgdict[1]) # this should save the ecg and timestamp keys\n\n return", "def CL(self):", "def use(self):", "def _get_information(self):\n pass", "def enable(self):", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self):\n self.Revision = '0'\n self.Serial = 
None\n try:\n with open('/proc/cpuinfo','r') as f:\n for line in f:\n splitLine = line.split(':')\n if len(splitLine) < 2:\n continue\n key = splitLine[0].strip()\n value = splitLine[1].strip()\n if key == 'Revision':\n self.Revision = value\n if key == 'Serial' and value != len(value) * '0':\n self.Serial = value\n except:\n exception (\"Error reading cpuinfo\")\n self.model = 'Unknown'\n if self.Revision == 'Beta':\n self.model = 'Raspberry Pi Model B (Beta)'\n if self.Revision in ('000d', '000e', '000f', '0002', '0003', '0004', '0005', '0006'):\n self.model = 'Raspberry Pi Model B'\n if self.Revision in ('0007', '0008', '0009'):\n self.model = 'Raspberry Pi Model A'\n if self.Revision in ('0010', '0013', '900032'):\n self.model = 'Raspberry Pi Model B +'\n if self.Revision in ('0011', '0014'):\n self.model = 'Raspberry Pi Compute Module'\n if self.Revision in ('0012', '0015'):\n self.model = 'Raspberry Pi Model A+'\n if self.Revision in ('a01040', 'a01041', 'a21041', 'a22042'):\n self.model = 'Raspberry Pi 2 Model B'\n if self.Revision in ('900092', '900093', '920093'):\n self.model = 'Raspberry Pi Zero'\n if self.Revision in ('9000c1',):\n self.model = 'Raspberry Pi Zero W'\n if self.Revision in ('a02082', 'a22082', 'a32082'):\n self.model = 'Raspberry Pi 3 Model B' \n if self.Revision in ('a020d3'):\n self.model = 'Raspberry Pi 3 Model B+'\n if self.Revision in ('a020a0'):\n self.model = 'Raspberry Pi Compute Module 3'\n if 'Rockchip' in CPU_HARDWARE:\n self.model = 'Tinker Board'\n self.manufacturer = 'Element14/Premier Farnell'\n if self.Revision in ('a01041', '900092', 'a02082', '0012', '0011', '0010', '000e', '0008', '0004', 'a020d3', 'a01040', 'a020a0'):\n self.manufacturer = 'Sony, UK'\n if self.Revision in ('a32082'):\n self.manufacturer = 'Sony, Japan'\n if self.Revision in ('0014', '0015', 'a21041', 'a22082', '920093'):\n self.manufacturer = 'Embest, China'\n if self.Revision in ('0005', '0009', '000f'):\n self.manufacturer = 'Qisda'\n if self.Revision in ('0006', '0007', '000d'):\n self.manufacturer = 'Egoman'\n if self.Revision == '0000':\n if 'Rockchip' in CPU_HARDWARE:\n self.manufacturer = 'ASUS'\n else:\n try:\n with open('/proc/device-tree/model', 'r') as model_file:\n for line in model_file:\n if 'BeagleBone' in line:\n index = line.index('BeagleBone')\n self.manufacturer = line[:index - 1].strip(' \\n\\t\\0')\n self.model = line[index:].strip(' \\n\\t\\0')\n break\n except:\n exception (\"Error reading model\")", "def addExtraDevices(self):\n \n # These tables were extracted from\n # pirates/src/piratesgui/GameOptions.py.\n \n ati_device_list = [ \n [\"ATI MOBILITY/RADEON X700\", 0x5653],\n [1, \"Radeon X1950 XTX Uber - Limited Edition\", 0x7248],\n [1, \"Radeon X1950 XTX Uber - Limited Edition Secondary\", 0x7268],\n [1, \"Radeon X800 CrossFire Edition\", 0x554D],\n [1, \"Radeon X800 CrossFire Edition Secondary\", 0x556D],\n [1, \"Radeon X850 CrossFire Edition\", 0x5D52],\n [1, \"Radeon X850 CrossFire Edition Secondary\", 0x5D72],\n [\"Radeon X550/X700 Series\", 0x564F],\n [\"ATI FireGL T2\", 0x4154],\n [\"ATI FireGL T2 Secondary\", 0x4174],\n [\"ATI FireGL V3100\", 0x5B64],\n [\"ATI FireGL V3100 Secondary\", 0x5B74],\n [\"ATI FireGL V3200\", 0x3E54],\n [\"ATI FireGL V3200 Secondary\", 0x3E74],\n [\"ATI FireGL V3300\", 0x7152],\n [\"ATI FireGL V3300 Secondary\", 0x7172],\n [\"ATI FireGL V3350\", 0x7153],\n [\"ATI FireGL V3350 Secondary\", 0x7173],\n [\"ATI FireGL V3400\", 0x71D2],\n [\"ATI FireGL V3400 Secondary\", 0x71F2],\n [\"ATI FireGL V5000\", 0x5E48],\n [\"ATI 
FireGL V5000 Secondary\", 0x5E68],\n [\"ATI FireGL V5100\", 0x5551],\n [\"ATI FireGL V5100 Secondary\", 0x5571],\n [\"ATI FireGL V5200\", 0x71DA],\n [\"ATI FireGL V5200 Secondary\", 0x71FA],\n [\"ATI FireGL V5300\", 0x7105],\n [\"ATI FireGL V5300 Secondary\", 0x7125],\n [\"ATI FireGL V7100\", 0x5550],\n [\"ATI FireGL V7100 Secondary\", 0x5570],\n [\"ATI FireGL V7200\", 0x5D50],\n [\"ATI FireGL V7200 \", 0x7104],\n [\"ATI FireGL V7200 Secondary\", 0x5D70],\n [\"ATI FireGL V7200 Secondary \", 0x7124],\n [\"ATI FireGL V7300\", 0x710E],\n [\"ATI FireGL V7300 Secondary\", 0x712E],\n [\"ATI FireGL V7350\", 0x710F],\n [\"ATI FireGL V7350 Secondary\", 0x712F],\n [\"ATI FireGL X1\", 0x4E47],\n [\"ATI FireGL X1 Secondary\", 0x4E67],\n [\"ATI FireGL X2-256/X2-256t\", 0x4E4B],\n [\"ATI FireGL X2-256/X2-256t Secondary\", 0x4E6B],\n [\"ATI FireGL X3-256\", 0x4A4D],\n [\"ATI FireGL X3-256 Secondary\", 0x4A6D],\n [\"ATI FireGL Z1\", 0x4147],\n [\"ATI FireGL Z1 Secondary\", 0x4167],\n [\"ATI FireMV 2200\", 0x5B65],\n [\"ATI FireMV 2200 Secondary\", 0x5B75],\n [\"ATI FireMV 2250\", 0x719B],\n [\"ATI FireMV 2250 Secondary\", 0x71BB],\n [\"ATI FireMV 2400\", 0x3151],\n [\"ATI FireMV 2400 Secondary\", 0x3171],\n [\"ATI FireStream 2U\", 0x724E],\n [\"ATI FireStream 2U Secondary\", 0x726E],\n [\"ATI MOBILITY FIRE GL 7800\", 0x4C58],\n [\"ATI MOBILITY FIRE GL T2/T2e\", 0x4E54],\n [\"ATI MOBILITY FireGL V3100\", 0x5464],\n [\"ATI MOBILITY FireGL V3200\", 0x3154],\n [\"ATI MOBILITY FireGL V5000\", 0x564A],\n [\"ATI MOBILITY FireGL V5000 \", 0x564B],\n [\"ATI MOBILITY FireGL V5100\", 0x5D49],\n [\"ATI MOBILITY FireGL V5200\", 0x71C4],\n [\"ATI MOBILITY FireGL V5250\", 0x71D4],\n [\"ATI MOBILITY FireGL V7100\", 0x7106],\n [\"ATI MOBILITY FireGL V7200\", 0x7103],\n [\"ATI MOBILITY RADEON\", 0x4C59],\n [\"ATI MOBILITY RADEON 7500\", 0x4C57],\n [\"ATI MOBILITY RADEON 9500\", 0x4E52],\n [\"ATI MOBILITY RADEON 9550\", 0x4E56],\n [\"ATI MOBILITY RADEON 9600/9700 Series\", 0x4E50],\n [\"ATI MOBILITY RADEON 9800\", 0x4A4E],\n [\"ATI Mobility Radeon HD 2300\", 0x7210],\n [\"ATI Mobility Radeon HD 2300 \", 0x7211],\n [\"ATI Mobility Radeon HD 2400\", 0x94C9],\n [\"ATI Mobility Radeon HD 2400 XT\", 0x94C8],\n [1, \"ATI Mobility Radeon HD 2600\", 0x9581],\n [1, \"ATI Mobility Radeon HD 2600 XT\", 0x9583],\n [\"ATI Mobility Radeon X1300\", 0x714A],\n [\"ATI Mobility Radeon X1300 \", 0x7149],\n [\"ATI Mobility Radeon X1300 \", 0x714B],\n [\"ATI Mobility Radeon X1300 \", 0x714C],\n [\"ATI Mobility Radeon X1350\", 0x718B],\n [\"ATI Mobility Radeon X1350 \", 0x718C],\n [\"ATI Mobility Radeon X1350 \", 0x7196],\n [\"ATI Mobility Radeon X1400\", 0x7145],\n [\"ATI Mobility Radeon X1450\", 0x7186],\n [\"ATI Mobility Radeon X1450 \", 0x718D],\n [\"ATI Mobility Radeon X1600\", 0x71C5],\n [\"ATI Mobility Radeon X1700\", 0x71D5],\n [\"ATI Mobility Radeon X1700 \", 0x71DE],\n [\"ATI Mobility Radeon X1700 XT\", 0x71D6],\n [1, \"ATI Mobility Radeon X1800\", 0x7102],\n [1, \"ATI Mobility Radeon X1800 XT\", 0x7101],\n [1, \"ATI Mobility Radeon X1900\", 0x7284],\n [1, \"ATI Mobility Radeon X2300\", 0x718A],\n [1, \"ATI Mobility Radeon X2300 \", 0x7188],\n [\"ATI MOBILITY RADEON X300\", 0x5461],\n [\"ATI MOBILITY RADEON X300 \", 0x5460],\n [\"ATI MOBILITY RADEON X300 \", 0x3152],\n [\"ATI MOBILITY RADEON X600\", 0x3150],\n [\"ATI MOBILITY RADEON X600 SE\", 0x5462],\n [\"ATI MOBILITY RADEON X700\", 0x5652],\n [\"ATI MOBILITY RADEON X700 \", 0x5653],\n [\"ATI MOBILITY RADEON X700 Secondary\", 0x5673],\n [1, \"ATI MOBILITY RADEON X800\", 0x5D4A],\n 
[1, \"ATI MOBILITY RADEON X800 XT\", 0x5D48],\n [\"ATI Radeon 9550/X1050 Series\", 0x4153],\n [\"ATI Radeon 9550/X1050 Series Secondary\", 0x4173],\n [\"ATI RADEON 9600 Series\", 0x4150],\n [\"ATI RADEON 9600 Series \", 0x4E51],\n [\"ATI RADEON 9600 Series \", 0x4151],\n [\"ATI RADEON 9600 Series \", 0x4155],\n [\"ATI RADEON 9600 Series \", 0x4152],\n [\"ATI RADEON 9600 Series Secondary\", 0x4E71],\n [\"ATI RADEON 9600 Series Secondary \", 0x4171],\n [\"ATI RADEON 9600 Series Secondary \", 0x4170],\n [\"ATI RADEON 9600 Series Secondary \", 0x4175],\n [\"ATI RADEON 9600 Series Secondary \", 0x4172],\n [1, \"ATI Radeon HD 2900 XT\", 0x9402],\n [1, \"ATI Radeon HD 2900 XT \", 0x9403],\n [1, \"ATI Radeon HD 2900 XT \", 0x9400],\n [1, \"ATI Radeon HD 2900 XT \", 0x9401],\n [\"ATI Radeon X1200 Series\", 0x791E],\n [\"ATI Radeon X1200 Series \", 0x791F],\n [1, \"ATI Radeon X1950 GT\", 0x7288],\n [1, \"ATI Radeon X1950 GT Secondary\", 0x72A8],\n [1, \"ATI RADEON X800 GT\", 0x554E],\n [1, \"ATI RADEON X800 GT Secondary\", 0x556E],\n [1, \"ATI RADEON X800 XL\", 0x554D],\n [1, \"ATI RADEON X800 XL Secondary\", 0x556D],\n [1, \"ATI RADEON X850 PRO\", 0x4B4B],\n [1, \"ATI RADEON X850 PRO Secondary\", 0x4B6B],\n [1, \"ATI RADEON X850 SE\", 0x4B4A],\n [1, \"ATI RADEON X850 SE Secondary\", 0x4B6A],\n [1, \"ATI RADEON X850 XT\", 0x4B49],\n [1, \"ATI RADEON X850 XT Platinum Edition\", 0x4B4C],\n [1, \"ATI RADEON X850 XT Platinum Edition Secondary\", 0x4B6C],\n [1, \"ATI RADEON X850 XT Secondary\", 0x4B69],\n [\"ATI Radeon Xpress 1200 Series\", 0x793F],\n [\"ATI Radeon Xpress 1200 Series \", 0x7941],\n [\"ATI Radeon Xpress 1200 Series \", 0x7942],\n [\"ATI Radeon Xpress Series\", 0x5A61],\n [\"ATI Radeon Xpress Series \", 0x5A63],\n [\"ATI Radeon Xpress Series \", 0x5A62],\n [\"ATI Radeon Xpress Series \", 0x5A41],\n [\"ATI Radeon Xpress Series \", 0x5A43],\n [\"ATI Radeon Xpress Series \", 0x5A42],\n [\"ATI Radeon Xpress Series \", 0x5954],\n [\"ATI Radeon Xpress Series \", 0x5854],\n [\"ATI Radeon Xpress Series \", 0x5955],\n [\"ATI Radeon Xpress Series \", 0x5974],\n [\"ATI Radeon Xpress Series \", 0x5874],\n [\"ATI Radeon Xpress Series \", 0x5975],\n [\"Radeon 9500\", 0x4144],\n [\"Radeon 9500 \", 0x4149],\n [\"Radeon 9500 PRO / 9700\", 0x4E45],\n [\"Radeon 9500 PRO / 9700 Secondary\", 0x4E65],\n [\"Radeon 9500 Secondary\", 0x4164],\n [\"Radeon 9500 Secondary \", 0x4169],\n [\"Radeon 9600 TX\", 0x4E46],\n [\"Radeon 9600 TX Secondary\", 0x4E66],\n [\"Radeon 9600TX\", 0x4146],\n [\"Radeon 9600TX Secondary\", 0x4166],\n [\"Radeon 9700 PRO\", 0x4E44],\n [\"Radeon 9700 PRO Secondary\", 0x4E64],\n [\"Radeon 9800\", 0x4E49],\n [\"Radeon 9800 PRO\", 0x4E48],\n [\"Radeon 9800 PRO Secondary\", 0x4E68],\n [\"Radeon 9800 SE\", 0x4148],\n [\"Radeon 9800 SE Secondary\", 0x4168],\n [\"Radeon 9800 Secondary\", 0x4E69],\n [\"Radeon 9800 XT\", 0x4E4A],\n [\"Radeon 9800 XT Secondary\", 0x4E6A],\n [\"Radeon X1300 / X1550 Series\", 0x7146],\n [\"Radeon X1300 / X1550 Series Secondary\", 0x7166],\n [\"Radeon X1300 Series\", 0x714E],\n [\"Radeon X1300 Series \", 0x715E],\n [\"Radeon X1300 Series \", 0x714D],\n [\"Radeon X1300 Series \", 0x71C3],\n [\"Radeon X1300 Series \", 0x718F],\n [\"Radeon X1300 Series Secondary\", 0x716E],\n [\"Radeon X1300 Series Secondary \", 0x717E],\n [\"Radeon X1300 Series Secondary \", 0x716D],\n [\"Radeon X1300 Series Secondary \", 0x71E3],\n [\"Radeon X1300 Series Secondary \", 0x71AF],\n [\"Radeon X1300/X1550 Series\", 0x7142],\n [\"Radeon X1300/X1550 Series \", 0x7180],\n [\"Radeon X1300/X1550 
Series \", 0x7183],\n [\"Radeon X1300/X1550 Series \", 0x7187],\n [\"Radeon X1300/X1550 Series Secondary\", 0x7162],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A0],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A3],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A7],\n [\"Radeon X1550 64-bit\", 0x7147],\n [\"Radeon X1550 64-bit \", 0x715F],\n [\"Radeon X1550 64-bit \", 0x719F],\n [\"Radeon X1550 64-bit Secondary\", 0x7167],\n [\"Radeon X1550 64-bit Secondary \", 0x717F],\n [\"Radeon X1550 Series\", 0x7143],\n [\"Radeon X1550 Series \", 0x7193],\n [\"Radeon X1550 Series Secondary\", 0x7163],\n [\"Radeon X1550 Series Secondary \", 0x71B3],\n [\"Radeon X1600 Pro / Radeon X1300 XT\", 0x71CE],\n [\"Radeon X1600 Pro / Radeon X1300 XT Secondary\", 0x71EE],\n [\"Radeon X1600 Series\", 0x7140],\n [\"Radeon X1600 Series \", 0x71C0],\n [\"Radeon X1600 Series \", 0x71C2],\n [\"Radeon X1600 Series \", 0x71C6],\n [\"Radeon X1600 Series \", 0x7181],\n [\"Radeon X1600 Series \", 0x71CD],\n [\"Radeon X1600 Series Secondary\", 0x7160],\n [\"Radeon X1600 Series Secondary \", 0x71E2],\n [\"Radeon X1600 Series Secondary \", 0x71E6],\n [\"Radeon X1600 Series Secondary \", 0x71A1],\n [\"Radeon X1600 Series Secondary \", 0x71ED],\n [\"Radeon X1600 Series Secondary \", 0x71E0],\n [\"Radeon X1650 Series\", 0x71C1],\n [\"Radeon X1650 Series \", 0x7293],\n [\"Radeon X1650 Series \", 0x7291],\n [\"Radeon X1650 Series \", 0x71C7],\n [\"Radeon X1650 Series Secondary\", 0x71E1],\n [\"Radeon X1650 Series Secondary \", 0x72B3],\n [\"Radeon X1650 Series Secondary \", 0x72B1],\n [\"Radeon X1650 Series Secondary \", 0x71E7],\n [1, \"Radeon X1800 Series\", 0x7100],\n [1, \"Radeon X1800 Series \", 0x7108],\n [1, \"Radeon X1800 Series \", 0x7109],\n [1, \"Radeon X1800 Series \", 0x710A],\n [1, \"Radeon X1800 Series \", 0x710B],\n [1, \"Radeon X1800 Series \", 0x710C],\n [1, \"Radeon X1800 Series Secondary\", 0x7120],\n [1, \"Radeon X1800 Series Secondary \", 0x7128],\n [1, \"Radeon X1800 Series Secondary \", 0x7129],\n [1, \"Radeon X1800 Series Secondary \", 0x712A],\n [1, \"Radeon X1800 Series Secondary \", 0x712B],\n [1, \"Radeon X1800 Series Secondary \", 0x712C],\n [1, \"Radeon X1900 Series\", 0x7243],\n [1, \"Radeon X1900 Series \", 0x7245],\n [1, \"Radeon X1900 Series \", 0x7246],\n [1, \"Radeon X1900 Series \", 0x7247],\n [1, \"Radeon X1900 Series \", 0x7248],\n [1, \"Radeon X1900 Series \", 0x7249],\n [1, \"Radeon X1900 Series \", 0x724A],\n [1, \"Radeon X1900 Series \", 0x724B],\n [1, \"Radeon X1900 Series \", 0x724C],\n [1, \"Radeon X1900 Series \", 0x724D],\n [1, \"Radeon X1900 Series \", 0x724F],\n [1, \"Radeon X1900 Series Secondary\", 0x7263],\n [1, \"Radeon X1900 Series Secondary \", 0x7265],\n [1, \"Radeon X1900 Series Secondary \", 0x7266],\n [1, \"Radeon X1900 Series Secondary \", 0x7267],\n [1, \"Radeon X1900 Series Secondary \", 0x7268],\n [1, \"Radeon X1900 Series Secondary \", 0x7269],\n [1, \"Radeon X1900 Series Secondary \", 0x726A],\n [1, \"Radeon X1900 Series Secondary \", 0x726B],\n [1, \"Radeon X1900 Series Secondary \", 0x726C],\n [1, \"Radeon X1900 Series Secondary \", 0x726D],\n [1, \"Radeon X1900 Series Secondary \", 0x726F],\n [1, \"Radeon X1950 Series\", 0x7280],\n [1, \"Radeon X1950 Series \", 0x7240],\n [1, \"Radeon X1950 Series \", 0x7244],\n [1, \"Radeon X1950 Series Secondary\", 0x72A0],\n [1, \"Radeon X1950 Series Secondary \", 0x7260],\n [1, \"Radeon X1950 Series Secondary \", 0x7264],\n [\"Radeon X300/X550/X1050 Series\", 0x5B60],\n [\"Radeon X300/X550/X1050 Series \", 
0x5B63],\n [\"Radeon X300/X550/X1050 Series Secondary\", 0x5B73],\n [\"Radeon X300/X550/X1050 Series Secondary \", 0x5B70],\n [\"Radeon X550/X700 Series \", 0x5657],\n [\"Radeon X550/X700 Series Secondary\", 0x5677],\n [\"Radeon X600 Series\", 0x5B62],\n [\"Radeon X600 Series Secondary\", 0x5B72],\n [\"Radeon X600/X550 Series\", 0x3E50],\n [\"Radeon X600/X550 Series Secondary\", 0x3E70],\n [\"Radeon X700\", 0x5E4D],\n [\"Radeon X700 PRO\", 0x5E4B],\n [\"Radeon X700 PRO Secondary\", 0x5E6B],\n [\"Radeon X700 SE\", 0x5E4C],\n [\"Radeon X700 SE Secondary\", 0x5E6C],\n [\"Radeon X700 Secondary\", 0x5E6D],\n [\"Radeon X700 XT\", 0x5E4A],\n [\"Radeon X700 XT Secondary\", 0x5E6A],\n [\"Radeon X700/X550 Series\", 0x5E4F],\n [\"Radeon X700/X550 Series Secondary\", 0x5E6F],\n [1, \"Radeon X800 GT\", 0x554B],\n [1, \"Radeon X800 GT Secondary\", 0x556B],\n [1, \"Radeon X800 GTO\", 0x5549],\n [1, \"Radeon X800 GTO \", 0x554F],\n [1, \"Radeon X800 GTO \", 0x5D4F],\n [1, \"Radeon X800 GTO Secondary\", 0x5569],\n [1, \"Radeon X800 GTO Secondary \", 0x556F],\n [1, \"Radeon X800 GTO Secondary \", 0x5D6F],\n [1, \"Radeon X800 PRO\", 0x4A49],\n [1, \"Radeon X800 PRO Secondary\", 0x4A69],\n [1, \"Radeon X800 SE\", 0x4A4F],\n [1, \"Radeon X800 SE Secondary\", 0x4A6F],\n [1, \"Radeon X800 Series\", 0x4A48],\n [1, \"Radeon X800 Series \", 0x4A4A],\n [1, \"Radeon X800 Series \", 0x4A4C],\n [1, \"Radeon X800 Series \", 0x5548],\n [1, \"Radeon X800 Series Secondary\", 0x4A68],\n [1, \"Radeon X800 Series Secondary \", 0x4A6A],\n [1, \"Radeon X800 Series Secondary \", 0x4A6C],\n [1, \"Radeon X800 Series Secondary \", 0x5568],\n [1, \"Radeon X800 VE\", 0x4A54],\n [1, \"Radeon X800 VE Secondary\", 0x4A74],\n [1, \"Radeon X800 XT\", 0x4A4B],\n [1, \"Radeon X800 XT \", 0x5D57],\n [1, \"Radeon X800 XT Platinum Edition\", 0x4A50],\n [1, \"Radeon X800 XT Platinum Edition \", 0x554A],\n [1, \"Radeon X800 XT Platinum Edition Secondary\", 0x4A70],\n [1, \"Radeon X800 XT Platinum Edition Secondary \", 0x556A],\n [1, \"Radeon X800 XT Secondary\", 0x4A6B],\n [1, \"Radeon X800 XT Secondary \", 0x5D77],\n [1, \"Radeon X850 XT\", 0x5D52],\n [1, \"Radeon X850 XT Platinum Edition\", 0x5D4D],\n [1, \"Radeon X850 XT Platinum Edition Secondary\", 0x5D6D],\n [1, \"Radeon X850 XT Secondary\", 0x5D72],\n ]\n vendorId = 0x1002\n for entry in ati_device_list:\n if len(entry) == 3:\n flag, deviceName, deviceId = entry\n else:\n deviceName, deviceId = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()\n \n nvidia_device_list = [\n [0x014F, \"GeForce 6200\"],\n [0x00F3, \"GeForce 6200\"],\n [0x0221, \"GeForce 6200\"],\n [0x0163, \"GeForce 6200 LE\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0161, \"GeForce 6200 TurboCache(TM)\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0160, \"GeForce 6500\"],\n [1, 0x0141, \"GeForce 6600\"],\n [1, 0x00F2, \"GeForce 6600\"],\n [1, 0x0140, \"GeForce 6600 GT\"],\n [1, 0x00F1, \"GeForce 6600 GT\"],\n [1, 0x0142, \"GeForce 6600 LE\"],\n [1, 0x00F4, \"GeForce 6600 LE\"],\n [1, 0x0143, \"GeForce 6600 VE\"],\n [1, 0x0147, \"GeForce 6700 XL\"],\n [1, 0x0041, \"GeForce 6800\"],\n [1, 0x00C1, \"GeForce 6800\"],\n [1, 0x0047, \"GeForce 6800 GS\"],\n [1, 0x00F6, \"GeForce 6800 GS\"],\n [1, 0x00C0, \"GeForce 6800 GS\"],\n [1, 0x0045, \"GeForce 6800 GT\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x00C2, \"GeForce 6800 LE\"],\n [1, 0x0040, \"GeForce 6800 Ultra\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x0043, \"GeForce 6800 XE\"],\n [1, 0x0048, \"GeForce 6800 XT\"],\n 
[1, 0x0218, \"GeForce 6800 XT\"],\n [1, 0x00C3, \"GeForce 6800 XT\"],\n [0x01DF, \"GeForce 7300 GS\"],\n [0x0393, \"GeForce 7300 GT\"],\n [0x01D1, \"GeForce 7300 LE\"],\n [0x01D3, \"GeForce 7300 SE\"],\n [0x01DD, \"GeForce 7500 LE\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x02E1, \"GeForce 7600 GS\"],\n [1, 0x0391, \"GeForce 7600 GT\"],\n [1, 0x0394, \"GeForce 7600 LE\"],\n [1, 0x00F5, \"GeForce 7800 GS\"],\n [1, 0x0092, \"GeForce 7800 GT\"],\n [1, 0x0091, \"GeForce 7800 GTX\"],\n [1, 0x0291, \"GeForce 7900 GT/GTO\"],\n [1, 0x0290, \"GeForce 7900 GTX\"],\n [1, 0x0293, \"GeForce 7900 GX2\"],\n [1, 0x0294, \"GeForce 7950 GX2\"],\n [0x0322, \"GeForce FX 5200\"],\n [0x0321, \"GeForce FX 5200 Ultra\"],\n [0x0323, \"GeForce FX 5200LE\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0312, \"GeForce FX 5600\"],\n [0x0311, \"GeForce FX 5600 Ultra\"],\n [0x0314, \"GeForce FX 5600XT\"],\n [0x0342, \"GeForce FX 5700\"],\n [0x0341, \"GeForce FX 5700 Ultra\"],\n [0x0343, \"GeForce FX 5700LE\"],\n [0x0344, \"GeForce FX 5700VE\"],\n [0x0302, \"GeForce FX 5800\"],\n [0x0301, \"GeForce FX 5800 Ultra\"],\n [0x0331, \"GeForce FX 5900\"],\n [0x0330, \"GeForce FX 5900 Ultra\"],\n [0x0333, \"GeForce FX 5950 Ultra\"],\n [0x0324, \"GeForce FX Go5200 64M\"],\n [0x031A, \"GeForce FX Go5600\"],\n [0x0347, \"GeForce FX Go5700\"],\n [0x0167, \"GeForce Go 6200/6400\"],\n [0x0168, \"GeForce Go 6200/6400\"],\n [1, 0x0148, \"GeForce Go 6600\"],\n [1, 0x00c8, \"GeForce Go 6800\"],\n [1, 0x00c9, \"GeForce Go 6800 Ultra\"],\n [1, 0x0098, \"GeForce Go 7800\"],\n [1, 0x0099, \"GeForce Go 7800 GTX\"],\n [1, 0x0298, \"GeForce Go 7900 GS\"],\n [1, 0x0299, \"GeForce Go 7900 GTX\"],\n [0x0185, \"GeForce MX 4000\"],\n [0x00FA, \"GeForce PCX 5750\"],\n [0x00FB, \"GeForce PCX 5900\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0111, \"GeForce2 MX200\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0200, \"GeForce3\"],\n [0x0201, \"GeForce3 Ti200\"],\n [0x0202, \"GeForce3 Ti500\"],\n [0x0172, \"GeForce4 MX 420\"],\n [0x0171, \"GeForce4 MX 440\"],\n [0x0181, \"GeForce4 MX 440 with AGP8X\"],\n [0x0173, \"GeForce4 MX 440-SE\"],\n [0x0170, \"GeForce4 MX 460\"],\n [0x0253, \"GeForce4 Ti 4200\"],\n [0x0281, \"GeForce4 Ti 4200 with AGP8X\"],\n [0x0251, \"GeForce4 Ti 4400\"],\n [0x0250, \"GeForce4 Ti 4600\"],\n [0x0280, \"GeForce4 Ti 4800\"],\n [0x0282, \"GeForce4 Ti 4800SE\"],\n [0x0203, \"Quadro DCC\"],\n [0x0309, \"Quadro FX 1000\"],\n [0x034E, \"Quadro FX 1100\"],\n [0x00FE, \"Quadro FX 1300\"],\n [0x00CE, \"Quadro FX 1400\"],\n [0x0308, \"Quadro FX 2000\"],\n [0x0338, \"Quadro FX 3000\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x004E, \"Quadro FX 4000\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x009D, \"Quadro FX 4500\"],\n [1, 0x029F, \"Quadro FX 4500 X2\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0x014E, \"Quadro FX 540\"],\n [0x014C, \"Quadro FX 540 MXM\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0X033F, \"Quadro FX 700\"],\n [0x034C, \"Quadro FX Go1000\"],\n [0x00CC, \"Quadro FX Go1400\"],\n [0x031C, \"Quadro FX Go700\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x032A, \"Quadro NVS 280 PCI\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [0x0165, \"Quadro NVS 285\"],\n [0x017A, \"Quadro NVS\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x0113, \"Quadro2 MXR/EX\"],\n [0x017A, \"Quadro NVS\"],\n [0x018B, \"Quadro4 380 XGL\"],\n [0x0178, 
\"Quadro4 550 XGL\"],\n [0x0188, \"Quadro4 580 XGL\"],\n [0x025B, \"Quadro4 700 XGL\"],\n [0x0259, \"Quadro4 750 XGL\"],\n [0x0258, \"Quadro4 900 XGL\"],\n [0x0288, \"Quadro4 980 XGL\"],\n [0x028C, \"Quadro4 Go700\"],\n [1, 0x0295, \"NVIDIA GeForce 7950 GT\"],\n [0x03D0, \"NVIDIA GeForce 6100 nForce 430\"],\n [0x03D1, \"NVIDIA GeForce 6100 nForce 405\"],\n [0x03D2, \"NVIDIA GeForce 6100 nForce 400\"],\n [0x0241, \"NVIDIA GeForce 6150 LE\"],\n [0x0242, \"NVIDIA GeForce 6100\"],\n [0x0245, \"NVIDIA Quadro NVS 210S / NVIDIA GeForce 6150LE\"],\n [1, 0x029C, \"NVIDIA Quadro FX 5500\"],\n [1, 0x0191, \"NVIDIA GeForce 8800 GTX\"],\n [1, 0x0193, \"NVIDIA GeForce 8800 GTS\"],\n [1, 0x0400, \"NVIDIA GeForce 8600 GTS\"],\n [1, 0x0402, \"NVIDIA GeForce 8600 GT\"],\n [0x0421, \"NVIDIA GeForce 8500 GT\"],\n [0x0422, \"NVIDIA GeForce 8400 GS\"],\n [0x0423, \"NVIDIA GeForce 8300 GS\"],\n ]\n vendorId = 0x10de\n for entry in nvidia_device_list:\n if len(entry) == 3:\n flag, deviceId, deviceName = entry\n else:\n deviceId, deviceName = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()", "def _data(self, data):\n# \"\"\"Send data to spi bus of display chip, most DC pin need set to HIGH \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 1 )\n# self._spi.writebytes( data )\n raise NotImplementedError", "def __str__(self):\n s = \"Filename : %s\\n\" % self.fname\n s += \"Data size : %d x %d x %d\\n\" % (self._size[::-1])\n s += \"CCD Chip Size : %d x %d\\n\" % self._chipSize[::-1]\n s += \"File date : %s\\n\" % time.asctime(self._filedate)\n s += \"Exposure Time : %f\\n\" % self.Exposure\n s += \"Num ROI : %d\\n\" % self.NumROI\n s += \"Num ROI Exp : %d\\n\" % self.NumROIExperiment\n s += \"Contoller Ver.: %d\\n\" % self.ControllerVersion\n s += \"Logic Output : %d\\n\" % self.LogicOutput\n #self.AppHiCapLowNoise = self._readInt(4)\n s += \"Timing Mode : %d\\n\" % self.TimingMode\n s += \"Det. Temp : %d\\n\" % self.DetTemperature\n s += \"Det. Type : %d\\n\" % self.DetectorType\n s += \"Trigger Diode : %d\\n\" % self.TriggerDiode\n s += \"Delay Time : %d\\n\" % self.DelayTime\n s += \"Shutter Cont. : %d\\n\" % self.ShutterControl\n s += \"Absorb Live : %d\\n\" % self.AbsorbLive\n s += \"Absorb Mode : %d\\n\" % self.AbsorbMode\n s += \"Virtual Chip : %d\\n\" % self.CanDoVirtualChip\n s += \"Thresh. Min L : %d\\n\" % self.ThresholdMinLive\n s += \"Thresh. Min : %d\\n\" % self.ThresholdMin\n s += \"Thresh. Max L : %d\\n\" % self.ThresholdMaxLive\n s += \"Thresh. Max : %d\\n\" % self.ThresholdMax\n s += \"Geometric Op : %d\\n\" % self.GeometricOps\n s += \"ADC Offset : %d\\n\" % self.ADCOffset\n s += \"ADC Rate : %d\\n\" % self.ADCRate\n s += \"ADC Type : %d\\n\" % self.ADCType\n s += \"ADC Resol. : %d\\n\" % self.ADCRes\n s += \"ADC Bit. Adj. 
: %d\\n\" % self.ADCBitAdj\n s += \"ADC Gain : %d\\n\" % self.Gain\n \n i = 0\n for roi in self.allROI:\n s += \"ROI %-4d : %-5d %-5d %-5d %-5d %-5d %-5d\\n\" % (i,roi[0], roi[1], roi[2],\n roi[3], roi[4], roi[5])\n i += 1\n \n s += \"\\nComments :\\n\"\n i = 0\n for c in self._comments:\n s += \"%-3d : \" % i\n i += 1\n s += c\n s += \"\\n\"\n return s", "def __init__(self, hdw=['Soundcard'], devicename='dev1'):\n self.debugFlag = False\n self.task = None # NI Task\n self.required_hardware = hdw # Require specific hardware \n self.hardware = [] # list of hardware actually found on this system\n self.find_hardware(device_info={'devicename': devicename}) # population the self.hardware list", "def degibber(self):", "def update_firmware(self) -> str:", "def identifyChip(chipType):\n with open('../illumina_files/illumina_dict.pickle', \"rb\") as f:\n chipDict = pickle.load(f)\n\n values = chipDict[chipType]\n\n print('BPM: ' + values[0] + '\\n')\n print('EGT: ' + values[1] + '\\n')\n print('CSV: ' + values[2] + '\\n')\n\n return values[0], values[1], values[2]", "def info() -> None:", "def __init__(self, address=0x76):\n self.address = address\n self.bus = self._initialize_bus()\n\n self.chip_id, self.chip_version = self._get_info_about_sensor()", "async def identify(self):\n await self.send({\n \"op\": 2,\n \"d\" : {\n \"token\" : self.client.token,\n \"properties\": {\n \"$os\" : platform,\n \"$browser\": \"SpeedCord\",\n \"$device\" : \"SpeedCord\"\n },\n \"intents\" : self.client.intents,\n \"shard\" : (self.id, self.client.shard_count)\n }\n })", "def bootloader() -> NoReturn:", "def __set_chips(self):\n\n # Scan filesystem\n root_files = [root_file for root_file in os.walk(self.dataset_path)]\n\n # Decode truth.txt file\n truth_files = [os.path.join(walked[0], 'truth.txt') for walked in root_files if 'truth.txt' in walked[2]]\n if len(truth_files) == 0:\n raise IOError(\"No truth file found.\")\n elif len(truth_files) > 1:\n raise IOError(\"Too many truth files available.\")\n\n truth_data = self.__decode_truth_file(truth_files.pop())\n if len(truth_data) < 1:\n raise IOError(\"No truth loaded\")\n if self.__debug:\n print(\"{} truth records loaded.\".format(len(truth_data)))\n\n # Index all image chips\n file_paths = [[os.path.join(walked[0], wfile) for wfile in walked[2]] for walked in root_files]\n chip_idx = dict(filter(lambda t: t is not None, map(self.__index_chip, itertools.chain(*file_paths))))\n\n if len(chip_idx) != len(truth_data):\n raise IOError(\"Number of truth records not equal to number of chips.\")\n if self.__debug:\n print(\"{} image chips loaded.\".format(len(chip_idx)))\n\n # Create and store chips\n self.chips = {meta['file']: self.__create_chip(meta, truth_data[idx]) for idx, meta in chip_idx.items()}\n if self.__debug:\n print(\"{} chip.Chips loaded.\".format(len(self.chips)))", "def get_info(self):\n return \"TODO !\"", "def getInfo():", "def _default_setup(self):\n self._n_configs = 1\n self._sn_size = 100\n self._nt = 10000\n self._active_brdch = np.zeros(\n (), dtype=[(\"SIS 3302\", bool, (4, 8)), (\"SIS 3305\", bool, (2, 8))]\n )\n self._active_brdch[\"SIS 3302\"][0][0] = True\n self._active_brdch[\"SIS 3305\"][0][0] = True\n self._config_names = []\n self._active_config = (\"config01\",)\n self._sis3305_mode = 0", "def do_Device (self, line):", "def updateInterface(self):\n p = self.cxn[self.selectedADR].packet()\n p.magnetv().pscurrent().psvoltage()\n p.time()\n p.temperatures()\n p.get_state_var('CompressorStatus')\n p.get_instrument_state()\n state = 
yield p.send()\n # change instrument statuses\n for name,status in state['get_instrument_state']:\n if status[0] == False: color = 'red3'\n elif status[1] == False: color = 'orange3'\n elif status[1] == True: color = 'green3'\n else: color = 'gray70'\n self.instrumentStatuses[name].config(bg=color)\n # change compressor button\n if state['get_state_var'] == True:\n self.compressorButton.configure(text='Stop Compressor',\n command=self.stopCompressor,\n state=Tkinter.NORMAL)\n elif state['get_state_var'] == False:\n self.compressorButton.configure(text='Start Compressor',\n command=self.startCompressor,\n state=Tkinter.NORMAL)\n else: self.compressorButton.configure(state=Tkinter.DISABLED)\n # update current, voltage fields\n temps = {}\n stages = ('T_60K','T_3K','T_GGG','T_FAA')\n for i in range(len(stages)):\n temps[stages[i]] = state['temperatures'][i]\n #if temps[stages[i]] == 'nan': temps[stages[i]] = numpy.nan\n if numpy.isnan(state['magnetv']['V']):\n emf = 'ERR'\n else:\n emf = \"{0:.3f}\".format(state['magnetv']['V'])\n if numpy.isnan(state['pscurrent']['A']):\n psI = 'PS OFF'\n else:\n psI = \"{0:.3f}\".format(state['pscurrent']['A'])\n if numpy.isnan(state['psvoltage']['V']):\n psV = 'PS OFF'\n else:\n psV = \"{0:.3f}\".format(state['psvoltage']['V'])\n self.currentBackEMF.set( emf )\n self.currentI.set( psI )\n self.currentV.set( psV )\n # update plot:\n # change data to plot\n self.stage60K.set_xdata(numpy.append(self.stage60K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage60K.set_ydata(numpy.append(self.stage60K.get_ydata(),temps['T_60K']['K']))\n self.stage03K.set_xdata(numpy.append(self.stage03K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage03K.set_ydata(numpy.append(self.stage03K.get_ydata(),temps['T_3K']['K']))\n self.stageGGG.set_xdata(numpy.append(self.stageGGG.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageGGG.set_ydata(numpy.append(self.stageGGG.get_ydata(),temps['T_GGG']['K']))\n self.stageFAA.set_xdata(numpy.append(self.stageFAA.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageFAA.set_ydata(numpy.append(self.stageFAA.get_ydata(),temps['T_FAA']['K']))\n #update plot\n self.updatePlot()\n # update legend\n labelOrder = ['T_60K','T_3K','T_GGG','T_FAA']\n lines = [self.stage60K,self.stage03K,self.stageGGG,self.stageFAA]\n labels = [l.strip('T_')+' ['+\"{0:.3f}\".format(temps[l]['K'])+'K]' for l in labelOrder]\n labels = [s.replace('1.#QOK','OoR') for s in labels]\n # legend on top (if not using this, delete \\n in title)\n self.ax.legend(lines,labels,bbox_to_anchor=(0., 1.02, 1., .102), loc=3,\n ncol=4, mode=\"expand\", borderaxespad=0.)", "def _initialize_data(self):\n self.reset_count = 0\n self._idn_no_firmware = \"KEPCO,BOP 50-20,E1234,\"\n self._firmware = 2.6\n self._init_data()", "def on(self):", "def cx():", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_hardware_sound_systems'] = True", "def __init__(self, sensor, temperature_resolution, humidity_resolution):\n self.sensor = sensor\n self.sensor.turnHeaterOn() \n time.sleep(1.0) # Burn off condensed stuff.\n self.sensor.turnHeaterOff() \n self.update()\n # Main Program\n #print \"------------\"\n #print \"Manfacturer ID=0x%X\"% self.sensor.readManufacturerID() \n #print \"Device ID=0x%X\"% self.sensor.readDeviceID() \n #print \"Serial Number ID=0x%X\"% self.sensor.readSerialNumber() \n \n # change temperature resolution\n self.sensor.setTemperatureResolution(temperature_resolution)\n 
self.sensor.setHumidityResolution(humidity_resolution)", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def init_IR_codes():\n IR_codes.update( {b'FF629D' : say_temp} ) # Say temperature status\n IR_codes.update( {b'84FF9375' : say_temp} ) # Say temperature status\n #IR_codes.update( {b'FFA857' : volume_inc} ) # increase volume\n #IR_codes.update( {b'FFE01F' : volume_dec} ) # reduce volume\n IR_codes.update( {b'FF906F' : toSecureMode} ) # Will be noBodyHome\n IR_codes.update( {b'FFC23D' : ultra.switch} ) # On/off radio\n IR_codes.update( {b'BF09C35C' : ultra.switch} ) # On/off radio (big)\n #IR_codes.update( {b'8BE68656' : holeNightLightAuto} )\n #IR_codes.update( {b'B21F28AE' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'A6B1096A' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'24014B0' : noolite_hole_set_off} )\n IR_codes.update( {b'8FC212DB' : noolite_hole_set_on} )\n IR_codes.update( {b'7960556F' : noolite_hole_set_auto} )\n #IR_codes.update( {b'FF10EF' : holeNightLightAuto} )\n #IR_codes.update( {b'FF38C7' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'FF5AA5' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'FF30CF' : noolite_hole_set_off} )\n IR_codes.update( {b'FF18E7' : noolite_hole_set_on} )\n IR_codes.update( {b'FF7A85' : noolite_hole_set_auto} )", "def __init__(self):\n i2c.Pn532_i2c.__init__(self)\n self._uid = False", "def __init__(self, device):\n self.device = device\n self.io = serial.Serial(device, 57600, timeout=1)\n self.keys = ['time', 'centroid_x', 'centroid_y', 'centroid_r',\n 'level_1', 'level_2', 'level_3',\n 'width_1', 'width_2', 'width_3',\n 'height_1', 'height_2', 'height_3',\n 'power']", "def get_coulomb_info(self):\n return", "def about( cls, ):\n url = r\"http://www.opencircuits.com/Python_Smart_Terminal\"\n __, mem_msg = cls.show_process_memory( )\n msg = ( f\"{cls.controller.app_name} version:{cls.controller.version} \\nmode: {cls.parameters.mode}\"\n f\"\\n by Russ Hensel\"\n f\"\\nMemory in use {mem_msg} \\nCheck <Help> or \\n{url} \\nfor more info.\" )\n messagebox.showinfo( \"About\", msg )", "def healthcare():", "def test_card_info_lookup(self):\n pass", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_rgb_dmd'] = True", "def dicom_cli():", "def state_information(self) -> Dict[str, Any]:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def __init__(self, starting_point=-1):\n self.i_read = starting_point\n self.data = [['fake_chip_id', 'fake_version'],\n [96, 110, 203, 104, 50, 0, 29, 145, 59, 215, 208, 11,\n 232, 38, 42, 255, 249, 255, 172, 38, 10, 216, 189, 16],\n [75],\n [129, 1, 0, 16, 44, 3, 30],\n [76, 60, 128, 129, 49, 128, 94, 120]]", "def sth():", "def calc_misc(self, model):\n\n part_family = model.part_family.lower()\n\n # Legacy Demod Registers\n # FIXME: calculate these\n\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RATESELMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RXFRCDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL2_SQITHRESH, 0)\n self._reg_write(model.vars.MODEM_CTRL2_TXPINMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATDENS, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATLEVEL, 6)\n self._reg_write(model.vars.MODEM_CTRL4_OFFSETPHASESCALING, 0)\n 
self._reg_write(model.vars.MODEM_CTRL4_PHASECLICKFILT, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTAVG, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTDEB, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTGAIN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTRST, 0)\n self._reg_write(model.vars.MODEM_CTRL4_SOFTDSSSMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL5_BBSS, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DEMODRAWDATASEL2, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL5_POEPER, 0)\n self._reg_write(model.vars.MODEM_CTRL5_RESYNCLIMIT, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CODINGB, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CPLXCORREN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DEMODRESTARTALL, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DSSS3SYMBOLSYNCEN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_PREBASES, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONSHORTRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKINV, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKRAMPEN, 0)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_VMIDCTRL, 1)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_MUTEDLY, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0)\n\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGEN, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGLUTSIZE, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGSTEP, 0)\n self._reg_write(model.vars.MODEM_PRE_DSSSPRE, 0)\n self._reg_write(model.vars.MODEM_PRE_PRESYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_PRE_SYNCSYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_TIMING_FASTRESYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQINVEN, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQSYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TSAGCDEL, 0)\n\n # Long Range registers\n # FIXME: calculate these\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDLRDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDPUGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSPIKETHADD, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSS, 0)\n self._reg_write(model.vars.MODEM_LRFRC_CI500, 1)\n self._reg_write(model.vars.MODEM_LRFRC_FRCACKTIMETHD, 0)\n self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1)\n\n # DSA registers\n # FIXME: what do we need to calculate here?\n self._reg_write(model.vars.MODEM_DSACTRL_AGCBAUDEN, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_AMPJUPTHD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD0, 2)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD1, 4)\n self._reg_write(model.vars.MODEM_DSACTRL_DSARSTON, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_FREQAVGSYM, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_GAINREDUCDLY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_LOWDUTY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_RESTORE, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_SCHPRD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_TRANRSTDSA, 0)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMAXTHD, 0x78)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMINTHD, 12)\n self._reg_write(model.vars.MODEM_DSATHD0_SPIKETHD, 0x64)\n self._reg_write(model.vars.MODEM_DSATHD0_UNMODTHD, 4)\n self._reg_write(model.vars.MODEM_DSATHD1_AMPFLTBYP, 1)\n 
self._reg_write(model.vars.MODEM_DSATHD1_DSARSTCNT, 2)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQLATDLY, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQSCALE, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_POWABSTHD, 0x1388)\n self._reg_write(model.vars.MODEM_DSATHD1_POWRELTHD, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRDETDIS, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRFLTBYP, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_RSSIJMPTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_FDADJTHD, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_FREQESTTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_INTERFERDET, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_JUMPDETEN, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETFORCE, 0)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETPASSTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_POWABSTHDLOG, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMAXTHDLO, 120)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMINTHDLO, 12)\n self._reg_write(model.vars.MODEM_DSATHD3_SPIKETHDLO, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_UNMODTHDLO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD0LO, 2)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD1LO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_POWABSTHDLO, 5000)\n self._reg_write(model.vars.MODEM_DSATHD4_SWTHD, 0)\n\n # FIXME: check with Subrata on how to calculate these\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_ANTWAIT, 20)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIP2ANT, 1)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPCORRTHD, 100)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPRSSITHD, 0)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR0,1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR1, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR2, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR3, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSIANDDIVTHD, 20)\n self._reg_write(model.vars.MODEM_PHANTDECSION_CORRANDDIVTHD, 100)\n\n # FIXME: figure out how these AGC registers need to be calculated\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTPER, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_NEGSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_POSSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_STEPPER, 0)\n\n # Antenna Diversity Registers\n # FIXME: check with Amey if we need to calculate these\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30)\n self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0)\n self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0)\n self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0)\n #self._reg_write(model.vars.MODEM_CTRL3_ANTDIVMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL3_ANTDIVREPEATDIS, 0)\n\n # Coherent Demod Registers\n #FIXME: Check with Yan/Per on how to calculate these\n self._reg_write(model.vars.MODEM_COH2_DSAPEAKCHPWRTH, 0)\n 
self._reg_write(model.vars.MODEM_COH3_COHDSADETDIS, 0)\n self._reg_write(model.vars.MODEM_COH3_DSAPEAKCHPWREN, 0)\n self._reg_write(model.vars.MODEM_COH3_LOGICBASEDCOHDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_COH3_ONEPEAKQUALEN, 0)\n self._reg_write(model.vars.MODEM_COH3_PEAKCHKTIMOUT, 0)\n\n # Clock-gating register\n self._reg_write(model.vars.MODEM_AUTOCG_AUTOCGEN, 0) #We calculate MODEM_CGCLKSTOP_FORCEOFF in calculator instead\n self._reg_write(model.vars.FRC_AUTOCG_AUTOCGEN, 7)\n\n # Shaping filter coefficients\n #FIXME: check with Yan on how to calculate these\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF40, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF41, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF42, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF43, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF44, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF45, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF46, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF47, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF48, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF49, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF50, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF51, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF52, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF53, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF54, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF55, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF56, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF57, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF58, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF59, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF60, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF61, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF62, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF63, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF10, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF11, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF9, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF12, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF13, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF14, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF15, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF16, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF17, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF18, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF19, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF20, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF21, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF22, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF23, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF24, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF25, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF26, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF27, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF28, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF29, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF30, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF31, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF32, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF33, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF34, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF35, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF36, 0)\n 
self._reg_write(model.vars.MODEM_SHAPING9_COEFF37, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF38, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF39, 0)\n\n # Modem Registers with fixed value\n self._reg_write(model.vars.MODEM_AFC_AFCTXMODE, 0)\n# self._reg_write(model.vars.MODEM_AFC_AFCGEAR, 3)\n self._reg_write(model.vars.MODEM_CTRL0_DEMODRAWDATASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DMASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL3_PRSDINEN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_CLKUNDIVREQ, 0)\n self._reg_write(model.vars.MODEM_CTRL3_RAMTESTEN, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_CLKWIDTH, 1)\n self._reg_write(model.vars.MODEM_DIRECTMODE_DMENABLE, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCASYNC, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCPREAM, 3)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPAPOWER, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPASELSLICE, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_MANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_CTRL0_OOKASYNCPIN, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DETDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DUALCORROPTDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_FRAMEDETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL1_SYNC1INV, 0)\n\n # FRC Registers with fixed value\n self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0)\n self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 0)\n self._reg_write(model.vars.FRC_CTRL_RATESELECT, 0)\n self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0)\n self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0)\n self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0)\n self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0)\n\n # Added new reg-fields related to 15.4 subG OQPSK phys\n self._reg_write(model.vars.MODEM_COH3_COHDSACMPLX, 0)\n self._reg_write(model.vars.MODEM_SYNCPROPERTIES_STATICSYNCTHRESH, 0)\n\n # Added new reg-fields related to Internal Long Range\n self._reg_write(model.vars.MODEM_PRE_PREWNDERRORS, 0)\n self._reg_write(model.vars.MODEM_CTRL3_TIMINGBASESGAIN, 0)\n\n #AGC default settings\n self._reg_write(model.vars.AGC_CTRL0_CFLOOPNFADJ, 0)\n self._reg_write(model.vars.AGC_CTRL6_DUALRFPKDDEC, 240296)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD1, 40)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD1, 10)\n self._reg_write(model.vars.AGC_CTRL6_ENDUALRFPKD, 1)\n\n self._reg_write(model.vars.MODEM_SQ_SQEN , 0)\n self._reg_write(model.vars.MODEM_SQ_SQTIMOUT , 0)\n 
self._reg_write(model.vars.MODEM_SQEXT_SQSTG2TIMOUT , 0)\n self._reg_write(model.vars.MODEM_SQEXT_SQSTG3TIMOUT , 0)\n\n # reg-fields to modify sync detection reset behavior PGOCELOT-5282\n self._reg_write(model.vars.MODEM_FRMSCHTIME_PMRSTSYCNEN, 0)\n self._reg_write(model.vars.MODEM_FRMSCHTIME_DSARSTSYCNEN, 0)\n\n #RAC settings\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, part_family)\n self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP, part_family)", "def _command(self, commands):\n# \"\"\"Send command to spi bus of display chip, most DC pin need set to LOW \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 0 )\n# self._spi.writebytes( commands )\n raise NotImplementedError", "def setup(instname):\n global reducer, inst_name,van_mass,bleed_switch,rate,pixels\n # debugging (allows to reload changed DirectEnergyConversion package from Mantid)\n\n if instname=='MAR' or instname=='mar':\n print 'setup mari'\n inst_name='MAR'\n reducer = DRC.setup_reducer('MARI')\n bleed_switch=False\n rate=0.0\n pixels=0\n elif instname=='MER' or instname=='mer':\n print 'setup merlin'\n inst_name='MER'\n reducer = DRC.setup_reducer('MERLIN')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='MAP' or instname=='map':\n print 'setup maps'\n inst_name='MAP'\n reducer = DRC.setup_reducer('MAPS')\n bleed_switch=False\n rate=0.0\n pixels=0.0\n elif instname=='LET' or instname=='let':\n print 'setup let'\n inst_name='LET'\n reducer = DRC.setup_reducer('LET')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='ARCS' or instname=='arcs':\n print 'setup Arcs'\n inst_name='ARC'\n reducer = DRC.setup_reducer('ARCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='SEQ' or instname=='seq':\n print 'setup Sequoia'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('SEQUOIA')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='CNCS' or instname=='cncs':\n print 'setup cncs'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('CNCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='HYSPEC' or instname=='hyspec':\n print 'setup hyspec'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('HYSPEC')\n bleed_switch=False\n rate=0.01\n pixels=80\n else:\n print 'Instrument name not defined'\n return \n van_mass=reducer.get_default_parameter('vanadium-mass')", "def setup(self):\n header_print(self.data['intro'])\n header_print(self.data['help'])\n random.shuffle(self.data['draw'])\n random.shuffle(self.data['locations'])\n random.shuffle(self.data['events'])\n random.shuffle(self.data['aces'])\n random.shuffle(self.data['personalities'])\n self.stats = {\n 'round': 0,\n 'powers': {\n 'MOONS': 6,\n 'SUNS': 6,\n 'WAVES': 6,\n 'LEAVES': 6,\n 'WYRMS': 6,\n 'KNOTS': 6,\n },\n 'hand': self.data['draw'][:],\n 'discard': [],\n 'active': [],\n 'opponent': {},\n }", "def basic(self):\n pass", "def info(self) -> dict:", "def fetch_stick(self):\r\n print(\"There you go, sir!\\n\")", "def init_devices(self):\n self.hp_nb = int(self.rs_nb* self.hp_proportion/(1- self.hp_proportion))\n self.defense_cost = self.hp_nb 
* self.hp_unit_cost\n rs_devices = [True for i in range(self.rs_nb)] #rs --> True\n hp_devices = [False for i in range(self.hp_nb)] #hp --> False\n self.devices = rs_devices + hp_devices\n shuffle(self.devices)", "def load_device():", "def __init__(self, dev):\n self.dev = dev\n self.dev.cla = 0x80", "def take_control(self):\n pass", "def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)", "def info(self):\n\n print(\"pixellisation:\", self.pixel)\n print(\"number of components:\", self.ncomp)\n print(\"number of pixels:\", self.data.shape[:] if self.ncomp == 1 else self.data.shape[1:])\n print(\"nside:\", self.nside)\n print(\"geometry:\", self.geometry)\n print(\"coordinates:\", self.coordinate)", "def info():\n print(\"Made using the OOP RPG game creator (c) Claire.\\n\")", "def protocolInfoLaser(self, fh, inputs, derivative):\n #global summary\n try:\n nspikes = len(inputs)\n self.devicemode = 'Laser'\n #print inputs\n # print 'FH parent info: ', fh.parent().info()\n print('1')\n reps = fh.parent().info()['protocol']['conf']['repetitions'] # fh.info()[('protocol', 'repetitions')]\n print('2')\n print(list(fh.info().keys()))\n print(fh.info())\n try:\n pulseDurIndex = fh.info()['Laser-Blue', 'Shutter.duration']\n except:\n try:\n pulseDurIndex = fh.info()['Laser-UV', 'Shutter.duration']\n except:\n raise ValueError(\" No key for Laser-Blue or Laser-UV in data set\")\n # fh.info()[('Laser-Blue', 'Command.PulseTrain_length')]\n # print 'pulsedurindex: ', pulseDurIndex\n fn = fh.shortName()\n # find date string in the path, and return path to current data set\n # allows us to identify the data set by date, slice, cell, protocol, etc.\n dm = re.compile(r'(\\d{4,4})\\.(\\d{2,2})\\.(\\d{2,2})*')\n dsearch = dm.search(fh.name())\n expname = fh.name()[dsearch.start():] # pull full path for experiment here, but leave out everything above the date\n print('3')\n pulseDur = fh.parent().info()['sequenceParams'][('Laser-Blue','Shutter.duration')] # [pulseDurIndex]\n print('4')\n pulseDur = pulseDur[pulseDurIndex]\n print('5')\n pulseTrainCommandShutter = fh.parent().info()['devices']['Laser-Blue']['channels']['Shutter']\n print('6')\n pulseTrainFcn = pulseTrainCommandShutter['waveGeneratorWidget']['function']\n r = 
re.compile('(?P<type>pulse)\\((?P<delay>\\d+),\\s(?P<param>\\w+),\\s(?P<value>\\d+)\\)')\n s = r.match(pulseTrainFcn)\n print('6.5')\n startTime = float(s.group('delay'))*1e-3 # pulseTrainFcn['start']['value'] # retrieve start time\n print('7')\n rep = 0 # fh.info()[('protocol', 'repetitions')]\n ipi = 1 # pulseTrainInfo['interpulse_length']['value'] # retrieve interpulse interval\n npulses = 1 # pulseTrainInfo['pulse_number']['value'] # retrieve number of pulses in train\n spikeTimes = [t['time'] for t in inputs]\n # figure max of derivative of the data after each stimulus pulse. 5 msec window.\n t = derivative.xvals(\"Time\")\n slopes = np.zeros(npulses)\n print('8')\n for n in range(npulses):\n t0 = startTime + n * ipi\n t1 = t0 + 3e-3\n x = np.where((t > t0) & (t <= t1))\n print('n, x: ', n, x)\n slopes[n] = np.max(derivative[x])\n\n res = OrderedDict([('Experiment: ', expname), ('File: ', fn), ('startTime', startTime),\n ('NPulses', npulses), ('IPI', ipi), ('PulseDur', pulseDur), ('Reps', reps),\n ('thisRep', rep),\n ('NSpikes', nspikes), ('SpikeTimes', spikeTimes), ('Slopes', slopes)])\n self.summary.append(res)\n except:\n raise Exception('Laser stuff failed')\n return res", "def _connect(self):\n\n log.info(\"Loading HVI\")\n\n self._hvi = sd1.SD_HVI()\n hvi_file = pkg_resources.resource_filename(\"qtrl.keysight\", 'sequencer.hvi')\n log.info(hvi_file)\n self._hvi.open(hvi_file)\n # for some unknown reason, this has to be run twice before it will not error\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2)\n\n assert self._hvi.open(hvi_file) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2) >= 0, 'Failed to load HVI'\n\n assert self._hvi.compile() >= 0, 'Failed to load HVI'\n\n assert self._hvi.load() >= 0, 'Failed to load HVI'\n self._hvi.reset()\n\n cur_chan = 0\n for i, card in enumerate(self.cards):\n if card.connection is not None:\n self.close()\n\n card_cxn = sd1.SD_AOU()\n assert card_cxn.openWithSlot(\"\", card.chassis, card.slot) > 0, 'Failed to connect to slot'\n\n self.cards[i] = card._replace(connection=card_cxn)\n # self.cards[i].connection.triggerIOconfig(sd1.SD_TriggerDirections.AOU_TRG_IN)\n\n for channel in range(card.channels):\n self.channels[cur_chan] = KeysightChannel(channel=channel,\n chassis=card.chassis,\n slot=card.slot,\n model=card.model,\n type=card.type,\n connection=card_cxn)\n\n self.channels[cur_chan].connection.channelWaveShape(channel+1, sd1.SD_Waveshapes.AOU_AWG)\n\n self.channels[cur_chan].connection.clockResetPhase(3, 0)\n\n # ext trig config, 0 is external source, 3 is rising edge\n # self.channels[cur_chan].connection.AWGtriggerExternalConfig(channel+1, 0, 3)\n\n cur_chan += 1\n self._hvi.start()\n self._hvi.stop()\n\n self._n_channels = cur_chan", "def SPIchiperase(self):\n self.writecmd(0x01,0x81);", "def __init__(self):\n self.ram = [0] * 256\n self.reg = [0] * 8\n self.pc = 0", "def c(self):\n pass", "def c(self):\n pass", "def ExtraInfo(self) -> object:", "def __init__(self):\n self.ser = serial.Serial('/dev/ttyUSB3',9600)\n collect_readings = False\n self.colours = []\n self.max_readings = 50 #maximum 
number of readings to use", "def get_info(self):\n\t\tret = 'Flash info\\n'\n\t\tret += '\\tGPNVM bits: ' + str(self.read_gpnvm()) + '\\n'\n\t\tret += '\\tUnique identifier area: ' + self.read_unique_identifier_area().decode('ascii', 'replace') + '\\n'\n\t\tret += '\\tDescriptor: ' + str(self.read_descriptor()) + '\\n'\n\t\treturn ret", "def refresh_description(self):\n # AIN\n code_ain = (self.CODE >> 12) & 0b0111\n # DICT_AIN = [[0, 1], [0, 3], [1, 3], [2, 3], [0, 4], [1, 4], [2, 4], [3, 4]]\n ind_p, ind_n = DICT_AIN[code_ain]\n self.AINP = \"AIN\" + str(ind_p)\n self.AINN = \"AIN\" + str(ind_n)\n if (ind_n == 4): self.AINN = \"GND\"\n\n # FSR\n code_fsr = (self.CODE >> 9) & 0b0000111\n # DICT_FSR = [\"6.144V\", \"4.096V\", \"2.048V\", \"1.024V\", \"0.512V\", \"0.256V\"]\n self.FSR = DICT_FSR[code_fsr]\n\n # MODE\n\n # rate\n code_rate = (self.CODE >> 5) & 0b00000000111\n # DICT_RATE = [\"8 SPS\", \"16 SPS\", \"32 SPS\", \"64 SPS\", \"128 SPS\", \"250 SPS\", \"475 SPS\", \"860 SPS\"]\n self.RATE = DICT_RATE[code_rate]", "def reckon(self):", "def polyChipOff(*args, attraction: Union[float, bool]=0.0, caching: bool=True,\n constructionHistory: bool=True, duplicate: bool=True, gain: Union[float,\n List[float], bool]=1.0, gravity: Union[List[float, float, float], bool]=None,\n gravityX: Union[float, bool]=0.0, gravityY: Union[float, bool]=0.0, gravityZ:\n Union[float, bool]=0.0, keepFacesTogether: bool=True, keepFacetTogether:\n bool=True, localCenter: Union[int, bool]=0, localDirection: Union[List[float,\n float, float], bool]=None, localDirectionX: Union[float, bool]=0.0,\n localDirectionY: Union[float, bool]=0.0, localDirectionZ: Union[float,\n bool]=0.0, localRotate: Union[List[float, float, float], bool]=None,\n localRotateX: Union[float, bool]=0.0, localRotateY: Union[float, bool]=0.0,\n localRotateZ: Union[float, bool]=0.0, localScale: Union[List[float, float,\n float], bool]=None, localScaleX: Union[float, bool]=0.0, localScaleY:\n Union[float, bool]=0.0, localScaleZ: Union[float, bool]=0.0, localTranslate:\n Union[List[float, float, float], bool]=None, localTranslateX: Union[float,\n bool]=0.0, localTranslateY: Union[float, bool]=0.0, localTranslateZ:\n Union[float, bool]=0.0, magnX: Union[float, bool]=0.0, magnY: Union[float,\n bool]=0.0, magnZ: Union[float, bool]=0.0, magnet: Union[List[float, float,\n float], bool]=None, name: AnyStr=\"\", nodeState: Union[int, bool]=0, offset:\n Union[float, bool]=0.0, pivot: Union[List[float, float, float], bool]=None,\n pivotX: Union[float, bool]=0.0, pivotY: Union[float, bool]=0.0, pivotZ:\n Union[float, bool]=0.0, random: Union[float, bool]=0.0, scale: Union[List[float,\n float, float], bool]=None, scaleX: Union[float, bool]=0.0, scaleY: Union[float,\n bool]=0.0, scaleZ: Union[float, bool]=0.0, translate: Union[List[float, float,\n float], bool]=None, translateX: Union[float, bool]=0.0, translateY: Union[float,\n bool]=0.0, translateZ: Union[float, bool]=0.0, weight: Union[float, bool]=0.0,\n worldSpace: bool=True, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def data(self):", "def __init__(self):\n self.bytes = bytearray(3)\n MCP4725.__init__(self)", "def __init__(self, busRestriction=0, devAddressRestriction=0, serialNumber=\"\"):\n self.handle = libcaer.caerDeviceOpen(1, libcaer.CAER_DEVICE_DAVIS, busRestriction, devAddressRestriction, serialNumber)\n self.info = libcaer.caerDavisInfoGet(self.handle)\n\n print(\"device ID: \" + str(libcaer.caer_davis_info_deviceID_get(self.info)))\n\n if 
(libcaer.caer_davis_info_deviceIsMaster_get(self.info)):\n print(\"device is Master\")\n else:\n print(\"device is Slave\")\n\n print(\"device Serial Number: \" + str(libcaer.caer_davis_info_deviceSerialNumber_get(self.info)))\n print(libcaer.caer_davis_info_deviceString_get(self.info))\n\n self.dvsSizeX = libcaer.caer_davis_info_dvsSizeX_get(self.info)\n self.dvsSizeY = libcaer.caer_davis_info_dvsSizeY_get(self.info)\n\n self.apsSizeX = libcaer.caer_davis_info_apsSizeX_get(self.info)\n self.apsSizeY = libcaer.caer_davis_info_apsSizeY_get(self.info)\n\n # init default biases\n ret = libcaer.caerDeviceSendDefaultConfig(self.handle)\n if(ret == True):\n print(\"Default biases loaded\")\n else:\n print(\"Error while loading default biases\")\n raise Exception\n\n # set blocking data exchange\n ret = libcaer.caerDeviceConfigSet(self.handle, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING, True)\n if(ret == True):\n print(\"Data exchange set to blocking mode\")\n else:\n print(\"Error in communicating with the device, please check your setup\")\n raise Exception\n\n # start data transfer from device\n ret = libcaer.caerDeviceDataStart(self.handle, None, None, None, None, None)\n if(ret == True):\n print(\"Data transfer started\")\n else:\n print(\"Error in starting data transfer\")\n raise Exception", "def __init__(self, make, model, year):\r\n super().__init__(make, model, year)\r\n self.battery_size = 70\r\n # self.autopilot = autopilot\r", "def moi(self):\n\n pass", "def enable(self) -> None:" ]
[ "0.67811126", "0.6066584", "0.5964433", "0.59339035", "0.589296", "0.58123773", "0.58010674", "0.57554996", "0.5728705", "0.56449646", "0.56439716", "0.56414634", "0.56379575", "0.56379575", "0.5594598", "0.55358076", "0.5497361", "0.54925936", "0.5483349", "0.5473763", "0.5462485", "0.5460163", "0.54515415", "0.5441278", "0.5419607", "0.54022926", "0.53795564", "0.53726125", "0.5338211", "0.5338211", "0.5337915", "0.5328013", "0.5326133", "0.53258103", "0.52956903", "0.5283403", "0.528021", "0.5279774", "0.5276576", "0.52745867", "0.52732724", "0.52700603", "0.5265963", "0.5249531", "0.52298385", "0.52277905", "0.5222615", "0.52194977", "0.5206727", "0.5206362", "0.5201322", "0.51996315", "0.51887923", "0.51804155", "0.51804155", "0.51804155", "0.51804155", "0.51774514", "0.5176834", "0.516562", "0.5158818", "0.5148516", "0.5147972", "0.51450545", "0.51415676", "0.5134778", "0.5133039", "0.5128874", "0.51275474", "0.51258636", "0.5125459", "0.5123773", "0.5122468", "0.5119849", "0.51123893", "0.5109508", "0.5105934", "0.51058424", "0.50912714", "0.5090566", "0.50820786", "0.5077213", "0.5076216", "0.5076194", "0.50725013", "0.5070938", "0.5069573", "0.5067492", "0.5067492", "0.5065867", "0.5065824", "0.506569", "0.50642616", "0.5063172", "0.5062251", "0.5059142", "0.505062", "0.50505215", "0.50487983", "0.5045563", "0.5045133" ]
0.0
-1
Everything important about the chip
def __init__(self, length=None, width=None, height=None, material=None, adhesion_material=None, dielectric_coating=None): self.length = length self.linspace_x = np.linspace(-length/2, length/2, num=100) self.width = width self.height = height self.material = material if self.material.thickness: if self.material.thickness != self.height: raise ValueError("BPE height must equal BPE material thickness") # adhesion layer used for thin metal film BPE self.adhesion_material = adhesion_material # dielectric coating on top of BPE if dielectric_coating: self.dielectric_coating = dielectric_coating else: self.dielectric_coating = material_solid(name='no_dielectric', permittivity=1, thickness=1e-12, Ka=6, Kb=2, reaction_site_density=5)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self):\n ChipData.ChipData.__init__(self)", "def read_chip_info(self):\n return [self.read_chip_type(), self.read_chip_revision()]", "def support(self):", "def show(self):\n # Disable IRQ to improve speed\n with NoIRQ():\n for chip in range(NB_CHIPS):\n self._select(chip)\n row = 0 if chip in (0, 1) else 1\n col = 0 if chip in (0, 2) else 1\n data = self.get_ht1632_data(row, col)\n green = (is_green(value) for value in data)\n red = (is_red(value) for value in data)\n self._write_data(green, red)", "def plugh():", "def run_all(self):\n self.formatter.section_start('Firmware info')\n self.analyse_firmware_id() # Always do this first!\n # If the chip has not panicked, the preserved\n # block is populated with random values, therefore\n # until the magic_value is implemented, do a try and except\n self.analyse_panic_state()\n self.analyse_slt() # Kind of pointless but why not.\n self.formatter.section_end()", "def presenetCar():", "def produce_features(self, chip):\n raise NotImplementedError(\"produce_features() not implemented\")", "def _init_hardware(self):\n return", "def probe(self):", "def bioinfo():\n\n pass", "def update(self):\n try:\n if not self._sysinfo:\n self._sysinfo = self.smartplug.sys_info\n self._mac = self.smartplug.mac\n self._model = self.smartplug.model\n if self.smartplug.context is None:\n self._alias = self.smartplug.alias\n self._device_id = self._mac\n else:\n self._alias = self._plug_from_context[\"alias\"]\n self._device_id = self.smartplug.context\n\n if self.smartplug.context is None:\n self._state = self.smartplug.state == self.smartplug.SWITCH_STATE_ON\n else:\n self._state = self._plug_from_context[\"state\"] == 1\n\n if self.smartplug.has_emeter:\n emeter_readings = self.smartplug.get_emeter_realtime()\n\n self._emeter_params[ATTR_CURRENT_POWER_W] = \"{:.2f}\".format(\n emeter_readings[\"power\"]\n )\n self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_readings[\"total\"]\n )\n self._emeter_params[ATTR_VOLTAGE] = \"{:.1f}\".format(\n emeter_readings[\"voltage\"]\n )\n self._emeter_params[ATTR_CURRENT_A] = \"{:.2f}\".format(\n emeter_readings[\"current\"]\n )\n\n emeter_statics = self.smartplug.get_emeter_daily()\n try:\n self._emeter_params[ATTR_TODAY_ENERGY_KWH] = \"{:.3f}\".format(\n emeter_statics[int(time.strftime(\"%e\"))]\n )\n except KeyError:\n # Device returned no daily history\n pass\n\n self._available = True\n\n except (SmartDeviceException, OSError) as ex:\n if self._available:\n _LOGGER.warning(\n \"Could not read state for %s: %s\", self.smartplug.host, ex\n )\n self._available = False", "def info(self):", "def info(self):", "def __init__(self):\r\n # Check device ID.\r\n chip_id = self._read_byte(_BME280_REGISTER_CHIPID)\r\n if _BME280_CHIPID != chip_id:\r\n raise RuntimeError('Failed to find BME280! Chip ID 0x%x' % chip_id)\r\n self._write_register_byte(_BME280_REGISTER_SOFTRESET, 0xB6)\r\n time.sleep(0.5)\r\n self._read_coefficients()\r\n self.sea_level_pressure = 1013.25\r\n \"\"\"Pressure in hectoPascals at sea level. 
Used to calibrate `altitude`.\"\"\"\r\n # turn on humidity oversample 16x\r\n self._write_register_byte(_BME280_REGISTER_CTRL_HUM, 0x03)\r\n self._t_fine = None", "def __init__(self):\n self._device_info = None", "def serial(self):", "def _pending_chips(self):\n assert not (self._deleted and self._new)\n # NOTE: This is ce4 specific code and could be factored out.\n deliver_at = gametime.now()\n\n chips = []\n # If this is a DELETE, send an empty dict.\n if self._deleted:\n chips.append({\n 'action':DELETE,\n 'path':self._chip_path(),\n 'value':{},\n 'time':deliver_at\n })\n # If this is an ADD, add all fields and collections.\n elif self._new:\n chips.append({\n 'action':ADD,\n 'path':self._chip_path(),\n 'value':self.to_struct(),\n 'time':deliver_at\n })\n # If this is a MOD, add only the changed fields and id_field.\n elif len(self._changed_fields) > 0:\n chips.append({\n 'action':MOD,\n 'path':self._chip_path(),\n 'value':self.to_struct(fields=self._changed_fields),\n 'time':deliver_at})\n return chips", "def info(rom):\n rom = ROM(rom, detect=True)", "def manage_info():", "def _get_info_about_sensor(self):\n reg_id = 0xD0\n chip_id, chip_version = self.bus.read_i2c_block_data(self.address,\n reg_id,\n 2)\n return chip_id, chip_version", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def pick_up(self):", "def _read_cardiochip(self):\n cur_leadstatus = 0\n sample_count =0\n while self.connected:\n sample_count+=1\n #check for sync bytes\n readbyte = ord(self.ser.read(1))\n #print readbyte, SYNC_BYTE\n if readbyte != SYNC_BYTE:\n continue\n readbyte = ord(self.ser.read(1))\n if readbyte != SYNC_BYTE:\n continue\n\n #parse length byte\n while True:\n pLength = ord(self.ser.read(1))\n if pLength != SYNC_BYTE:\n break\n if pLength > 169:\n continue\n #print \"L: %i\" % pLength\n\n # collect payload bytes\n payload = self.ser.read(pLength)\n payload = [ord(x) for x in payload] #convert to int from string\n #print \"payload: \" + str(payload).strip('[]')\n # ones complement inverse of 8-bit payload sum\n checksum = sum(payload) & 0xFF\n checksum = ~checksum & 0xFF\n\n # catch and verify checksum byte\n chk = ord(self.ser.read(1))\n #print \"chk: \" + str(checksum)\n if chk != checksum:\n print \"checksum error, %i != %i\" % (chk, checksum)\n continue\n\n output = self._parseData(payload)\n\n lead_status = next(( d for d in output if 'leadoff' in d), None)\n if lead_status is not None:\n if cur_leadstatus != lead_status['leadoff']:\n #we have a change\n if lead_status['leadoff']==200:\n print \"LEAD ON\"\n elif lead_status['leadoff']==0:\n print \"LEAD OFF\"\n cur_leadstatus = lead_status['leadoff']\n\n # store the output data in a queue\n # first, create a tuple with the sample index and dict with the timestamp and ecg\n ecgdict = next(((i,d) for i,d in enumerate(output) if 'ecg_raw' in d), None)\n if ecgdict is not None and sample_count>self.Fs*2:\n #let's just ignore the first 2 seconds of crappy data\n ecgdict[1]['leadoff'] = cur_leadstatus\n #print ecgdict[1]\n self.ecg_buffer.put(ecgdict[1]) # this should save the ecg and timestamp keys\n\n return", "def CL(self):", "def use(self):", "def _get_information(self):\n pass", "def enable(self):", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_i2c'] = True", "def __init__(self):\n self.Revision = '0'\n self.Serial = 
None\n try:\n with open('/proc/cpuinfo','r') as f:\n for line in f:\n splitLine = line.split(':')\n if len(splitLine) < 2:\n continue\n key = splitLine[0].strip()\n value = splitLine[1].strip()\n if key == 'Revision':\n self.Revision = value\n if key == 'Serial' and value != len(value) * '0':\n self.Serial = value\n except:\n exception (\"Error reading cpuinfo\")\n self.model = 'Unknown'\n if self.Revision == 'Beta':\n self.model = 'Raspberry Pi Model B (Beta)'\n if self.Revision in ('000d', '000e', '000f', '0002', '0003', '0004', '0005', '0006'):\n self.model = 'Raspberry Pi Model B'\n if self.Revision in ('0007', '0008', '0009'):\n self.model = 'Raspberry Pi Model A'\n if self.Revision in ('0010', '0013', '900032'):\n self.model = 'Raspberry Pi Model B +'\n if self.Revision in ('0011', '0014'):\n self.model = 'Raspberry Pi Compute Module'\n if self.Revision in ('0012', '0015'):\n self.model = 'Raspberry Pi Model A+'\n if self.Revision in ('a01040', 'a01041', 'a21041', 'a22042'):\n self.model = 'Raspberry Pi 2 Model B'\n if self.Revision in ('900092', '900093', '920093'):\n self.model = 'Raspberry Pi Zero'\n if self.Revision in ('9000c1',):\n self.model = 'Raspberry Pi Zero W'\n if self.Revision in ('a02082', 'a22082', 'a32082'):\n self.model = 'Raspberry Pi 3 Model B' \n if self.Revision in ('a020d3'):\n self.model = 'Raspberry Pi 3 Model B+'\n if self.Revision in ('a020a0'):\n self.model = 'Raspberry Pi Compute Module 3'\n if 'Rockchip' in CPU_HARDWARE:\n self.model = 'Tinker Board'\n self.manufacturer = 'Element14/Premier Farnell'\n if self.Revision in ('a01041', '900092', 'a02082', '0012', '0011', '0010', '000e', '0008', '0004', 'a020d3', 'a01040', 'a020a0'):\n self.manufacturer = 'Sony, UK'\n if self.Revision in ('a32082'):\n self.manufacturer = 'Sony, Japan'\n if self.Revision in ('0014', '0015', 'a21041', 'a22082', '920093'):\n self.manufacturer = 'Embest, China'\n if self.Revision in ('0005', '0009', '000f'):\n self.manufacturer = 'Qisda'\n if self.Revision in ('0006', '0007', '000d'):\n self.manufacturer = 'Egoman'\n if self.Revision == '0000':\n if 'Rockchip' in CPU_HARDWARE:\n self.manufacturer = 'ASUS'\n else:\n try:\n with open('/proc/device-tree/model', 'r') as model_file:\n for line in model_file:\n if 'BeagleBone' in line:\n index = line.index('BeagleBone')\n self.manufacturer = line[:index - 1].strip(' \\n\\t\\0')\n self.model = line[index:].strip(' \\n\\t\\0')\n break\n except:\n exception (\"Error reading model\")", "def addExtraDevices(self):\n \n # These tables were extracted from\n # pirates/src/piratesgui/GameOptions.py.\n \n ati_device_list = [ \n [\"ATI MOBILITY/RADEON X700\", 0x5653],\n [1, \"Radeon X1950 XTX Uber - Limited Edition\", 0x7248],\n [1, \"Radeon X1950 XTX Uber - Limited Edition Secondary\", 0x7268],\n [1, \"Radeon X800 CrossFire Edition\", 0x554D],\n [1, \"Radeon X800 CrossFire Edition Secondary\", 0x556D],\n [1, \"Radeon X850 CrossFire Edition\", 0x5D52],\n [1, \"Radeon X850 CrossFire Edition Secondary\", 0x5D72],\n [\"Radeon X550/X700 Series\", 0x564F],\n [\"ATI FireGL T2\", 0x4154],\n [\"ATI FireGL T2 Secondary\", 0x4174],\n [\"ATI FireGL V3100\", 0x5B64],\n [\"ATI FireGL V3100 Secondary\", 0x5B74],\n [\"ATI FireGL V3200\", 0x3E54],\n [\"ATI FireGL V3200 Secondary\", 0x3E74],\n [\"ATI FireGL V3300\", 0x7152],\n [\"ATI FireGL V3300 Secondary\", 0x7172],\n [\"ATI FireGL V3350\", 0x7153],\n [\"ATI FireGL V3350 Secondary\", 0x7173],\n [\"ATI FireGL V3400\", 0x71D2],\n [\"ATI FireGL V3400 Secondary\", 0x71F2],\n [\"ATI FireGL V5000\", 0x5E48],\n [\"ATI 
FireGL V5000 Secondary\", 0x5E68],\n [\"ATI FireGL V5100\", 0x5551],\n [\"ATI FireGL V5100 Secondary\", 0x5571],\n [\"ATI FireGL V5200\", 0x71DA],\n [\"ATI FireGL V5200 Secondary\", 0x71FA],\n [\"ATI FireGL V5300\", 0x7105],\n [\"ATI FireGL V5300 Secondary\", 0x7125],\n [\"ATI FireGL V7100\", 0x5550],\n [\"ATI FireGL V7100 Secondary\", 0x5570],\n [\"ATI FireGL V7200\", 0x5D50],\n [\"ATI FireGL V7200 \", 0x7104],\n [\"ATI FireGL V7200 Secondary\", 0x5D70],\n [\"ATI FireGL V7200 Secondary \", 0x7124],\n [\"ATI FireGL V7300\", 0x710E],\n [\"ATI FireGL V7300 Secondary\", 0x712E],\n [\"ATI FireGL V7350\", 0x710F],\n [\"ATI FireGL V7350 Secondary\", 0x712F],\n [\"ATI FireGL X1\", 0x4E47],\n [\"ATI FireGL X1 Secondary\", 0x4E67],\n [\"ATI FireGL X2-256/X2-256t\", 0x4E4B],\n [\"ATI FireGL X2-256/X2-256t Secondary\", 0x4E6B],\n [\"ATI FireGL X3-256\", 0x4A4D],\n [\"ATI FireGL X3-256 Secondary\", 0x4A6D],\n [\"ATI FireGL Z1\", 0x4147],\n [\"ATI FireGL Z1 Secondary\", 0x4167],\n [\"ATI FireMV 2200\", 0x5B65],\n [\"ATI FireMV 2200 Secondary\", 0x5B75],\n [\"ATI FireMV 2250\", 0x719B],\n [\"ATI FireMV 2250 Secondary\", 0x71BB],\n [\"ATI FireMV 2400\", 0x3151],\n [\"ATI FireMV 2400 Secondary\", 0x3171],\n [\"ATI FireStream 2U\", 0x724E],\n [\"ATI FireStream 2U Secondary\", 0x726E],\n [\"ATI MOBILITY FIRE GL 7800\", 0x4C58],\n [\"ATI MOBILITY FIRE GL T2/T2e\", 0x4E54],\n [\"ATI MOBILITY FireGL V3100\", 0x5464],\n [\"ATI MOBILITY FireGL V3200\", 0x3154],\n [\"ATI MOBILITY FireGL V5000\", 0x564A],\n [\"ATI MOBILITY FireGL V5000 \", 0x564B],\n [\"ATI MOBILITY FireGL V5100\", 0x5D49],\n [\"ATI MOBILITY FireGL V5200\", 0x71C4],\n [\"ATI MOBILITY FireGL V5250\", 0x71D4],\n [\"ATI MOBILITY FireGL V7100\", 0x7106],\n [\"ATI MOBILITY FireGL V7200\", 0x7103],\n [\"ATI MOBILITY RADEON\", 0x4C59],\n [\"ATI MOBILITY RADEON 7500\", 0x4C57],\n [\"ATI MOBILITY RADEON 9500\", 0x4E52],\n [\"ATI MOBILITY RADEON 9550\", 0x4E56],\n [\"ATI MOBILITY RADEON 9600/9700 Series\", 0x4E50],\n [\"ATI MOBILITY RADEON 9800\", 0x4A4E],\n [\"ATI Mobility Radeon HD 2300\", 0x7210],\n [\"ATI Mobility Radeon HD 2300 \", 0x7211],\n [\"ATI Mobility Radeon HD 2400\", 0x94C9],\n [\"ATI Mobility Radeon HD 2400 XT\", 0x94C8],\n [1, \"ATI Mobility Radeon HD 2600\", 0x9581],\n [1, \"ATI Mobility Radeon HD 2600 XT\", 0x9583],\n [\"ATI Mobility Radeon X1300\", 0x714A],\n [\"ATI Mobility Radeon X1300 \", 0x7149],\n [\"ATI Mobility Radeon X1300 \", 0x714B],\n [\"ATI Mobility Radeon X1300 \", 0x714C],\n [\"ATI Mobility Radeon X1350\", 0x718B],\n [\"ATI Mobility Radeon X1350 \", 0x718C],\n [\"ATI Mobility Radeon X1350 \", 0x7196],\n [\"ATI Mobility Radeon X1400\", 0x7145],\n [\"ATI Mobility Radeon X1450\", 0x7186],\n [\"ATI Mobility Radeon X1450 \", 0x718D],\n [\"ATI Mobility Radeon X1600\", 0x71C5],\n [\"ATI Mobility Radeon X1700\", 0x71D5],\n [\"ATI Mobility Radeon X1700 \", 0x71DE],\n [\"ATI Mobility Radeon X1700 XT\", 0x71D6],\n [1, \"ATI Mobility Radeon X1800\", 0x7102],\n [1, \"ATI Mobility Radeon X1800 XT\", 0x7101],\n [1, \"ATI Mobility Radeon X1900\", 0x7284],\n [1, \"ATI Mobility Radeon X2300\", 0x718A],\n [1, \"ATI Mobility Radeon X2300 \", 0x7188],\n [\"ATI MOBILITY RADEON X300\", 0x5461],\n [\"ATI MOBILITY RADEON X300 \", 0x5460],\n [\"ATI MOBILITY RADEON X300 \", 0x3152],\n [\"ATI MOBILITY RADEON X600\", 0x3150],\n [\"ATI MOBILITY RADEON X600 SE\", 0x5462],\n [\"ATI MOBILITY RADEON X700\", 0x5652],\n [\"ATI MOBILITY RADEON X700 \", 0x5653],\n [\"ATI MOBILITY RADEON X700 Secondary\", 0x5673],\n [1, \"ATI MOBILITY RADEON X800\", 0x5D4A],\n 
[1, \"ATI MOBILITY RADEON X800 XT\", 0x5D48],\n [\"ATI Radeon 9550/X1050 Series\", 0x4153],\n [\"ATI Radeon 9550/X1050 Series Secondary\", 0x4173],\n [\"ATI RADEON 9600 Series\", 0x4150],\n [\"ATI RADEON 9600 Series \", 0x4E51],\n [\"ATI RADEON 9600 Series \", 0x4151],\n [\"ATI RADEON 9600 Series \", 0x4155],\n [\"ATI RADEON 9600 Series \", 0x4152],\n [\"ATI RADEON 9600 Series Secondary\", 0x4E71],\n [\"ATI RADEON 9600 Series Secondary \", 0x4171],\n [\"ATI RADEON 9600 Series Secondary \", 0x4170],\n [\"ATI RADEON 9600 Series Secondary \", 0x4175],\n [\"ATI RADEON 9600 Series Secondary \", 0x4172],\n [1, \"ATI Radeon HD 2900 XT\", 0x9402],\n [1, \"ATI Radeon HD 2900 XT \", 0x9403],\n [1, \"ATI Radeon HD 2900 XT \", 0x9400],\n [1, \"ATI Radeon HD 2900 XT \", 0x9401],\n [\"ATI Radeon X1200 Series\", 0x791E],\n [\"ATI Radeon X1200 Series \", 0x791F],\n [1, \"ATI Radeon X1950 GT\", 0x7288],\n [1, \"ATI Radeon X1950 GT Secondary\", 0x72A8],\n [1, \"ATI RADEON X800 GT\", 0x554E],\n [1, \"ATI RADEON X800 GT Secondary\", 0x556E],\n [1, \"ATI RADEON X800 XL\", 0x554D],\n [1, \"ATI RADEON X800 XL Secondary\", 0x556D],\n [1, \"ATI RADEON X850 PRO\", 0x4B4B],\n [1, \"ATI RADEON X850 PRO Secondary\", 0x4B6B],\n [1, \"ATI RADEON X850 SE\", 0x4B4A],\n [1, \"ATI RADEON X850 SE Secondary\", 0x4B6A],\n [1, \"ATI RADEON X850 XT\", 0x4B49],\n [1, \"ATI RADEON X850 XT Platinum Edition\", 0x4B4C],\n [1, \"ATI RADEON X850 XT Platinum Edition Secondary\", 0x4B6C],\n [1, \"ATI RADEON X850 XT Secondary\", 0x4B69],\n [\"ATI Radeon Xpress 1200 Series\", 0x793F],\n [\"ATI Radeon Xpress 1200 Series \", 0x7941],\n [\"ATI Radeon Xpress 1200 Series \", 0x7942],\n [\"ATI Radeon Xpress Series\", 0x5A61],\n [\"ATI Radeon Xpress Series \", 0x5A63],\n [\"ATI Radeon Xpress Series \", 0x5A62],\n [\"ATI Radeon Xpress Series \", 0x5A41],\n [\"ATI Radeon Xpress Series \", 0x5A43],\n [\"ATI Radeon Xpress Series \", 0x5A42],\n [\"ATI Radeon Xpress Series \", 0x5954],\n [\"ATI Radeon Xpress Series \", 0x5854],\n [\"ATI Radeon Xpress Series \", 0x5955],\n [\"ATI Radeon Xpress Series \", 0x5974],\n [\"ATI Radeon Xpress Series \", 0x5874],\n [\"ATI Radeon Xpress Series \", 0x5975],\n [\"Radeon 9500\", 0x4144],\n [\"Radeon 9500 \", 0x4149],\n [\"Radeon 9500 PRO / 9700\", 0x4E45],\n [\"Radeon 9500 PRO / 9700 Secondary\", 0x4E65],\n [\"Radeon 9500 Secondary\", 0x4164],\n [\"Radeon 9500 Secondary \", 0x4169],\n [\"Radeon 9600 TX\", 0x4E46],\n [\"Radeon 9600 TX Secondary\", 0x4E66],\n [\"Radeon 9600TX\", 0x4146],\n [\"Radeon 9600TX Secondary\", 0x4166],\n [\"Radeon 9700 PRO\", 0x4E44],\n [\"Radeon 9700 PRO Secondary\", 0x4E64],\n [\"Radeon 9800\", 0x4E49],\n [\"Radeon 9800 PRO\", 0x4E48],\n [\"Radeon 9800 PRO Secondary\", 0x4E68],\n [\"Radeon 9800 SE\", 0x4148],\n [\"Radeon 9800 SE Secondary\", 0x4168],\n [\"Radeon 9800 Secondary\", 0x4E69],\n [\"Radeon 9800 XT\", 0x4E4A],\n [\"Radeon 9800 XT Secondary\", 0x4E6A],\n [\"Radeon X1300 / X1550 Series\", 0x7146],\n [\"Radeon X1300 / X1550 Series Secondary\", 0x7166],\n [\"Radeon X1300 Series\", 0x714E],\n [\"Radeon X1300 Series \", 0x715E],\n [\"Radeon X1300 Series \", 0x714D],\n [\"Radeon X1300 Series \", 0x71C3],\n [\"Radeon X1300 Series \", 0x718F],\n [\"Radeon X1300 Series Secondary\", 0x716E],\n [\"Radeon X1300 Series Secondary \", 0x717E],\n [\"Radeon X1300 Series Secondary \", 0x716D],\n [\"Radeon X1300 Series Secondary \", 0x71E3],\n [\"Radeon X1300 Series Secondary \", 0x71AF],\n [\"Radeon X1300/X1550 Series\", 0x7142],\n [\"Radeon X1300/X1550 Series \", 0x7180],\n [\"Radeon X1300/X1550 
Series \", 0x7183],\n [\"Radeon X1300/X1550 Series \", 0x7187],\n [\"Radeon X1300/X1550 Series Secondary\", 0x7162],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A0],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A3],\n [\"Radeon X1300/X1550 Series Secondary \", 0x71A7],\n [\"Radeon X1550 64-bit\", 0x7147],\n [\"Radeon X1550 64-bit \", 0x715F],\n [\"Radeon X1550 64-bit \", 0x719F],\n [\"Radeon X1550 64-bit Secondary\", 0x7167],\n [\"Radeon X1550 64-bit Secondary \", 0x717F],\n [\"Radeon X1550 Series\", 0x7143],\n [\"Radeon X1550 Series \", 0x7193],\n [\"Radeon X1550 Series Secondary\", 0x7163],\n [\"Radeon X1550 Series Secondary \", 0x71B3],\n [\"Radeon X1600 Pro / Radeon X1300 XT\", 0x71CE],\n [\"Radeon X1600 Pro / Radeon X1300 XT Secondary\", 0x71EE],\n [\"Radeon X1600 Series\", 0x7140],\n [\"Radeon X1600 Series \", 0x71C0],\n [\"Radeon X1600 Series \", 0x71C2],\n [\"Radeon X1600 Series \", 0x71C6],\n [\"Radeon X1600 Series \", 0x7181],\n [\"Radeon X1600 Series \", 0x71CD],\n [\"Radeon X1600 Series Secondary\", 0x7160],\n [\"Radeon X1600 Series Secondary \", 0x71E2],\n [\"Radeon X1600 Series Secondary \", 0x71E6],\n [\"Radeon X1600 Series Secondary \", 0x71A1],\n [\"Radeon X1600 Series Secondary \", 0x71ED],\n [\"Radeon X1600 Series Secondary \", 0x71E0],\n [\"Radeon X1650 Series\", 0x71C1],\n [\"Radeon X1650 Series \", 0x7293],\n [\"Radeon X1650 Series \", 0x7291],\n [\"Radeon X1650 Series \", 0x71C7],\n [\"Radeon X1650 Series Secondary\", 0x71E1],\n [\"Radeon X1650 Series Secondary \", 0x72B3],\n [\"Radeon X1650 Series Secondary \", 0x72B1],\n [\"Radeon X1650 Series Secondary \", 0x71E7],\n [1, \"Radeon X1800 Series\", 0x7100],\n [1, \"Radeon X1800 Series \", 0x7108],\n [1, \"Radeon X1800 Series \", 0x7109],\n [1, \"Radeon X1800 Series \", 0x710A],\n [1, \"Radeon X1800 Series \", 0x710B],\n [1, \"Radeon X1800 Series \", 0x710C],\n [1, \"Radeon X1800 Series Secondary\", 0x7120],\n [1, \"Radeon X1800 Series Secondary \", 0x7128],\n [1, \"Radeon X1800 Series Secondary \", 0x7129],\n [1, \"Radeon X1800 Series Secondary \", 0x712A],\n [1, \"Radeon X1800 Series Secondary \", 0x712B],\n [1, \"Radeon X1800 Series Secondary \", 0x712C],\n [1, \"Radeon X1900 Series\", 0x7243],\n [1, \"Radeon X1900 Series \", 0x7245],\n [1, \"Radeon X1900 Series \", 0x7246],\n [1, \"Radeon X1900 Series \", 0x7247],\n [1, \"Radeon X1900 Series \", 0x7248],\n [1, \"Radeon X1900 Series \", 0x7249],\n [1, \"Radeon X1900 Series \", 0x724A],\n [1, \"Radeon X1900 Series \", 0x724B],\n [1, \"Radeon X1900 Series \", 0x724C],\n [1, \"Radeon X1900 Series \", 0x724D],\n [1, \"Radeon X1900 Series \", 0x724F],\n [1, \"Radeon X1900 Series Secondary\", 0x7263],\n [1, \"Radeon X1900 Series Secondary \", 0x7265],\n [1, \"Radeon X1900 Series Secondary \", 0x7266],\n [1, \"Radeon X1900 Series Secondary \", 0x7267],\n [1, \"Radeon X1900 Series Secondary \", 0x7268],\n [1, \"Radeon X1900 Series Secondary \", 0x7269],\n [1, \"Radeon X1900 Series Secondary \", 0x726A],\n [1, \"Radeon X1900 Series Secondary \", 0x726B],\n [1, \"Radeon X1900 Series Secondary \", 0x726C],\n [1, \"Radeon X1900 Series Secondary \", 0x726D],\n [1, \"Radeon X1900 Series Secondary \", 0x726F],\n [1, \"Radeon X1950 Series\", 0x7280],\n [1, \"Radeon X1950 Series \", 0x7240],\n [1, \"Radeon X1950 Series \", 0x7244],\n [1, \"Radeon X1950 Series Secondary\", 0x72A0],\n [1, \"Radeon X1950 Series Secondary \", 0x7260],\n [1, \"Radeon X1950 Series Secondary \", 0x7264],\n [\"Radeon X300/X550/X1050 Series\", 0x5B60],\n [\"Radeon X300/X550/X1050 Series \", 
0x5B63],\n [\"Radeon X300/X550/X1050 Series Secondary\", 0x5B73],\n [\"Radeon X300/X550/X1050 Series Secondary \", 0x5B70],\n [\"Radeon X550/X700 Series \", 0x5657],\n [\"Radeon X550/X700 Series Secondary\", 0x5677],\n [\"Radeon X600 Series\", 0x5B62],\n [\"Radeon X600 Series Secondary\", 0x5B72],\n [\"Radeon X600/X550 Series\", 0x3E50],\n [\"Radeon X600/X550 Series Secondary\", 0x3E70],\n [\"Radeon X700\", 0x5E4D],\n [\"Radeon X700 PRO\", 0x5E4B],\n [\"Radeon X700 PRO Secondary\", 0x5E6B],\n [\"Radeon X700 SE\", 0x5E4C],\n [\"Radeon X700 SE Secondary\", 0x5E6C],\n [\"Radeon X700 Secondary\", 0x5E6D],\n [\"Radeon X700 XT\", 0x5E4A],\n [\"Radeon X700 XT Secondary\", 0x5E6A],\n [\"Radeon X700/X550 Series\", 0x5E4F],\n [\"Radeon X700/X550 Series Secondary\", 0x5E6F],\n [1, \"Radeon X800 GT\", 0x554B],\n [1, \"Radeon X800 GT Secondary\", 0x556B],\n [1, \"Radeon X800 GTO\", 0x5549],\n [1, \"Radeon X800 GTO \", 0x554F],\n [1, \"Radeon X800 GTO \", 0x5D4F],\n [1, \"Radeon X800 GTO Secondary\", 0x5569],\n [1, \"Radeon X800 GTO Secondary \", 0x556F],\n [1, \"Radeon X800 GTO Secondary \", 0x5D6F],\n [1, \"Radeon X800 PRO\", 0x4A49],\n [1, \"Radeon X800 PRO Secondary\", 0x4A69],\n [1, \"Radeon X800 SE\", 0x4A4F],\n [1, \"Radeon X800 SE Secondary\", 0x4A6F],\n [1, \"Radeon X800 Series\", 0x4A48],\n [1, \"Radeon X800 Series \", 0x4A4A],\n [1, \"Radeon X800 Series \", 0x4A4C],\n [1, \"Radeon X800 Series \", 0x5548],\n [1, \"Radeon X800 Series Secondary\", 0x4A68],\n [1, \"Radeon X800 Series Secondary \", 0x4A6A],\n [1, \"Radeon X800 Series Secondary \", 0x4A6C],\n [1, \"Radeon X800 Series Secondary \", 0x5568],\n [1, \"Radeon X800 VE\", 0x4A54],\n [1, \"Radeon X800 VE Secondary\", 0x4A74],\n [1, \"Radeon X800 XT\", 0x4A4B],\n [1, \"Radeon X800 XT \", 0x5D57],\n [1, \"Radeon X800 XT Platinum Edition\", 0x4A50],\n [1, \"Radeon X800 XT Platinum Edition \", 0x554A],\n [1, \"Radeon X800 XT Platinum Edition Secondary\", 0x4A70],\n [1, \"Radeon X800 XT Platinum Edition Secondary \", 0x556A],\n [1, \"Radeon X800 XT Secondary\", 0x4A6B],\n [1, \"Radeon X800 XT Secondary \", 0x5D77],\n [1, \"Radeon X850 XT\", 0x5D52],\n [1, \"Radeon X850 XT Platinum Edition\", 0x5D4D],\n [1, \"Radeon X850 XT Platinum Edition Secondary\", 0x5D6D],\n [1, \"Radeon X850 XT Secondary\", 0x5D72],\n ]\n vendorId = 0x1002\n for entry in ati_device_list:\n if len(entry) == 3:\n flag, deviceName, deviceId = entry\n else:\n deviceName, deviceId = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()\n \n nvidia_device_list = [\n [0x014F, \"GeForce 6200\"],\n [0x00F3, \"GeForce 6200\"],\n [0x0221, \"GeForce 6200\"],\n [0x0163, \"GeForce 6200 LE\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0161, \"GeForce 6200 TurboCache(TM)\"],\n [0x0162, \"GeForce 6200SE TurboCache(TM)\"],\n [0x0160, \"GeForce 6500\"],\n [1, 0x0141, \"GeForce 6600\"],\n [1, 0x00F2, \"GeForce 6600\"],\n [1, 0x0140, \"GeForce 6600 GT\"],\n [1, 0x00F1, \"GeForce 6600 GT\"],\n [1, 0x0142, \"GeForce 6600 LE\"],\n [1, 0x00F4, \"GeForce 6600 LE\"],\n [1, 0x0143, \"GeForce 6600 VE\"],\n [1, 0x0147, \"GeForce 6700 XL\"],\n [1, 0x0041, \"GeForce 6800\"],\n [1, 0x00C1, \"GeForce 6800\"],\n [1, 0x0047, \"GeForce 6800 GS\"],\n [1, 0x00F6, \"GeForce 6800 GS\"],\n [1, 0x00C0, \"GeForce 6800 GS\"],\n [1, 0x0045, \"GeForce 6800 GT\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x00C2, \"GeForce 6800 LE\"],\n [1, 0x0040, \"GeForce 6800 Ultra\"],\n [1, 0x00F9, \"GeForce 6800 Series GPU\"],\n [1, 0x0043, \"GeForce 6800 XE\"],\n [1, 0x0048, \"GeForce 6800 XT\"],\n 
[1, 0x0218, \"GeForce 6800 XT\"],\n [1, 0x00C3, \"GeForce 6800 XT\"],\n [0x01DF, \"GeForce 7300 GS\"],\n [0x0393, \"GeForce 7300 GT\"],\n [0x01D1, \"GeForce 7300 LE\"],\n [0x01D3, \"GeForce 7300 SE\"],\n [0x01DD, \"GeForce 7500 LE\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x0392, \"GeForce 7600 GS\"],\n [1, 0x02E1, \"GeForce 7600 GS\"],\n [1, 0x0391, \"GeForce 7600 GT\"],\n [1, 0x0394, \"GeForce 7600 LE\"],\n [1, 0x00F5, \"GeForce 7800 GS\"],\n [1, 0x0092, \"GeForce 7800 GT\"],\n [1, 0x0091, \"GeForce 7800 GTX\"],\n [1, 0x0291, \"GeForce 7900 GT/GTO\"],\n [1, 0x0290, \"GeForce 7900 GTX\"],\n [1, 0x0293, \"GeForce 7900 GX2\"],\n [1, 0x0294, \"GeForce 7950 GX2\"],\n [0x0322, \"GeForce FX 5200\"],\n [0x0321, \"GeForce FX 5200 Ultra\"],\n [0x0323, \"GeForce FX 5200LE\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0326, \"GeForce FX 5500\"],\n [0x0312, \"GeForce FX 5600\"],\n [0x0311, \"GeForce FX 5600 Ultra\"],\n [0x0314, \"GeForce FX 5600XT\"],\n [0x0342, \"GeForce FX 5700\"],\n [0x0341, \"GeForce FX 5700 Ultra\"],\n [0x0343, \"GeForce FX 5700LE\"],\n [0x0344, \"GeForce FX 5700VE\"],\n [0x0302, \"GeForce FX 5800\"],\n [0x0301, \"GeForce FX 5800 Ultra\"],\n [0x0331, \"GeForce FX 5900\"],\n [0x0330, \"GeForce FX 5900 Ultra\"],\n [0x0333, \"GeForce FX 5950 Ultra\"],\n [0x0324, \"GeForce FX Go5200 64M\"],\n [0x031A, \"GeForce FX Go5600\"],\n [0x0347, \"GeForce FX Go5700\"],\n [0x0167, \"GeForce Go 6200/6400\"],\n [0x0168, \"GeForce Go 6200/6400\"],\n [1, 0x0148, \"GeForce Go 6600\"],\n [1, 0x00c8, \"GeForce Go 6800\"],\n [1, 0x00c9, \"GeForce Go 6800 Ultra\"],\n [1, 0x0098, \"GeForce Go 7800\"],\n [1, 0x0099, \"GeForce Go 7800 GTX\"],\n [1, 0x0298, \"GeForce Go 7900 GS\"],\n [1, 0x0299, \"GeForce Go 7900 GTX\"],\n [0x0185, \"GeForce MX 4000\"],\n [0x00FA, \"GeForce PCX 5750\"],\n [0x00FB, \"GeForce PCX 5900\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0111, \"GeForce2 MX200\"],\n [0x0110, \"GeForce2 MX/MX 400\"],\n [0x0200, \"GeForce3\"],\n [0x0201, \"GeForce3 Ti200\"],\n [0x0202, \"GeForce3 Ti500\"],\n [0x0172, \"GeForce4 MX 420\"],\n [0x0171, \"GeForce4 MX 440\"],\n [0x0181, \"GeForce4 MX 440 with AGP8X\"],\n [0x0173, \"GeForce4 MX 440-SE\"],\n [0x0170, \"GeForce4 MX 460\"],\n [0x0253, \"GeForce4 Ti 4200\"],\n [0x0281, \"GeForce4 Ti 4200 with AGP8X\"],\n [0x0251, \"GeForce4 Ti 4400\"],\n [0x0250, \"GeForce4 Ti 4600\"],\n [0x0280, \"GeForce4 Ti 4800\"],\n [0x0282, \"GeForce4 Ti 4800SE\"],\n [0x0203, \"Quadro DCC\"],\n [0x0309, \"Quadro FX 1000\"],\n [0x034E, \"Quadro FX 1100\"],\n [0x00FE, \"Quadro FX 1300\"],\n [0x00CE, \"Quadro FX 1400\"],\n [0x0308, \"Quadro FX 2000\"],\n [0x0338, \"Quadro FX 3000\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x004E, \"Quadro FX 4000\"],\n [1, 0x00CD, \"Quadro FX 3450/4000 SDI\"],\n [1, 0x00F8, \"Quadro FX 3400/4400\"],\n [1, 0x009D, \"Quadro FX 4500\"],\n [1, 0x029F, \"Quadro FX 4500 X2\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0x014E, \"Quadro FX 540\"],\n [0x014C, \"Quadro FX 540 MXM\"],\n [0x032B, \"Quadro FX 500/FX 600\"],\n [0X033F, \"Quadro FX 700\"],\n [0x034C, \"Quadro FX Go1000\"],\n [0x00CC, \"Quadro FX Go1400\"],\n [0x031C, \"Quadro FX Go700\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x032A, \"Quadro NVS 280 PCI\"],\n [0x00FD, \"Quadro PCI-E Series\"],\n [0x0165, \"Quadro NVS 285\"],\n [0x017A, \"Quadro NVS\"],\n [0x018A, \"Quadro NVS with AGP8X\"],\n [0x0113, \"Quadro2 MXR/EX\"],\n [0x017A, \"Quadro NVS\"],\n [0x018B, \"Quadro4 380 XGL\"],\n [0x0178, 
\"Quadro4 550 XGL\"],\n [0x0188, \"Quadro4 580 XGL\"],\n [0x025B, \"Quadro4 700 XGL\"],\n [0x0259, \"Quadro4 750 XGL\"],\n [0x0258, \"Quadro4 900 XGL\"],\n [0x0288, \"Quadro4 980 XGL\"],\n [0x028C, \"Quadro4 Go700\"],\n [1, 0x0295, \"NVIDIA GeForce 7950 GT\"],\n [0x03D0, \"NVIDIA GeForce 6100 nForce 430\"],\n [0x03D1, \"NVIDIA GeForce 6100 nForce 405\"],\n [0x03D2, \"NVIDIA GeForce 6100 nForce 400\"],\n [0x0241, \"NVIDIA GeForce 6150 LE\"],\n [0x0242, \"NVIDIA GeForce 6100\"],\n [0x0245, \"NVIDIA Quadro NVS 210S / NVIDIA GeForce 6150LE\"],\n [1, 0x029C, \"NVIDIA Quadro FX 5500\"],\n [1, 0x0191, \"NVIDIA GeForce 8800 GTX\"],\n [1, 0x0193, \"NVIDIA GeForce 8800 GTS\"],\n [1, 0x0400, \"NVIDIA GeForce 8600 GTS\"],\n [1, 0x0402, \"NVIDIA GeForce 8600 GT\"],\n [0x0421, \"NVIDIA GeForce 8500 GT\"],\n [0x0422, \"NVIDIA GeForce 8400 GS\"],\n [0x0423, \"NVIDIA GeForce 8300 GS\"],\n ]\n vendorId = 0x10de\n for entry in nvidia_device_list:\n if len(entry) == 3:\n flag, deviceId, deviceName = entry\n else:\n deviceId, deviceName = entry\n self.devices[(vendorId, deviceId)] = deviceName.strip()", "def _data(self, data):\n# \"\"\"Send data to spi bus of display chip, most DC pin need set to HIGH \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 1 )\n# self._spi.writebytes( data )\n raise NotImplementedError", "def __str__(self):\n s = \"Filename : %s\\n\" % self.fname\n s += \"Data size : %d x %d x %d\\n\" % (self._size[::-1])\n s += \"CCD Chip Size : %d x %d\\n\" % self._chipSize[::-1]\n s += \"File date : %s\\n\" % time.asctime(self._filedate)\n s += \"Exposure Time : %f\\n\" % self.Exposure\n s += \"Num ROI : %d\\n\" % self.NumROI\n s += \"Num ROI Exp : %d\\n\" % self.NumROIExperiment\n s += \"Contoller Ver.: %d\\n\" % self.ControllerVersion\n s += \"Logic Output : %d\\n\" % self.LogicOutput\n #self.AppHiCapLowNoise = self._readInt(4)\n s += \"Timing Mode : %d\\n\" % self.TimingMode\n s += \"Det. Temp : %d\\n\" % self.DetTemperature\n s += \"Det. Type : %d\\n\" % self.DetectorType\n s += \"Trigger Diode : %d\\n\" % self.TriggerDiode\n s += \"Delay Time : %d\\n\" % self.DelayTime\n s += \"Shutter Cont. : %d\\n\" % self.ShutterControl\n s += \"Absorb Live : %d\\n\" % self.AbsorbLive\n s += \"Absorb Mode : %d\\n\" % self.AbsorbMode\n s += \"Virtual Chip : %d\\n\" % self.CanDoVirtualChip\n s += \"Thresh. Min L : %d\\n\" % self.ThresholdMinLive\n s += \"Thresh. Min : %d\\n\" % self.ThresholdMin\n s += \"Thresh. Max L : %d\\n\" % self.ThresholdMaxLive\n s += \"Thresh. Max : %d\\n\" % self.ThresholdMax\n s += \"Geometric Op : %d\\n\" % self.GeometricOps\n s += \"ADC Offset : %d\\n\" % self.ADCOffset\n s += \"ADC Rate : %d\\n\" % self.ADCRate\n s += \"ADC Type : %d\\n\" % self.ADCType\n s += \"ADC Resol. : %d\\n\" % self.ADCRes\n s += \"ADC Bit. Adj. 
: %d\\n\" % self.ADCBitAdj\n s += \"ADC Gain : %d\\n\" % self.Gain\n \n i = 0\n for roi in self.allROI:\n s += \"ROI %-4d : %-5d %-5d %-5d %-5d %-5d %-5d\\n\" % (i,roi[0], roi[1], roi[2],\n roi[3], roi[4], roi[5])\n i += 1\n \n s += \"\\nComments :\\n\"\n i = 0\n for c in self._comments:\n s += \"%-3d : \" % i\n i += 1\n s += c\n s += \"\\n\"\n return s", "def __init__(self, hdw=['Soundcard'], devicename='dev1'):\n self.debugFlag = False\n self.task = None # NI Task\n self.required_hardware = hdw # Require specific hardware \n self.hardware = [] # list of hardware actually found on this system\n self.find_hardware(device_info={'devicename': devicename}) # population the self.hardware list", "def degibber(self):", "def update_firmware(self) -> str:", "def identifyChip(chipType):\n with open('../illumina_files/illumina_dict.pickle', \"rb\") as f:\n chipDict = pickle.load(f)\n\n values = chipDict[chipType]\n\n print('BPM: ' + values[0] + '\\n')\n print('EGT: ' + values[1] + '\\n')\n print('CSV: ' + values[2] + '\\n')\n\n return values[0], values[1], values[2]", "def info() -> None:", "def __init__(self, address=0x76):\n self.address = address\n self.bus = self._initialize_bus()\n\n self.chip_id, self.chip_version = self._get_info_about_sensor()", "async def identify(self):\n await self.send({\n \"op\": 2,\n \"d\" : {\n \"token\" : self.client.token,\n \"properties\": {\n \"$os\" : platform,\n \"$browser\": \"SpeedCord\",\n \"$device\" : \"SpeedCord\"\n },\n \"intents\" : self.client.intents,\n \"shard\" : (self.id, self.client.shard_count)\n }\n })", "def bootloader() -> NoReturn:", "def __set_chips(self):\n\n # Scan filesystem\n root_files = [root_file for root_file in os.walk(self.dataset_path)]\n\n # Decode truth.txt file\n truth_files = [os.path.join(walked[0], 'truth.txt') for walked in root_files if 'truth.txt' in walked[2]]\n if len(truth_files) == 0:\n raise IOError(\"No truth file found.\")\n elif len(truth_files) > 1:\n raise IOError(\"Too many truth files available.\")\n\n truth_data = self.__decode_truth_file(truth_files.pop())\n if len(truth_data) < 1:\n raise IOError(\"No truth loaded\")\n if self.__debug:\n print(\"{} truth records loaded.\".format(len(truth_data)))\n\n # Index all image chips\n file_paths = [[os.path.join(walked[0], wfile) for wfile in walked[2]] for walked in root_files]\n chip_idx = dict(filter(lambda t: t is not None, map(self.__index_chip, itertools.chain(*file_paths))))\n\n if len(chip_idx) != len(truth_data):\n raise IOError(\"Number of truth records not equal to number of chips.\")\n if self.__debug:\n print(\"{} image chips loaded.\".format(len(chip_idx)))\n\n # Create and store chips\n self.chips = {meta['file']: self.__create_chip(meta, truth_data[idx]) for idx, meta in chip_idx.items()}\n if self.__debug:\n print(\"{} chip.Chips loaded.\".format(len(self.chips)))", "def get_info(self):\n return \"TODO !\"", "def getInfo():", "def _default_setup(self):\n self._n_configs = 1\n self._sn_size = 100\n self._nt = 10000\n self._active_brdch = np.zeros(\n (), dtype=[(\"SIS 3302\", bool, (4, 8)), (\"SIS 3305\", bool, (2, 8))]\n )\n self._active_brdch[\"SIS 3302\"][0][0] = True\n self._active_brdch[\"SIS 3305\"][0][0] = True\n self._config_names = []\n self._active_config = (\"config01\",)\n self._sis3305_mode = 0", "def do_Device (self, line):", "def updateInterface(self):\n p = self.cxn[self.selectedADR].packet()\n p.magnetv().pscurrent().psvoltage()\n p.time()\n p.temperatures()\n p.get_state_var('CompressorStatus')\n p.get_instrument_state()\n state = 
yield p.send()\n # change instrument statuses\n for name,status in state['get_instrument_state']:\n if status[0] == False: color = 'red3'\n elif status[1] == False: color = 'orange3'\n elif status[1] == True: color = 'green3'\n else: color = 'gray70'\n self.instrumentStatuses[name].config(bg=color)\n # change compressor button\n if state['get_state_var'] == True:\n self.compressorButton.configure(text='Stop Compressor',\n command=self.stopCompressor,\n state=Tkinter.NORMAL)\n elif state['get_state_var'] == False:\n self.compressorButton.configure(text='Start Compressor',\n command=self.startCompressor,\n state=Tkinter.NORMAL)\n else: self.compressorButton.configure(state=Tkinter.DISABLED)\n # update current, voltage fields\n temps = {}\n stages = ('T_60K','T_3K','T_GGG','T_FAA')\n for i in range(len(stages)):\n temps[stages[i]] = state['temperatures'][i]\n #if temps[stages[i]] == 'nan': temps[stages[i]] = numpy.nan\n if numpy.isnan(state['magnetv']['V']):\n emf = 'ERR'\n else:\n emf = \"{0:.3f}\".format(state['magnetv']['V'])\n if numpy.isnan(state['pscurrent']['A']):\n psI = 'PS OFF'\n else:\n psI = \"{0:.3f}\".format(state['pscurrent']['A'])\n if numpy.isnan(state['psvoltage']['V']):\n psV = 'PS OFF'\n else:\n psV = \"{0:.3f}\".format(state['psvoltage']['V'])\n self.currentBackEMF.set( emf )\n self.currentI.set( psI )\n self.currentV.set( psV )\n # update plot:\n # change data to plot\n self.stage60K.set_xdata(numpy.append(self.stage60K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage60K.set_ydata(numpy.append(self.stage60K.get_ydata(),temps['T_60K']['K']))\n self.stage03K.set_xdata(numpy.append(self.stage03K.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stage03K.set_ydata(numpy.append(self.stage03K.get_ydata(),temps['T_3K']['K']))\n self.stageGGG.set_xdata(numpy.append(self.stageGGG.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageGGG.set_ydata(numpy.append(self.stageGGG.get_ydata(),temps['T_GGG']['K']))\n self.stageFAA.set_xdata(numpy.append(self.stageFAA.get_xdata(),mpl.dates.date2num(state['time'])))\n self.stageFAA.set_ydata(numpy.append(self.stageFAA.get_ydata(),temps['T_FAA']['K']))\n #update plot\n self.updatePlot()\n # update legend\n labelOrder = ['T_60K','T_3K','T_GGG','T_FAA']\n lines = [self.stage60K,self.stage03K,self.stageGGG,self.stageFAA]\n labels = [l.strip('T_')+' ['+\"{0:.3f}\".format(temps[l]['K'])+'K]' for l in labelOrder]\n labels = [s.replace('1.#QOK','OoR') for s in labels]\n # legend on top (if not using this, delete \\n in title)\n self.ax.legend(lines,labels,bbox_to_anchor=(0., 1.02, 1., .102), loc=3,\n ncol=4, mode=\"expand\", borderaxespad=0.)", "def _initialize_data(self):\n self.reset_count = 0\n self._idn_no_firmware = \"KEPCO,BOP 50-20,E1234,\"\n self._firmware = 2.6\n self._init_data()", "def on(self):", "def cx():", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_hardware_sound_systems'] = True", "def __init__(self, sensor, temperature_resolution, humidity_resolution):\n self.sensor = sensor\n self.sensor.turnHeaterOn() \n time.sleep(1.0) # Burn off condensed stuff.\n self.sensor.turnHeaterOff() \n self.update()\n # Main Program\n #print \"------------\"\n #print \"Manfacturer ID=0x%X\"% self.sensor.readManufacturerID() \n #print \"Device ID=0x%X\"% self.sensor.readDeviceID() \n #print \"Serial Number ID=0x%X\"% self.sensor.readSerialNumber() \n \n # change temperature resolution\n self.sensor.setTemperatureResolution(temperature_resolution)\n 
self.sensor.setHumidityResolution(humidity_resolution)", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def init_IR_codes():\n IR_codes.update( {b'FF629D' : say_temp} ) # Say temperature status\n IR_codes.update( {b'84FF9375' : say_temp} ) # Say temperature status\n #IR_codes.update( {b'FFA857' : volume_inc} ) # increase volume\n #IR_codes.update( {b'FFE01F' : volume_dec} ) # reduce volume\n IR_codes.update( {b'FF906F' : toSecureMode} ) # Will be noBodyHome\n IR_codes.update( {b'FFC23D' : ultra.switch} ) # On/off radio\n IR_codes.update( {b'BF09C35C' : ultra.switch} ) # On/off radio (big)\n #IR_codes.update( {b'8BE68656' : holeNightLightAuto} )\n #IR_codes.update( {b'B21F28AE' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'A6B1096A' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'24014B0' : noolite_hole_set_off} )\n IR_codes.update( {b'8FC212DB' : noolite_hole_set_on} )\n IR_codes.update( {b'7960556F' : noolite_hole_set_auto} )\n #IR_codes.update( {b'FF10EF' : holeNightLightAuto} )\n #IR_codes.update( {b'FF38C7' : hole_night_light.setManualStateOff} )\n #IR_codes.update( {b'FF5AA5' : hole_night_light.setManualStateOn} )\n IR_codes.update( {b'FF30CF' : noolite_hole_set_off} )\n IR_codes.update( {b'FF18E7' : noolite_hole_set_on} )\n IR_codes.update( {b'FF7A85' : noolite_hole_set_auto} )", "def __init__(self):\n i2c.Pn532_i2c.__init__(self)\n self._uid = False", "def __init__(self, device):\n self.device = device\n self.io = serial.Serial(device, 57600, timeout=1)\n self.keys = ['time', 'centroid_x', 'centroid_y', 'centroid_r',\n 'level_1', 'level_2', 'level_3',\n 'width_1', 'width_2', 'width_3',\n 'height_1', 'height_2', 'height_3',\n 'power']", "def get_coulomb_info(self):\n return", "def about( cls, ):\n url = r\"http://www.opencircuits.com/Python_Smart_Terminal\"\n __, mem_msg = cls.show_process_memory( )\n msg = ( f\"{cls.controller.app_name} version:{cls.controller.version} \\nmode: {cls.parameters.mode}\"\n f\"\\n by Russ Hensel\"\n f\"\\nMemory in use {mem_msg} \\nCheck <Help> or \\n{url} \\nfor more info.\" )\n messagebox.showinfo( \"About\", msg )", "def healthcare():", "def test_card_info_lookup(self):\n pass", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_rgb_dmd'] = True", "def dicom_cli():", "def state_information(self) -> Dict[str, Any]:\n raise NotImplementedError(\"Device subclass needs to implement this.\")", "def __init__(self, starting_point=-1):\n self.i_read = starting_point\n self.data = [['fake_chip_id', 'fake_version'],\n [96, 110, 203, 104, 50, 0, 29, 145, 59, 215, 208, 11,\n 232, 38, 42, 255, 249, 255, 172, 38, 10, 216, 189, 16],\n [75],\n [129, 1, 0, 16, 44, 3, 30],\n [76, 60, 128, 129, 49, 128, 94, 120]]", "def sth():", "def calc_misc(self, model):\n\n part_family = model.part_family.lower()\n\n # Legacy Demod Registers\n # FIXME: calculate these\n\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_BRDIVB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULA, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DEVMULB, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RATESELMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL2_RXFRCDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL2_SQITHRESH, 0)\n self._reg_write(model.vars.MODEM_CTRL2_TXPINMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATDENS, 0)\n self._reg_write(model.vars.MODEM_CTRL4_ADCSATLEVEL, 6)\n self._reg_write(model.vars.MODEM_CTRL4_OFFSETPHASESCALING, 0)\n 
self._reg_write(model.vars.MODEM_CTRL4_PHASECLICKFILT, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTAVG, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTDEB, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTGAIN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_PREDISTRST, 0)\n self._reg_write(model.vars.MODEM_CTRL4_SOFTDSSSMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL5_BBSS, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DEMODRAWDATASEL2, 0)\n self._reg_write(model.vars.MODEM_CTRL5_DETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL5_POEPER, 0)\n self._reg_write(model.vars.MODEM_CTRL5_RESYNCLIMIT, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CODINGB, 0)\n self._reg_write(model.vars.MODEM_CTRL6_CPLXCORREN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DEMODRESTARTALL, 0)\n self._reg_write(model.vars.MODEM_CTRL6_DSSS3SYMBOLSYNCEN, 0)\n self._reg_write(model.vars.MODEM_CTRL6_PREBASES, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_RXRESTARTUPONSHORTRSSI, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKINV, 0)\n self._reg_write(model.vars.MODEM_CTRL6_TXDBPSKRAMPEN, 0)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_VMIDCTRL, 1)\n self._reg_write(model.vars.MODEM_ANARAMPCTRL_MUTEDLY, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_CAPTRIG, 0)\n self._reg_write(model.vars.MODEM_ETSCTRL_ETSLOC, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSCOUNTEREN, 0)\n self._reg_write(model.vars.MODEM_ETSTIM_ETSTIMVAL, 0)\n\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGEN, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGLUTSIZE, 0)\n self._reg_write(model.vars.MODEM_OOKSHAPING_OOKSHAPINGSTEP, 0)\n self._reg_write(model.vars.MODEM_PRE_DSSSPRE, 0)\n self._reg_write(model.vars.MODEM_PRE_PRESYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_PRE_SYNCSYMB4FSK, 0)\n self._reg_write(model.vars.MODEM_TIMING_FASTRESYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQINVEN, 0)\n self._reg_write(model.vars.MODEM_TIMING_TIMSEQSYNC, 0)\n self._reg_write(model.vars.MODEM_TIMING_TSAGCDEL, 0)\n\n # Long Range registers\n # FIXME: calculate these\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDLRDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LOGICBASEDPUGATE, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSPIKETHADD, 0)\n self._reg_write(model.vars.MODEM_LONGRANGE1_LRSS, 0)\n self._reg_write(model.vars.MODEM_LRFRC_CI500, 1)\n self._reg_write(model.vars.MODEM_LRFRC_FRCACKTIMETHD, 0)\n self._reg_write(model.vars.MODEM_LRFRC_LRCORRMODE, 1)\n\n # DSA registers\n # FIXME: what do we need to calculate here?\n self._reg_write(model.vars.MODEM_DSACTRL_AGCBAUDEN, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_AMPJUPTHD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD0, 2)\n self._reg_write(model.vars.MODEM_DSACTRL_ARRTOLERTHD1, 4)\n self._reg_write(model.vars.MODEM_DSACTRL_DSARSTON, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_FREQAVGSYM, 1)\n self._reg_write(model.vars.MODEM_DSACTRL_GAINREDUCDLY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_LOWDUTY, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_RESTORE, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_SCHPRD, 0)\n self._reg_write(model.vars.MODEM_DSACTRL_TRANRSTDSA, 0)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMAXTHD, 0x78)\n self._reg_write(model.vars.MODEM_DSATHD0_FDEVMINTHD, 12)\n self._reg_write(model.vars.MODEM_DSATHD0_SPIKETHD, 0x64)\n self._reg_write(model.vars.MODEM_DSATHD0_UNMODTHD, 4)\n self._reg_write(model.vars.MODEM_DSATHD1_AMPFLTBYP, 1)\n 
self._reg_write(model.vars.MODEM_DSATHD1_DSARSTCNT, 2)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQLATDLY, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_FREQSCALE, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_POWABSTHD, 0x1388)\n self._reg_write(model.vars.MODEM_DSATHD1_POWRELTHD, 0)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRDETDIS, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_PWRFLTBYP, 1)\n self._reg_write(model.vars.MODEM_DSATHD1_RSSIJMPTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_FDADJTHD, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_FREQESTTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_INTERFERDET, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_JUMPDETEN, 1)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETFORCE, 0)\n self._reg_write(model.vars.MODEM_DSATHD2_PMDETPASSTHD, 6)\n self._reg_write(model.vars.MODEM_DSATHD2_POWABSTHDLOG, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMAXTHDLO, 120)\n self._reg_write(model.vars.MODEM_DSATHD3_FDEVMINTHDLO, 12)\n self._reg_write(model.vars.MODEM_DSATHD3_SPIKETHDLO, 100)\n self._reg_write(model.vars.MODEM_DSATHD3_UNMODTHDLO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD0LO, 2)\n self._reg_write(model.vars.MODEM_DSATHD4_ARRTOLERTHD1LO, 4)\n self._reg_write(model.vars.MODEM_DSATHD4_POWABSTHDLO, 5000)\n self._reg_write(model.vars.MODEM_DSATHD4_SWTHD, 0)\n\n # FIXME: check with Subrata on how to calculate these\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_ANTWAIT, 20)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIP2ANT, 1)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPCORRTHD, 100)\n self._reg_write(model.vars.MODEM_PHDMODANTDIV_SKIPRSSITHD, 0)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR0,1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR1, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR2, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSICORR3, 1)\n self._reg_write(model.vars.MODEM_PHANTDECSION_RSSIANDDIVTHD, 20)\n self._reg_write(model.vars.MODEM_PHANTDECSION_CORRANDDIVTHD, 100)\n\n # FIXME: figure out how these AGC registers need to be calculated\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTPER, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_DEMODRESTARTTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_NEGSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_POSSTEPTHR, 0)\n self._reg_write(model.vars.AGC_RSSISTEPTHR_STEPPER, 0)\n\n # Antenna Diversity Registers\n # FIXME: check with Amey if we need to calculate these\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTDIVCTRL_ENADPRETHRESH, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL1_TIMEPERIOD, 436906)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTCOUNT, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTDFLTSEL, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWENABLE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_ANTSWTYPE, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_CFGANTPATTEN, 0)\n self._reg_write(model.vars.MODEM_ANTSWCTRL_EXTDSTOPPULSECNT, 30)\n self._reg_write(model.vars.MODEM_ANTSWEND_ANTSWENDTIM, 0)\n self._reg_write(model.vars.MODEM_ANTSWSTART_ANTSWSTARTTIM, 0)\n self._reg_write(model.vars.MODEM_CFGANTPATT_CFGANTPATTVAL, 0)\n #self._reg_write(model.vars.MODEM_CTRL3_ANTDIVMODE, 0)\n self._reg_write(model.vars.MODEM_CTRL3_ANTDIVREPEATDIS, 0)\n\n # Coherent Demod Registers\n #FIXME: Check with Yan/Per on how to calculate these\n self._reg_write(model.vars.MODEM_COH2_DSAPEAKCHPWRTH, 0)\n 
self._reg_write(model.vars.MODEM_COH3_COHDSADETDIS, 0)\n self._reg_write(model.vars.MODEM_COH3_DSAPEAKCHPWREN, 0)\n self._reg_write(model.vars.MODEM_COH3_LOGICBASEDCOHDEMODGATE, 0)\n self._reg_write(model.vars.MODEM_COH3_ONEPEAKQUALEN, 0)\n self._reg_write(model.vars.MODEM_COH3_PEAKCHKTIMOUT, 0)\n\n # Clock-gating register\n self._reg_write(model.vars.MODEM_AUTOCG_AUTOCGEN, 0) #We calculate MODEM_CGCLKSTOP_FORCEOFF in calculator instead\n self._reg_write(model.vars.FRC_AUTOCG_AUTOCGEN, 7)\n\n # Shaping filter coefficients\n #FIXME: check with Yan on how to calculate these\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF40, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF41, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF42, 0)\n self._reg_write(model.vars.MODEM_SHAPING10_COEFF43, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF44, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF45, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF46, 0)\n self._reg_write(model.vars.MODEM_SHAPING11_COEFF47, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF48, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF49, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF50, 0)\n self._reg_write(model.vars.MODEM_SHAPING12_COEFF51, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF52, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF53, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF54, 0)\n self._reg_write(model.vars.MODEM_SHAPING13_COEFF55, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF56, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF57, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF58, 0)\n self._reg_write(model.vars.MODEM_SHAPING14_COEFF59, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF60, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF61, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF62, 0)\n self._reg_write(model.vars.MODEM_SHAPING15_COEFF63, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF10, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF11, 0)\n self._reg_write(model.vars.MODEM_SHAPING2_COEFF9, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF12, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF13, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF14, 0)\n self._reg_write(model.vars.MODEM_SHAPING3_COEFF15, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF16, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF17, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF18, 0)\n self._reg_write(model.vars.MODEM_SHAPING4_COEFF19, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF20, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF21, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF22, 0)\n self._reg_write(model.vars.MODEM_SHAPING5_COEFF23, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF24, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF25, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF26, 0)\n self._reg_write(model.vars.MODEM_SHAPING6_COEFF27, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF28, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF29, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF30, 0)\n self._reg_write(model.vars.MODEM_SHAPING7_COEFF31, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF32, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF33, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF34, 0)\n self._reg_write(model.vars.MODEM_SHAPING8_COEFF35, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF36, 0)\n 
self._reg_write(model.vars.MODEM_SHAPING9_COEFF37, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF38, 0)\n self._reg_write(model.vars.MODEM_SHAPING9_COEFF39, 0)\n\n # Modem Registers with fixed value\n self._reg_write(model.vars.MODEM_AFC_AFCTXMODE, 0)\n# self._reg_write(model.vars.MODEM_AFC_AFCGEAR, 3)\n self._reg_write(model.vars.MODEM_CTRL0_DEMODRAWDATASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL2_DMASEL, 0)\n self._reg_write(model.vars.MODEM_CTRL3_PRSDINEN, 0)\n self._reg_write(model.vars.MODEM_CTRL4_CLKUNDIVREQ, 0)\n self._reg_write(model.vars.MODEM_CTRL3_RAMTESTEN, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_CLKWIDTH, 1)\n self._reg_write(model.vars.MODEM_DIRECTMODE_DMENABLE, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCASYNC, 0)\n self._reg_write(model.vars.MODEM_DIRECTMODE_SYNCPREAM, 3)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPAPOWER, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_ENMANPASELSLICE, 0)\n self._reg_write(model.vars.MODEM_PADEBUG_MANPACLKAMPCTRL, 0)\n self._reg_write(model.vars.MODEM_CTRL0_OOKASYNCPIN, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DETDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_DUALCORROPTDIS, 0)\n self._reg_write(model.vars.MODEM_CTRL0_FRAMEDETDEL, 0)\n self._reg_write(model.vars.MODEM_CTRL1_SYNC1INV, 0)\n\n # FRC Registers with fixed value\n self._reg_write(model.vars.FRC_BOICTRL_BOIBITPOS, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIEN, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIFIELDLOC, 0)\n self._reg_write(model.vars.FRC_BOICTRL_BOIMATCHVAL, 0)\n self._reg_write(model.vars.FRC_CTRL_LPMODEDIS, 0)\n self._reg_write(model.vars.FRC_CTRL_RATESELECT, 0)\n self._reg_write(model.vars.FRC_CTRL_WAITEOFEN, 0)\n self._reg_write(model.vars.FRC_DFLCTRL_DFLBOIOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITORDER, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLBITS, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMINLENGTH, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLOFFSET, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_DSLSHIFT, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_RXSUPRECEPMODE, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_STORESUP, 0)\n self._reg_write(model.vars.FRC_DSLCTRL_SUPSHFFACTOR, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TRAILTXREPLEN, 0)\n self._reg_write(model.vars.FRC_TRAILTXDATACTRL_TXSUPPLENOVERIDE, 0)\n self._reg_write(model.vars.FRC_WCNTCMP3_SUPPLENFIELDLOC, 0)\n self._reg_write(model.vars.FRC_WCNTCMP4_SUPPLENGTH, 0)\n\n # Added new reg-fields related to 15.4 subG OQPSK phys\n self._reg_write(model.vars.MODEM_COH3_COHDSACMPLX, 0)\n self._reg_write(model.vars.MODEM_SYNCPROPERTIES_STATICSYNCTHRESH, 0)\n\n # Added new reg-fields related to Internal Long Range\n self._reg_write(model.vars.MODEM_PRE_PREWNDERRORS, 0)\n self._reg_write(model.vars.MODEM_CTRL3_TIMINGBASESGAIN, 0)\n\n #AGC default settings\n self._reg_write(model.vars.AGC_CTRL0_CFLOOPNFADJ, 0)\n self._reg_write(model.vars.AGC_CTRL6_DUALRFPKDDEC, 240296)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD1_RFPKDHITHD1, 40)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD0, 1)\n self._reg_write(model.vars.AGC_DUALRFPKDTHD0_RFPKDLOWTHD1, 10)\n self._reg_write(model.vars.AGC_CTRL6_ENDUALRFPKD, 1)\n\n self._reg_write(model.vars.MODEM_SQ_SQEN , 0)\n self._reg_write(model.vars.MODEM_SQ_SQTIMOUT , 0)\n 
self._reg_write(model.vars.MODEM_SQEXT_SQSTG2TIMOUT , 0)\n self._reg_write(model.vars.MODEM_SQEXT_SQSTG3TIMOUT , 0)\n\n # reg-fields to modify sync detection reset behavior PGOCELOT-5282\n self._reg_write(model.vars.MODEM_FRMSCHTIME_PMRSTSYCNEN, 0)\n self._reg_write(model.vars.MODEM_FRMSCHTIME_DSARSTSYCNEN, 0)\n\n #RAC settings\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVN, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENDRVP, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENREG3, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTENBYPASS40MHZ, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN0_CLKMULTREG3ADJV, part_family)\n self._reg_write_default(model.vars.RAC_CLKMULTEN1_CLKMULTDRVAMPSEL, part_family)\n self._reg_write_default(model.vars.RAC_IFADCTRIM0_IFADCSIDETONEAMP, part_family)", "def _command(self, commands):\n# \"\"\"Send command to spi bus of display chip, most DC pin need set to LOW \"\"\"\n# if self._spi == None: raise \"Do not setting SPI\"\n# GPIO.output( self._spi_dc, 0 )\n# self._spi.writebytes( commands )\n raise NotImplementedError", "def setup(instname):\n global reducer, inst_name,van_mass,bleed_switch,rate,pixels\n # debugging (allows to reload changed DirectEnergyConversion package from Mantid)\n\n if instname=='MAR' or instname=='mar':\n print 'setup mari'\n inst_name='MAR'\n reducer = DRC.setup_reducer('MARI')\n bleed_switch=False\n rate=0.0\n pixels=0\n elif instname=='MER' or instname=='mer':\n print 'setup merlin'\n inst_name='MER'\n reducer = DRC.setup_reducer('MERLIN')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='MAP' or instname=='map':\n print 'setup maps'\n inst_name='MAP'\n reducer = DRC.setup_reducer('MAPS')\n bleed_switch=False\n rate=0.0\n pixels=0.0\n elif instname=='LET' or instname=='let':\n print 'setup let'\n inst_name='LET'\n reducer = DRC.setup_reducer('LET')\n bleed_switch=True\n rate=0.01\n pixels=80\n elif instname=='ARCS' or instname=='arcs':\n print 'setup Arcs'\n inst_name='ARC'\n reducer = DRC.setup_reducer('ARCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='SEQ' or instname=='seq':\n print 'setup Sequoia'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('SEQUOIA')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='CNCS' or instname=='cncs':\n print 'setup cncs'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('CNCS')\n bleed_switch=False\n rate=0.01\n pixels=80\n elif instname=='HYSPEC' or instname=='hyspec':\n print 'setup hyspec'\n inst_name='SEQ'\n reducer = DRC.setup_reducer('HYSPEC')\n bleed_switch=False\n rate=0.01\n pixels=80\n else:\n print 'Instrument name not defined'\n return \n van_mass=reducer.get_default_parameter('vanadium-mass')", "def setup(self):\n header_print(self.data['intro'])\n header_print(self.data['help'])\n random.shuffle(self.data['draw'])\n random.shuffle(self.data['locations'])\n random.shuffle(self.data['events'])\n random.shuffle(self.data['aces'])\n random.shuffle(self.data['personalities'])\n self.stats = {\n 'round': 0,\n 'powers': {\n 'MOONS': 6,\n 'SUNS': 6,\n 'WAVES': 6,\n 'LEAVES': 6,\n 'WYRMS': 6,\n 'KNOTS': 6,\n },\n 'hand': self.data['draw'][:],\n 'discard': [],\n 'active': [],\n 'opponent': {},\n }", "def basic(self):\n pass", "def info(self) -> dict:", "def fetch_stick(self):\r\n print(\"There you go, sir!\\n\")", "def init_devices(self):\n self.hp_nb = int(self.rs_nb* self.hp_proportion/(1- self.hp_proportion))\n self.defense_cost = self.hp_nb 
* self.hp_unit_cost\n rs_devices = [True for i in range(self.rs_nb)] #rs --> True\n hp_devices = [False for i in range(self.hp_nb)] #hp --> False\n self.devices = rs_devices + hp_devices\n shuffle(self.devices)", "def load_device():", "def __init__(self, dev):\n self.dev = dev\n self.dev.cla = 0x80", "def take_control(self):\n pass", "def init(self):\n self.reset()\n\n self.__interface.send_command('POWER_SETTING')\n self.__interface.send_data(0x37)\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('PANEL_SETTING')\n self.__interface.send_data(0xCF)\n self.__interface.send_data(0x08)\n\n self.__interface.send_command('BOOSTER_SOFT_START')\n self.__interface.send_data(0xc7)\n self.__interface.send_data(0xcc)\n self.__interface.send_data(0x28)\n\n self.__interface.send_command('POWER_ON')\n self.wait_until_idle()\n\n self.__interface.send_command('PLL_CONTROL')\n self.__interface.send_data(0x3c)\n\n self.__interface.send_command('TEMPERATURE_CALIBRATION')\n self.__interface.send_data(0x00)\n\n self.__interface.send_command('VCOM_AND_DATA_INTERVAL_SETTING')\n self.__interface.send_data(0x77)\n\n self.__interface.send_command('TCON_SETTING')\n self.__interface.send_data(0x22)\n\n self.__interface.send_command('TCON_RESOLUTION')\n self.__interface.send_data(0x02) #source 640\n self.__interface.send_data(0x80)\n self.__interface.send_data(0x01) #gate 384\n self.__interface.send_data(0x80)\n\n self.__interface.send_command('VCM_DC_SETTING')\n self.__interface.send_data(0x1E) #decide by LUT file\n\n self.__interface.send_command(0xe5, False) #FLASH MODE\n self.__interface.send_data(0x03)", "def info(self):\n\n print(\"pixellisation:\", self.pixel)\n print(\"number of components:\", self.ncomp)\n print(\"number of pixels:\", self.data.shape[:] if self.ncomp == 1 else self.data.shape[1:])\n print(\"nside:\", self.nside)\n print(\"geometry:\", self.geometry)\n print(\"coordinates:\", self.coordinate)", "def info():\n print(\"Made using the OOP RPG game creator (c) Claire.\\n\")", "def protocolInfoLaser(self, fh, inputs, derivative):\n #global summary\n try:\n nspikes = len(inputs)\n self.devicemode = 'Laser'\n #print inputs\n # print 'FH parent info: ', fh.parent().info()\n print('1')\n reps = fh.parent().info()['protocol']['conf']['repetitions'] # fh.info()[('protocol', 'repetitions')]\n print('2')\n print(list(fh.info().keys()))\n print(fh.info())\n try:\n pulseDurIndex = fh.info()['Laser-Blue', 'Shutter.duration']\n except:\n try:\n pulseDurIndex = fh.info()['Laser-UV', 'Shutter.duration']\n except:\n raise ValueError(\" No key for Laser-Blue or Laser-UV in data set\")\n # fh.info()[('Laser-Blue', 'Command.PulseTrain_length')]\n # print 'pulsedurindex: ', pulseDurIndex\n fn = fh.shortName()\n # find date string in the path, and return path to current data set\n # allows us to identify the data set by date, slice, cell, protocol, etc.\n dm = re.compile(r'(\\d{4,4})\\.(\\d{2,2})\\.(\\d{2,2})*')\n dsearch = dm.search(fh.name())\n expname = fh.name()[dsearch.start():] # pull full path for experiment here, but leave out everything above the date\n print('3')\n pulseDur = fh.parent().info()['sequenceParams'][('Laser-Blue','Shutter.duration')] # [pulseDurIndex]\n print('4')\n pulseDur = pulseDur[pulseDurIndex]\n print('5')\n pulseTrainCommandShutter = fh.parent().info()['devices']['Laser-Blue']['channels']['Shutter']\n print('6')\n pulseTrainFcn = pulseTrainCommandShutter['waveGeneratorWidget']['function']\n r = 
re.compile('(?P<type>pulse)\\((?P<delay>\\d+),\\s(?P<param>\\w+),\\s(?P<value>\\d+)\\)')\n s = r.match(pulseTrainFcn)\n print('6.5')\n startTime = float(s.group('delay'))*1e-3 # pulseTrainFcn['start']['value'] # retrieve start time\n print('7')\n rep = 0 # fh.info()[('protocol', 'repetitions')]\n ipi = 1 # pulseTrainInfo['interpulse_length']['value'] # retrieve interpulse interval\n npulses = 1 # pulseTrainInfo['pulse_number']['value'] # retrieve number of pulses in train\n spikeTimes = [t['time'] for t in inputs]\n # figure max of derivative of the data after each stimulus pulse. 5 msec window.\n t = derivative.xvals(\"Time\")\n slopes = np.zeros(npulses)\n print('8')\n for n in range(npulses):\n t0 = startTime + n * ipi\n t1 = t0 + 3e-3\n x = np.where((t > t0) & (t <= t1))\n print('n, x: ', n, x)\n slopes[n] = np.max(derivative[x])\n\n res = OrderedDict([('Experiment: ', expname), ('File: ', fn), ('startTime', startTime),\n ('NPulses', npulses), ('IPI', ipi), ('PulseDur', pulseDur), ('Reps', reps),\n ('thisRep', rep),\n ('NSpikes', nspikes), ('SpikeTimes', spikeTimes), ('Slopes', slopes)])\n self.summary.append(res)\n except:\n raise Exception('Laser stuff failed')\n return res", "def _connect(self):\n\n log.info(\"Loading HVI\")\n\n self._hvi = sd1.SD_HVI()\n hvi_file = pkg_resources.resource_filename(\"qtrl.keysight\", 'sequencer.hvi')\n log.info(hvi_file)\n self._hvi.open(hvi_file)\n # for some unknown reason, this has to be run twice before it will not error\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1)\n self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2)\n\n assert self._hvi.open(hvi_file) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=3, index=0) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=4, index=1) >= 0, 'Failed to load HVI'\n assert self._hvi.assignHardwareWithIndexAndSlot(nChassis=1, nSlot=5, index=2) >= 0, 'Failed to load HVI'\n\n assert self._hvi.compile() >= 0, 'Failed to load HVI'\n\n assert self._hvi.load() >= 0, 'Failed to load HVI'\n self._hvi.reset()\n\n cur_chan = 0\n for i, card in enumerate(self.cards):\n if card.connection is not None:\n self.close()\n\n card_cxn = sd1.SD_AOU()\n assert card_cxn.openWithSlot(\"\", card.chassis, card.slot) > 0, 'Failed to connect to slot'\n\n self.cards[i] = card._replace(connection=card_cxn)\n # self.cards[i].connection.triggerIOconfig(sd1.SD_TriggerDirections.AOU_TRG_IN)\n\n for channel in range(card.channels):\n self.channels[cur_chan] = KeysightChannel(channel=channel,\n chassis=card.chassis,\n slot=card.slot,\n model=card.model,\n type=card.type,\n connection=card_cxn)\n\n self.channels[cur_chan].connection.channelWaveShape(channel+1, sd1.SD_Waveshapes.AOU_AWG)\n\n self.channels[cur_chan].connection.clockResetPhase(3, 0)\n\n # ext trig config, 0 is external source, 3 is rising edge\n # self.channels[cur_chan].connection.AWGtriggerExternalConfig(channel+1, 0, 3)\n\n cur_chan += 1\n self._hvi.start()\n self._hvi.stop()\n\n self._n_channels = cur_chan", "def SPIchiperase(self):\n self.writecmd(0x01,0x81);", "def __init__(self):\n self.ram = [0] * 256\n self.reg = [0] * 8\n self.pc = 0", "def c(self):\n pass", "def c(self):\n pass", "def ExtraInfo(self) -> object:", "def __init__(self):\n self.ser = serial.Serial('/dev/ttyUSB3',9600)\n collect_readings = False\n self.colours = []\n self.max_readings = 50 #maximum 
number of readings to use", "def get_info(self):\n\t\tret = 'Flash info\\n'\n\t\tret += '\\tGPNVM bits: ' + str(self.read_gpnvm()) + '\\n'\n\t\tret += '\\tUnique identifier area: ' + self.read_unique_identifier_area().decode('ascii', 'replace') + '\\n'\n\t\tret += '\\tDescriptor: ' + str(self.read_descriptor()) + '\\n'\n\t\treturn ret", "def refresh_description(self):\n # AIN\n code_ain = (self.CODE >> 12) & 0b0111\n # DICT_AIN = [[0, 1], [0, 3], [1, 3], [2, 3], [0, 4], [1, 4], [2, 4], [3, 4]]\n ind_p, ind_n = DICT_AIN[code_ain]\n self.AINP = \"AIN\" + str(ind_p)\n self.AINN = \"AIN\" + str(ind_n)\n if (ind_n == 4): self.AINN = \"GND\"\n\n # FSR\n code_fsr = (self.CODE >> 9) & 0b0000111\n # DICT_FSR = [\"6.144V\", \"4.096V\", \"2.048V\", \"1.024V\", \"0.512V\", \"0.256V\"]\n self.FSR = DICT_FSR[code_fsr]\n\n # MODE\n\n # rate\n code_rate = (self.CODE >> 5) & 0b00000000111\n # DICT_RATE = [\"8 SPS\", \"16 SPS\", \"32 SPS\", \"64 SPS\", \"128 SPS\", \"250 SPS\", \"475 SPS\", \"860 SPS\"]\n self.RATE = DICT_RATE[code_rate]", "def reckon(self):", "def polyChipOff(*args, attraction: Union[float, bool]=0.0, caching: bool=True,\n constructionHistory: bool=True, duplicate: bool=True, gain: Union[float,\n List[float], bool]=1.0, gravity: Union[List[float, float, float], bool]=None,\n gravityX: Union[float, bool]=0.0, gravityY: Union[float, bool]=0.0, gravityZ:\n Union[float, bool]=0.0, keepFacesTogether: bool=True, keepFacetTogether:\n bool=True, localCenter: Union[int, bool]=0, localDirection: Union[List[float,\n float, float], bool]=None, localDirectionX: Union[float, bool]=0.0,\n localDirectionY: Union[float, bool]=0.0, localDirectionZ: Union[float,\n bool]=0.0, localRotate: Union[List[float, float, float], bool]=None,\n localRotateX: Union[float, bool]=0.0, localRotateY: Union[float, bool]=0.0,\n localRotateZ: Union[float, bool]=0.0, localScale: Union[List[float, float,\n float], bool]=None, localScaleX: Union[float, bool]=0.0, localScaleY:\n Union[float, bool]=0.0, localScaleZ: Union[float, bool]=0.0, localTranslate:\n Union[List[float, float, float], bool]=None, localTranslateX: Union[float,\n bool]=0.0, localTranslateY: Union[float, bool]=0.0, localTranslateZ:\n Union[float, bool]=0.0, magnX: Union[float, bool]=0.0, magnY: Union[float,\n bool]=0.0, magnZ: Union[float, bool]=0.0, magnet: Union[List[float, float,\n float], bool]=None, name: AnyStr=\"\", nodeState: Union[int, bool]=0, offset:\n Union[float, bool]=0.0, pivot: Union[List[float, float, float], bool]=None,\n pivotX: Union[float, bool]=0.0, pivotY: Union[float, bool]=0.0, pivotZ:\n Union[float, bool]=0.0, random: Union[float, bool]=0.0, scale: Union[List[float,\n float, float], bool]=None, scaleX: Union[float, bool]=0.0, scaleY: Union[float,\n bool]=0.0, scaleZ: Union[float, bool]=0.0, translate: Union[List[float, float,\n float], bool]=None, translateX: Union[float, bool]=0.0, translateY: Union[float,\n bool]=0.0, translateZ: Union[float, bool]=0.0, weight: Union[float, bool]=0.0,\n worldSpace: bool=True, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def data(self):", "def __init__(self):\n self.bytes = bytearray(3)\n MCP4725.__init__(self)", "def __init__(self, busRestriction=0, devAddressRestriction=0, serialNumber=\"\"):\n self.handle = libcaer.caerDeviceOpen(1, libcaer.CAER_DEVICE_DAVIS, busRestriction, devAddressRestriction, serialNumber)\n self.info = libcaer.caerDavisInfoGet(self.handle)\n\n print(\"device ID: \" + str(libcaer.caer_davis_info_deviceID_get(self.info)))\n\n if 
(libcaer.caer_davis_info_deviceIsMaster_get(self.info)):\n print(\"device is Master\")\n else:\n print(\"device is Slave\")\n\n print(\"device Serial Number: \" + str(libcaer.caer_davis_info_deviceSerialNumber_get(self.info)))\n print(libcaer.caer_davis_info_deviceString_get(self.info))\n\n self.dvsSizeX = libcaer.caer_davis_info_dvsSizeX_get(self.info)\n self.dvsSizeY = libcaer.caer_davis_info_dvsSizeY_get(self.info)\n\n self.apsSizeX = libcaer.caer_davis_info_apsSizeX_get(self.info)\n self.apsSizeY = libcaer.caer_davis_info_apsSizeY_get(self.info)\n\n # init default biases\n ret = libcaer.caerDeviceSendDefaultConfig(self.handle)\n if(ret == True):\n print(\"Default biases loaded\")\n else:\n print(\"Error while loading default biases\")\n raise Exception\n\n # set blocking data exchange\n ret = libcaer.caerDeviceConfigSet(self.handle, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE, libcaer.CAER_HOST_CONFIG_DATAEXCHANGE_BLOCKING, True)\n if(ret == True):\n print(\"Data exchange set to blocking mode\")\n else:\n print(\"Error in communicating with the device, please check your setup\")\n raise Exception\n\n # start data transfer from device\n ret = libcaer.caerDeviceDataStart(self.handle, None, None, None, None, None)\n if(ret == True):\n print(\"Data transfer started\")\n else:\n print(\"Error in starting data transfer\")\n raise Exception", "def __init__(self, make, model, year):\r\n super().__init__(make, model, year)\r\n self.battery_size = 70\r\n # self.autopilot = autopilot\r", "def moi(self):\n\n pass", "def enable(self) -> None:" ]
[ "0.67811126", "0.6066584", "0.5964433", "0.59339035", "0.589296", "0.58123773", "0.58010674", "0.57554996", "0.5728705", "0.56449646", "0.56439716", "0.56414634", "0.56379575", "0.56379575", "0.5594598", "0.55358076", "0.5497361", "0.54925936", "0.5483349", "0.5473763", "0.5462485", "0.5460163", "0.54515415", "0.5441278", "0.5419607", "0.54022926", "0.53795564", "0.53726125", "0.5338211", "0.5338211", "0.5337915", "0.5328013", "0.5326133", "0.53258103", "0.52956903", "0.5283403", "0.528021", "0.5279774", "0.5276576", "0.52745867", "0.52732724", "0.52700603", "0.5265963", "0.5249531", "0.52298385", "0.52277905", "0.5222615", "0.52194977", "0.5206727", "0.5206362", "0.5201322", "0.51996315", "0.51887923", "0.51804155", "0.51804155", "0.51804155", "0.51804155", "0.51774514", "0.5176834", "0.516562", "0.5158818", "0.5148516", "0.5147972", "0.51450545", "0.51415676", "0.5134778", "0.5133039", "0.5128874", "0.51275474", "0.51258636", "0.5125459", "0.5123773", "0.5122468", "0.5119849", "0.51123893", "0.5109508", "0.5105934", "0.51058424", "0.50912714", "0.5090566", "0.50820786", "0.5077213", "0.5076216", "0.5076194", "0.50725013", "0.5070938", "0.5069573", "0.5067492", "0.5067492", "0.5065867", "0.5065824", "0.506569", "0.50642616", "0.5063172", "0.5062251", "0.5059142", "0.505062", "0.50505215", "0.50487983", "0.5045563", "0.5045133" ]
0.0
-1
details about the optical setup
def __init__(self, basePath=None, source=None, excitation=None, emission=None, dichroic=None, illumination_distribution=None, calculate_illumination_distribution=False, illumPath=None, illumSavePath=None, illumSaveName=None, showIllumPlot=False, save_txt=False, save_plot=False, save_image=False):
    self.basePath = basePath  # this should come from CurlypivTestCollection
    self.source = source
    self.excitation_wavelength = excitation
    self.emission_wavelength = emission
    self.dichroic = dichroic

    if illumination_distribution is not None:
        self.illumination_distribution = illumination_distribution
    elif illumPath is not None:
        flatfield = io.imread(illumPath, plugin='tifffile')
        if len(np.shape(flatfield)) > 2:
            flatfield = np.asarray(np.rint(np.mean(flatfield, axis=0)), dtype='uint16')
        self.illumination_distribution = flatfield
    elif calculate_illumination_distribution and illumination_distribution is None:
        self.illumination_distribution = measureIlluminationDistributionXY(basePath=self.basePath, illumPath=illumPath,
                                                                           show_image=showIllumPlot, save_image=save_image,
                                                                           save_img_type='.tif', save_txt=save_txt,
                                                                           show_plot=showIllumPlot, save_plot=save_plot,
                                                                           savePath=illumSavePath, savename=illumSaveName)
    else:
        self.illumination_distribution = illumination_distribution

    self.flatfield = self.illumination_distribution

    if self.flatfield is not None:
        self.flatfield_mean = np.mean(self.flatfield)
        self.flatfield_std = np.std(self.flatfield)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_help(self):\r\n\t\ttext = \"\\tName: ml_scikit_OPTICS\"\r\n\t\ttext += \"\\n\\t\\tThis machine learning plugin uses scikit-learn's OPTICS algorithm.\\n\"\r\n\t\ttext += \"\\n\\t\\tOptional Parameters:\"\r\n\t\ttext += \"\\n\\t\\t\\tOPTICS_skip_normalization: Do NOT perform normalization (scaling) of data, skip this step.\"\r\n\t\ttext += \"\\n\\t\\t\\OPTICS_eps: Specify eps parameter (default is 1.0).\"\r\n\t\ttext += \"\\n\\t\\t\\OPTICS_min_samples: Specify min_samples parameter (default is 5).\"\r\n#\r\n# OPTICS (with memory complexity n) is an alternative to DBSCAN (with memory complexity n^2)\r\n# which has time complexity n^2 in general with the default max_eps = np.inf. \r\n# We will set max_eps = eps to reduce the run-time.\r\n#\r\n\t\treturn text", "def help_opt(self):\n print(OPTIONS)", "def print_configuration_info():\n print(\"Selected dataset:\", DATASET) \n print(\"Dataset base directory:\", BASE_INPUT_DIR) \n print(\"Daytime option:\", DAYTIME) \n print(\"Nones option:\", NONES) \n print(\"Selected action/activity representation:\", OP)\n print(\"Number of epochs: \", EPOCHS)\n print(\"Number of folds for cross-validation: \", FOLDS)\n print(\"Input directory for data files:\", INPUT_DIR) \n print(\"Embedding matrix file:\", EMBEDDING_WEIGHTS)\n print(\"Action sequences (X) file:\", X_FILE) \n print(\"Word embedding file for activities:\", ACTIVITY_EMBEDDINGS) \n print(\"Activity to int mappings:\", ACTIVITY_TO_INT)\n print(\"Int to activity mappings:\", INT_TO_ACTIVITY) \n print(\"Experiment ID:\", EXPERIMENT_ID)\n print(\"Treat imbalance data:\", TREAT_IMBALANCE)\n print(\"Save intermediate plots:\", SAVE)\n print(\"Batch size:\", BATCH_SIZE)\n print(\"Dropout:\", DROPOUT)\n print(\"Loss:\", LOSS)", "def add_details(self):\n\n if self.co.algorithm == \"vv\":\n algo = \"Verlocity Verlot\"\n if self.co.algorithm == \"rk4o\":\n algo = \"Runge Kutta Forth Order\"\n if self.co.algorithm == \"herm\":\n algo = \"Hermite Fourth Order\"\n\n self.algorithm_title = self.ax.text(\n 1.01, 0.65, \"Algorithm:\", transform=self.ax.transAxes\n )\n self.algorithm_text = self.ax.text(\n 1.01, 0.58, algo, transform=self.ax.transAxes\n )\n self.timestep_text = self.ax.text(\n 1.01, 0.51, \"dt =\" + str(self.co.tstep), transform=self.ax.transAxes\n )\n self.length_softening_distance = self.ax.text(\n 1.01,\n 0.44,\n r\"$\\epsilon$ = \" + str(self.co.epsilon),\n transform=self.ax.transAxes,\n )", "def _print_setup(self):\r\n pr = lambda x: print(\"ht3_solver:\\t\" + x)\r\n pr(\"Start time is \" + str(python_time.asctime()))\r\n pr(\"\")\r\n pr(\"TIME SETTINGS:\")\r\n pr(\"Current time:\\t\\t\\t\\t\" + str(self.current_T))\r\n pr(\"Delta T:\\t\\t\\t\\t\" + str(self.d_T))\r\n pr(\"Finish time:\\t\\t\\t\\t\" + str(self.max_T))\r\n pr(\"\")\r\n pr(\"Using predefined funtions?:\\t\\t\" + str(self.redefined))\r\n pr(\"\")\r\n pr(\"PHYSICAL MODEL: \")\r\n pr(\"Background temperature:\\t\\t\\t\" + str(self.background_temperature))\r\n pr(\"Starting temp (maybe overrided):\\t\" + str(self.initial_temperature))\r\n pr(\"Diffusion scale:\\t\\t\\t\" + str(self.diff_scale))\r\n pr(\"Solid refractive index:\\t\\t\\t\" + str(self.refr_idx_vol))\r\n pr(\"Background refractive index:\\t\\t\" + str(self.refr_idx_background))\r\n pr(\"Solid density:\\t\\t\\t\\t\" + str(self.density))\r\n pr(\r\n \"Solid specific heat capacity:\\t\\t\" + str(\r\n self.heat_capacity))\r\n pr(\"Solid thermal conductivity:\\t\\t\" + str(self.thermal_conductivity))\r\n pr(\"Solid hemispheric emissivity:\\t\\t\" + 
str(self.alpha))\r\n pr(\"SP1 setting - r1:\\t\\t\\t\" + str(self.r1))\r\n pr(\"SP1 setting - r2:\\t\\t\\t\" + str(self.r2))\r\n pr(\"Convective coefficient:\\t\\t\\t\" + str(self.convect_coeff))\r\n pr(\"\")\r\n pr(\"RADIATION - FREQUENCIES:\")\r\n pr(\"Frequencies defined beyond base:\\t\" + str(len(self.fq_list)))\r\n pr(\"-----------------------------------------------------------------\")\r\n pr(\"Frequency (Hz)\\t\\tAbsorbtion coeff\")\r\n pr(\"-----------------------------------------------------------------\")\r\n pr(str(self.v0_frequency) + \"\\t\\t\" + \"-\")\r\n for i in range(0, len(self.fq_list)):\r\n pr(str(self.fq_list[i]) + \"\\t\" + str(self.absorb_coeffs[i]))\r\n pr(\"-----------------------------------------------------------------\")", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def initialize(self,opt):\n ToolkitBase.initialize(self, opt)\n if self.method_name =='affine':\n self.affine_on = True\n self.warp_on = False\n raise ValueError(\"affine is not separately used in demons\")\n elif self.method_name =='demons':\n \"\"\" In this case, the nifty affine would be first called\"\"\"\n self.affine_on = False\n self.warp_on = True\n self.demons_param = opt['tsk_set']['reg']['demons']", "def training_info(self):\n pass", "def setups():\n setups = []\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F2 = dict()\n kotani2017_F2['name'] = 'kotani2017_F2'\n kotani2017_F2['piltemplate'] = kotani2017_F2_pil\n kotani2017_F2['pilparams'] = [None]\n kotani2017_F2['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F2['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=1'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.5'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.05')]\n kotani2017_F2['reporter'] = 'D'\n kotani2017_F2['exp_results'] = [(7733, 7.42), (11333, 6.18), (25533, 1.40)]\n setups.append(kotani2017_F2)\n\n\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F3 = dict()\n kotani2017_F3['name'] = 'kotani2017_F3'\n kotani2017_F3['piltemplate'] = kotani2017_F3_pil\n kotani2017_F3['pilparams'] = [None]\n kotani2017_F3['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F3['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.001')]\n kotani2017_F3['reporter'] = 'D'\n kotani2017_F3['exp_results'] = [(21220, 7.72), (64203, 3.12), (86996, 0.69)]\n setups.append(kotani2017_F3)\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F4 = dict()\n kotani2017_F4['name'] = 'kotani2017_F4'\n kotani2017_F4['piltemplate'] = kotani2017_F4_pil\n kotani2017_F4['pilparams'] = 
[None]\n kotani2017_F4['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F4['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.001'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0')]\n kotani2017_F4['reporter'] = 'D'\n kotani2017_F4['exp_results'] = [(6815, 6.06), (9004, 4.78), (10278, 4.03), (10795, 3.73)]\n setups.append(kotani2017_F4)\n\n return setups", "def print_info(self):\n\n print \"\\nALGORITHM INFO\"\n print \"modelnumber:\", self.modelnumber\n print \"restart:\", self.restart\n print \"particles:\", self.particles\n print \"beta:\", self.beta\n print \"dt:\", self.dt\n if self.mode != 1:\n if len(self.final_epsilon) == 0:\n print \"manual epsilon:\"\n for i in range(self.epsilon.shape[0]):\n print \"\\t\",\n for j in range(self.epsilon.shape[1]):\n print \"\", self.epsilon[i, j],\n print \"\"\n else:\n print \"auto epsilon:\"\n print \"\\t\", self.final_epsilon\n print \"\\talpha:\", self.alpha\n\n print \"kernel:\", self.kernel\n print \"model kernel:\", self.modelkernel\n print \"model prior:\", self.modelprior\n\n print \"DATA:\"\n print \"\\ttimes:\", self.times\n if self.mode == 0:\n print \"\\tvars:\"\n for i in range(len(self.data[0, :])):\n print \"\\t\",\n for j in range(self.ntimes):\n print \"\", self.data[j, i],\n print \"\"\n\n print \"MODELS:\", self.nmodels\n for i in range(self.nmodels):\n print \"\\t\", \"npar:\", self.nparameters[i]\n print \"\\t\", \"nspecies:\", self.nspecies[i]\n print \"\\t\", \"name:\", self.name[i]\n print \"\\t\", \"source:\", self.source[i]\n print \"\\t\", \"type:\", self.type[i]\n print \"\\t\", \"fit:\", self.fit[i]\n print \"\\t\", \"init:\", self.x0prior[i]\n print \"\\t\", \"prior:\", self.prior[i]\n print \"\\t\", \"logp:\", self.logp[i]\n print \"\\n\"", "def init_opt(self):\n raise NotImplementedError", "def init_opt(self):\n raise NotImplementedError", "def init_opt(self):\n raise NotImplementedError", "def init_opt(self):\n raise NotImplementedError", "def setup(self, optparser):\n\t\tpass", "def printConf(self):\n print \"\"\n for pname, pvalue in self.neededParams.items():\n print pname, pvalue\n for pname, pvalue in self.optionalParams.items():\n print pname, pvalue", "def __init__(self):\n self.label = \"Get SDM parameters\"\n self.description = \"This tool is used to view the Environment and SDM modeling parameters that have been set by the user. All of the values reported by this tool must be set to values specific to the model to be made. Using the ESRI default values will cause SDM to fail. If the Environment is not completely set, then an error message stating \\\"Improper SDM setup\\\" will occur. The successful running of this tool does not assure that the setup is correct; only that the default values have been changed. 
See the Environment Settings section of the Help file for Calculate Weights for the details.\"\n\n self.canRunInBackground = False\n self.category = \"Utilities\"", "def antenny_config_help(self):\n return self.antenny_config.get_help_info()", "def initializeParameters(self):\r\n\t\tself.input_raster.enabled = True\r\n\t\tself.approach.enabled = True\r\n\t\tself.predefined_pattern.enabled = False\r\n\t\tself.predefined_pattern.value = 'Mexican Hat wavelet'\r\n\t\tself.pattern_workspace.enabled = False\r\n\t\tself.point_matrix_size.enabled = False\r\n\t\tself.point_matrix_size.value = 3\r\n\t\tself.point_vectors.enabled = False\r\n\t\tself.mapping_field.enabled = False\r\n\t\tself.move_to_max.enabled = False\r\n\t\tself.move_to_max_distance.enabled = False\r\n\t\tself.move_to_max_distance.value = 3\r\n\t\tself.mh_iteration.enabled = False\r\n\t\tself.mh_dil_val.enabled = False\r\n\t\tself.mh_dil_val.value = 1\r\n\t\tself.mh_dil_start.value = 0.01\r\n\t\tself.mh_dil_stop.value = 1\r\n\t\tself.mh_dil_step.value = 0.1\r\n\t\tself.mh_dil_start.enabled = False\r\n\t\tself.mh_dil_stop.enabled = False\r\n\t\tself.mh_dil_step.enabled = False\r\n\t\tself.transform.enabled = False\r\n\t\tself.size_of_the_cell.enabled = False\r\n\t\tself.size_of_the_cell.value = 1\r\n\t\tself.output_sim_matrix.enabled = False\r\n\t\tself.output_table.enabled = False\r\n\t\tself.output_raster_workspace.enabled = False", "def info(self):\n\n if self.engine_name == 'RAGE':\n self._add_argument('-help')\n self._log('info', 'command line arguments')\n else:\n self._log('info', 'not supported', True)", "def param_info():\n\n\tgizmo_names = syn.getGizmoNames()\n\n\tfor gizmo in gizmo_names:\n\t\tparams = syn.getParameterNames(gizmo)\n\t#doesnt get all parameters from gizmos i.e. WaveFreq\n\n\t# get all info on the 'WaveFreq' parameter\n\tGIZMO = 'aStim2'\n\tPARAMETER = 'WaveFreq'\n\n\t# info = syn.getParameterInfo(GIZMO, PARAMETER)\n\t#\n\t# # get the array size (should be 100)\n\t# sz = syn.getParameterSize(GIZMO, PARAMETER)\n\t#\n\t# # write values 1 to 50 in second half of buffer\n\t# result = syn.setParameterValues(GIZMO, PARAMETER, np.arange(1, 51), 50)\n\t#\n\t# # read all values from buffer\n\t# syn.getParameterValues(GIZMO, PARAMETER, sz)\n\t#\n\t# # get all info on the 'Go' parameter\n\t# PARAMETER = 'Go'\n\t# info = syn.getParameterInfo(GIZMO, PARAMETER)\n\t#\n\t# # flip the switch\n\t# result = syn.setParameterValue(GIZMO, PARAMETER, 1)\n\t#\n\t# # check the value\n\tfreq = syn.getParameterValue(GIZMO, PARAMETER)\n\tprint('value =', freq)\n\tfreq = [freq]\n\n\t# also verify visually that the switch slipped in the run\n\t# time interface. This state change will be logged just\n\t# like any other variable change and saved with the runtime\n\t# state.\n\n\tnumTrials = 5 #total number of trials across stimuli\n\tISI = [2.0, 3.0, 4.0, 5.0] # ISI in seconds\n\n\t# flash parameters\n\tflash_dur = [.001] # flash durs in seconds (100 ms, 200 ms)\n\tluminance = [[1, 1, 1], [.86, .86, .86], [0, .1, 1]] # white , grayish, purple just for testing\n\n\t# auditory parameters\n\tduration = [.005] # in seconds; pulseDur in TDT\n\tsound_levels = [20.0, 40.0, 60.0, 80.0] # dB; waveAmp in TDT\n\n\t# Auditory on (T/F? 
if T then A+V, if F then Visual only)\n\tstims = {0: \"auditory_only\",\n\t\t\t 1: \"visual_only\",\n\t\t\t 2: \"A+V\"\n\t\t\t }\n\n\texper = Experiment(numTrials=numTrials, ISI=ISI, flash_dur=flash_dur, luminance=luminance, wave_freq=freq,\n\t\t\t\t\t pulse_dur=duration, wave_amp=sound_levels, stimulus=stims)\n\texper.run_experiment()", "def init_run_opt(self,value=1):\n self.run_opt={}\n self.run_opt['param']=value\n self.run_opt['analyzer']=value\n self.run_opt['compilation']=value\n self.run_opt['event']=value\n self.run_opt['dir']=value\n self.run_opt['launch']=value\n self.run_opt['control']=value\n self.run_opt['collect']=value\n self.run_opt['plot']=value \n self.run_opt['madweight_main']=value\n self.run_opt['relaunch']=0 #only for bugging case... -> desactivate\n self.run_opt['refine']=0 #only for bugging case... -> desactivate\n self.run_opt['clean']=0 #dangerous... -> desactivate\n self.control_opt()", "def __options(self):\n\t\ta = 1 if self.random else 0\n\t\tb = 2 if self.topoftheday else 0\n\t\tc = 4 if self.offline else 0\n\t\treturn a+b+c", "def print_info(self):\n print(\"Experiment key: \" + self.key)\n print(\"Experiment name: \" + self.name)\n print(\"Experiment path: \" + self.output_path)\n print(\"Auto-sync activated: \" + str(self.auto_sync))\n print(\"\")\n print(\"Experiment metadata: \")\n print(self.exp_metadata.to_str())", "def analysis_setup(self):\n pass", "def info(self):\n now = datetime.datetime.now().strftime(\"%Y-%m-%d-%HH-%MM-%SS\")\n print(f\"Exploration info ({now})\")\n print(f\"HDF name: {self.HDF_FILE}\")\n print(f\"Trajectory name: {self.trajectoryName}\")\n if self.model is not None:\n print(f\"Model: {self.model.name}\")\n if hasattr(self, \"nRuns\"):\n print(f\"Number of runs {self.nRuns}\")\n print(f\"Explored parameters: {self.exploreParameters.keys()}\")\n if hasattr(self, \"_t_end_exploration\") and hasattr(self, \"_t_start_exploration\"):\n print(f\"Duration of exploration: {self._t_end_exploration-self._t_start_exploration}\")", "def initialize_options(self):", "def get_details(self):\n\n return {\n \"embedder\": \"GloVe\",\n \"algorithm\": \"RNN-LSTM\"\n }", "def print_em_manual():\n print('------------ EM_Config Manual ------------')\n print('The list of keys for the configuration')\n print(EM_Config.CONFIG_KEYS)\n print()\n print('--- Option explanations ---')\n print('<parameter_name>_options available choices')\n print(EM_Config.OPTIONS_CHOICES)\n print('fixed: fix the parameter during training time')\n print('flexible: no constraint during traning time')\n print('diag: keep the parameter a diagnol matrix, only available for P_0_hat, Q, R')\n print('scalar: keep the parameter a scalar time identity matrix, only available for P_0_hat, Q, R')\n print('--- Option explanations ---')\n print()\n print('initial_<parameter_name> is the initial value of the EM algorithm for <parameter_name>')\n print()\n print('--- Stopping Criteria ---')\n print('threshold: considered converge whenever the improvement of log likelihood is less than threshold')\n print('num_iterations: perform EM algorithm of num_iterations')\n print('stop whenever either criteria is reached')\n print('--- Stopping Criteria ---')\n print()\n print('------------ EM_Config Manual ------------')", "def getShortDesc():\n\treturn \"Animator mode\"", "def info(self):\n print 'A= ', self.application\n print 'C= ', self.city\n print 'D= ', self.dataset.shape", "def _options(self):\n return", "def paargs(self):\n paopt_find = {'Night':self.night, 'Telescope':self.telescope, 
'Field':self.field, 'RA':self.ra,\n 'DEC':self.dec, 'TimeBeforeDiscovery': self.t_before, 'TimeAfterDiscovery': self.t_after,\n 'Program':self.program, 'datadir':self.datadir, 'outdir':self.outdir}\n paopt_coadd = {'outdir':self.outdir}\n paopt_extract = {'outdir':self.outdir}\n paopt_subimage = {'Program':self.program, 'Telescope':self.telescope, 'RA':self.ra, 'DEC':self.dec,\n 'PixelRadius':self.pixrad, 'tempdir':self.tempdir, 'outdir':self.outdir}\n paopt_imdiff = {'outdir':self.outdir}\n paopt_refstars = {'RA':self.ra, 'DEC':self.dec, 'outdir':self.outdir}\n paopt_phot = {'outdir':self.outdir, 'dumpfile':self.dump_pa('Photometry')}\n\n paopts={}\n defList={'Find_Data' : paopt_find,\n 'Coaddition' : paopt_coadd,\n 'Source_Extraction' : paopt_extract,\n 'Make_Subimages' : paopt_subimage,\n 'Image_Differencing' : paopt_imdiff,\n 'Choose_Refstars' : paopt_refstars,\n 'Photometry' : paopt_phot}\n\n def getPAConfigFromFile(PA,algs):\n def mergeDicts(source,dest):\n for k in source:\n if k not in dest:\n dest[k]=source[k]\n userconfig={}\n if PA in algs:\n fc=algs[PA]\n for k in fc: #do a deep copy leave QA config out\n if k != \"QA\":\n userconfig[k]=fc[k]\n defconfig={}\n if PA in defList:\n defconfig=defList[PA]\n mergeDicts(defconfig,userconfig)\n return userconfig\n\n for PA in self.palist:\n paopts[PA]=getPAConfigFromFile(PA,self.algorithms)\n\n\n return paopts", "def test_setup_params(opp):\n state = opp.states.get(ENTITY_CLIMATE)\n assert state.state == HVAC_MODE_COOL\n assert 21 == state.attributes.get(ATTR_TEMPERATURE)\n assert 22 == state.attributes.get(ATTR_CURRENT_TEMPERATURE)\n assert \"On High\" == state.attributes.get(ATTR_FAN_MODE)\n assert 67 == state.attributes.get(ATTR_HUMIDITY)\n assert 54 == state.attributes.get(ATTR_CURRENT_HUMIDITY)\n assert \"Off\" == state.attributes.get(ATTR_SWING_MODE)\n assert STATE_OFF == state.attributes.get(ATTR_AUX_HEAT)\n assert state.attributes.get(ATTR_HVAC_MODES) == [\n \"off\",\n \"heat\",\n \"cool\",\n \"auto\",\n \"dry\",\n \"fan_only\",\n ]", "def is_shed_tool_conf(self):", "def setup(self, options, results):", "def ee_prom_get_optical_params(self):\n focal_length = c_float()\n angular_deviation = c_float()\n focal_tilt = c_float()\n self._dll.ShamrockEepromGetOpticalParams(\n self._device, byref(focal_length), byref(angular_deviation), byref(focal_tilt))\n return focal_length.value, angular_deviation.value, focal_tilt.value", "def option_setup(self):\n self.get_master_contracts(exchange=Exchanges.NFO.name)\n self._options_master_contracts = self._master_contracts[\"NSE-OPT\"]\n self._future_master_contracts = self._master_contracts[\"NSE-FUT\"]\n self.create_bnf_instruments()", "def setup_optims(self):\n lr = self.train_config['lr']\n b1 = self.train_config['b1']\n b2 = self.train_config['b2']\n weight_decay = self.train_config['weight_decay']\n self.opt = torch.optim.Adam(self.network.parameters(), lr=lr, betas=(b1, b2),\n weight_decay=weight_decay)", "def show_opt(self):\n print(\n ''\n '\\n\\t' + bc.OKBLUE + ('%-*s %-*s %-*s %s' % (15, 'OPTION', 8, 'RQ', 18, 'VALUE', 'DESCRIPTION')) + bc.ENDC +\n '\\n\\t' + ('%-*s %-*s %-*s %s' % (15, '------', 8, '--', 18, '-----', '-----------')) +\n '\\n\\t' + ('%-*s %-*s %-*s %s' % (15, 'ip:', 8, 'y', 18, self.ip, 'IP or subnet to scan (192.168.1.100 or 192.168.1.1/24')) +\n '\\n\\t' + ('%-*s %-*s %-*s %s' % (15, 'debug:', 8, 'n', 18, self.debug, 'Turn debugging on (y/n)')) +\n '\\n'\n )", "def describe():", "def experiment_init(self):\n pass", "def help(self):\n\t\treturn", "def _setup(self) 
-> None:\n # Call base implementation\n super()._setup()\n\n # Configure the low-level integrator\n engine_options = self.simulator.engine.get_options()\n engine_options[\"stepper\"][\"iterMax\"] = 0\n engine_options[\"stepper\"][\"dtMax\"] = min(0.02, self.step_dt)\n engine_options[\"stepper\"][\"logInternalStepperSteps\"] = False\n\n # Set maximum computation time for single internal integration steps\n if self.debug:\n engine_options[\"stepper\"][\"timeout\"] = 0.0\n else:\n engine_options[\"stepper\"][\"timeout\"] = 2.0\n\n # Enable logging of geometries in debug mode\n if self.debug:\n engine_options[\"telemetry\"][\"isPersistent\"] = True\n\n # Update engine options\n self.simulator.engine.set_options(engine_options)\n\n # Set robot in neutral configuration\n qpos = self._neutral()\n framesForwardKinematics(\n self.robot.pinocchio_model, self.robot.pinocchio_data, qpos)", "def _configure(self):\n dconfig = DConfiguration(self._le2mserv.gestionnaire_graphique.screen)\n if dconfig.exec_():\n pms.TEMPS_PARTIE, pms.TREATMENT, pms.GRILLES = dconfig.get_config()\n self._le2mserv.gestionnaire_graphique.infoserv(\n [trans_TC(u\"Part time: {}\").format(pms.TEMPS_PARTIE),\n trans_TC(u\"Treatment: {}\").format(pms.get_treatment(pms.TREATMENT)),\n trans_TC(u\"Grids: {}\").format(len(pms.GRILLES))])", "def wypisz_info(self):\n print(f\"Samochód: {self.producent} {self.model}\")", "def test_guess_optics():\n from ctapipe.instrument import guess_telescope\n\n answer = guess_telescope(1855, 28.0 * u.m)\n\n od = OpticsDescription.from_name(answer.name)\n\n assert od.equivalent_focal_length.to_value(u.m) == 28\n assert od.num_mirrors == 1", "def info(self):\n\n\t\tprint(\"Pixels on a side: {0}\".format(self.data.shape[0]))\n\t\tprint(\"Pixel size: {0}\".format(self.resolution))\n\t\tprint(\"Total angular size: {0}\".format(self.side_angle))\n\t\tprint(\"lmin={0:.1e} ; lmax={1:.1e}\".format(self.lmin,self.lmax))", "def info(self):\r\n\r\n return self.sim_info", "def experiment(self) -> Any:", "def info(self):\n\n print(\"pupil file =\", self.pupil_file)\n print(\"phase file =\", self.phase_file)\n print(\"wavelengths and weights =\")\n for i in range(len(self.filter[0])):\n print(\" %10.5f %6.4f\" % (self.filter[0][i], self.filter[1][i]))\n print(\"pupil diameter (meters) =\", self.D)\n if self.oversample == 2:\n print(\"oversampling factor = 2 (Nyquist sampling)\")\n else:\n r = float(self.oversample) / 2.\n print(\"oversampling factor = %d (%g * Nyquist sampling)\" % \\\n (self.oversample, r))\n if self.type == SINGLE_PREC:\n print(\"computations will use single precision\")\n else:\n print(\"computations will use double precision\")\n print(\"size of output image =\", self.output_size)\n if self.cdelt is not None:\n print(\"output pixel size (arcsec) =\", self.cdelt / ARCSECtoDEGREES)\n if self.output_written:\n print(\"The computed PSF has been written to the output file.\")\n else:\n print(\"The output file has not been written yet.\")", "def setup(self):\n\n if self.user is 'Daisy':\n import socket\n host = socket.gethostname()\n\n simName = self.name_prefix[:self.name_prefix.find('_')]\n\n if 'ursa' in host:\n self.raw_sim_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback\n self.caesar_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/home/rad/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/home/dleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n elif 'flatironinstitute.org' or 'worker' in host:\n 
self.raw_sim_dir = '/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback # dummy\n self.caesar_dir = '/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/mnt/ceph/users/daisyleung/simba/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/mnt/home/daisyleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n else:\n raise NotImplementedError", "def _analyze(self):\n self.sim_setup_name, self.sweep_name = self.renderer.initialize_drivenmodal(\n **self.setup)\n\n self.renderer.analyze_sweep(self.sweep_name, self.sim_setup_name)\n # TODO: return the impedance, admittance and scattering matrices for later use", "def otherOptionsFullScreen(self):\n\n # Set Storage List\n storageList = []\n # Create Intel explain menu\n menuDisplay = \"\"\"\n \\n\n [*] Information Verbose:\n Ontop of Asking for the Username and \n Password Should we Gather Even\n More Information about the User such as \n GEOIP / ISP / User Agent etc. etc. \n This Requires Curl to be installed or \n file_get_contents in PHP on selected Server \n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"yellow\")\n # Set Verbose of Intel Gather\n self.results = input(\n \"\\nWould you like to Build a More In-depth Intel Report on Victim ( y Or n ): \")\n if self.results.lower()[0] == \"y\" or self.results.lower() == \"yes\":\n storageList.append(\"INTEL_VERBOSE_LOUD\")\n elif self.results.lower()[0] == \"n\" or self.results.lower() == \"no\":\n storageList.append(\"INTEL_VERBOSE_HUSH\")\n else:\n # Anything Else lets just Hush it then\n storageList.append(\"INTEL_VERBOSE_HUSH\")\n # Redirect Ask\n menuDisplay = \"\"\"\n \\n\n [*] Hitting Enter Keeps the Default \n = Redirect URL Which is the Same \n = URL of the Full-Screen Attack \n = you picked. For Instance If \n = it was AOL Full-Screen Attack\n = the default URL redirect would \n = be https://my.screenname.aol.com\n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"yellow\")\n self.results = input(\n \"After the Victim Inputs Info Where Should the Script Redirect?: \")\n # Check if nothing was entered\n if self.results == \"\" or self.results == \" \":\n # Append Default Redirect Naaaow\n storageList.append(\"REDIRECT_DEFAULT\")\n else:\n # No Checking on URL Let Them Use Whatever lol there bad i guess\n # Append Default Redirect Naaaow\n storageList.append(self.results)\n\n # Spoof link\n menuDisplay = \"\"\"\n \\n\n [*] Hitting Enter Keeps the Default \n = What do you want the URL Link to be spoofed\n = to? This will be displayed when the user\n = rolls over the link. Basically tricking\n = them making them think they are going\n = to that URL..\n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"yellow\")\n self.results = input(\n \"What should the URL be spoofed to? (ex: https://my.screenname.aol.com): \")\n # Check if nothing was entered\n if self.results == \"\" or self.results == \" \":\n # Append Default Redirect Naaaow\n storageList.append(\"DEFAULT_SPOOF\")\n else:\n # Append specified spoof url now\n storageList.append(self.results)\n\n # link name\n menuDisplay = \"\"\"\n \\n\n [*] Hitting Enter Keeps the Default \n = What do you want the Actual URL name\n = to be?\n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"yellow\")\n self.results = input(\n \"What should the URL name be? 
(ex: Aol Login): \")\n # Check if nothing was entered\n if self.results == \"\" or self.results == \" \":\n # Append Default Redirect Naaaow\n storageList.append(\"DEFAULT_URL_NAME\")\n else:\n # Append url name\n storageList.append(self.results)\n\n menuDisplay = \"\"\"\n \\n\n [*] Hitting Enter Keeps the Default \n = name of Index.php If you feel \n = the need to change the name please \n = do not add the actual extension .php \n = along with it only add whatever crazy \n = name you come up with\n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"yellow\")\n self.results = input(\n \"What Should the Main Index PHP File Be Called? ( ex: login ) : \")\n if self.results == \"\" or self.results == \" \":\n # Append Default Redirect Naaaow\n storageList.append(\"INDEX_DEFAULT\")\n else:\n check = self.results.find(\".\")\n # if it doesn't return a -1 it found a decimal\n if check != -1:\n # Throw Error we found a dot\n self.errorOutput(\n \"[*] Error - Didn't We Say Not to Add an Extension, WOW...\", \"yellow\")\n else:\n # Append name of the File\n storageList.append(self.results)\n\n menuDisplay = \"\"\"\n \\n\n [*] Hitting Enter Keeps the Default \n = Title of the Webpage.\n \"\"\"\n # display About this\n self.outputText(menuDisplay, \"blue\")\n self.results = input(\n \"What Should the Title of the Page be? (ex: AOL Login ) : \")\n if self.results == \"\" or self.results == \" \":\n # Append Default Redirect Naaaow\n storageList.append(\"TITLE_DEFAULT\")\n else:\n # Append name of the File\n storageList.append(self.results)\n\n # Return Storage List for Processing\n return storageList", "def setup(self):\n header_print(self.data['intro'])\n header_print(self.data['help'])\n random.shuffle(self.data['draw'])\n random.shuffle(self.data['locations'])\n random.shuffle(self.data['events'])\n random.shuffle(self.data['aces'])\n random.shuffle(self.data['personalities'])\n self.stats = {\n 'round': 0,\n 'powers': {\n 'MOONS': 6,\n 'SUNS': 6,\n 'WAVES': 6,\n 'LEAVES': 6,\n 'WYRMS': 6,\n 'KNOTS': 6,\n },\n 'hand': self.data['draw'][:],\n 'discard': [],\n 'active': [],\n 'opponent': {},\n }", "def _default_setup(self):\n self._n_configs = 1\n self._sn_size = 100\n self._nt = 10000\n self._active_brdch = np.zeros(\n (), dtype=[(\"SIS 3302\", bool, (4, 8)), (\"SIS 3305\", bool, (2, 8))]\n )\n self._active_brdch[\"SIS 3302\"][0][0] = True\n self._active_brdch[\"SIS 3305\"][0][0] = True\n self._config_names = []\n self._active_config = (\"config01\",)\n self._sis3305_mode = 0", "def info(self):", "def info(self):", "def get_improper_info(self):\n return", "def setup(self):\n\t\tpass", "def process_configuration(self):\n print \"I \",self.I", "def setup(self):\n pass # pragma: no cover", "def show_parameters(self):\n with np.printoptions(precision=3, suppress=True):\n print('number of wind phase = {}'.format(self.ncomp))\n print('galactic parameter = {}'.format(self.scaling_field))\n print('reference height = {}'.format(self.z0))\n for p in ['cool_params','hot_params','params','ref_params','scaling_params']:\n params = getattr(self,p)\n print(p)\n for k,v in params.items():\n print(' {} = {}'.format(k,v))", "def configuration():", "def setup(self):\n\n self._enable_torque(self._reg.TORQUE_ENABLE)\n self.change_operating_mode(self._reg.MODE_EXT_POSI)\n # set to max velocity\n self.change_veloity(self._default_velocity)", "def init(self, info):\r\n# info.object.mpl_setup()\r\n return True", "def options():\n print \"\"\"Options summary:\n -h, --help\n -u, --usage\n -v, --verbose 
<verb_level>\n -e, --endpoint <endpoint>\n -i, --interface-type <iface_type>\n -r, --recursive\n --dbs-conf <conf_file>\n --show-prod\n --show-caf\n --only-subscribed\n --only-custodial\n \"\"\"", "def _usage_options_example(self):\n pass", "def algorithmInfo():\n\t\treturn r\"\"\"TODO\"\"\"", "def default_config(cls) -> dict:\n return {\n \"observation\": {\n \"type\": \"Kinematics\"\n },\n \"action\": {\n \"type\": \"DiscreteMetaAction\"\n },\n \"simulation_frequency\": 15, # [Hz]\n \"policy_frequency\": 1, # [Hz]\n \"other_vehicles_type\": \"highway_env.vehicle.behavior.IDMVehicle\",\n \"screen_width\": 600, # [px]\n \"screen_height\": 150, # [px]\n \"centering_position\": [0.3, 0.5],\n \"scaling\": 5.5,\n \"show_trajectories\": False,\n \"render_agent\": True,\n \"offscreen_rendering\": os.environ.get(\"OFFSCREEN_RENDERING\", \"0\") == \"1\",\n \"manual_control\": False,\n \"real_time_rendering\": False\n }", "def info() -> None:", "def get_params_info(cls):\n return dict(\n config='laygo configuration dictionary.',\n threshold='transistor threshold flavor.',\n draw_boundaries='True to draw boundaries.',\n num_blk='number of driver segments.',\n show_pins='True to draw pin geometries.',\n )", "def control_opt(self):\n\n\n if self.run_opt['refine']:\n self.run_opt['relaunch']=1\n \n #check value for 'madweight_main'\n for i in range(3,9)+[-1,-3]:\n if self.run_opt[num_to_tag[i]]==1:\n self.run_opt['madweight_main']=1\n break\n\n if self.run_opt['relaunch']==1:\n self.run_opt['control']=1", "def help(self):", "def help(self):", "def __init__(self, folder):\n print \"folder passed is \", folder\n self.folder = folder\n self.geometry = gf.geometry(self.folder)\n self.elements = gf.dictionary_set()\n self.area = np.zeros(shape = (8))\n self.Vol = (self.geometry.properties['span_number']*(self.geometry.properties['span_width']*\n self.geometry.properties['span_height'] + self.geometry.properties['cover_height']\n *self.geometry.properties['span_width']/2))\n self.F = np.zeros(shape = (8, 8))\n of.view_factor(self.geometry, self.F, self.area, self.Vol)\n tran = [self.geometry.properties['tra_cover_out'],0.0,0.0,\n self.geometry.properties['tra_sidewall_out'],\n self.geometry.properties['tra_cover_in'],\n self.geometry.properties['tra_sidewall_in'],0.0,0.0]\n emi = [self.geometry.properties['emi_cover_out'],1.0,1.0,\n self.geometry.properties['emi_sidewall_out'],\n self.geometry.properties['emi_cover_in'],\n self.geometry.properties['emi_sidewall_in'],1.0,1.0] \n self.tr, self.em, self.re = of.optictal_prop(tran,emi)\n if ((self.tr + self.em).any() > 1.0):\n print \"error in optical properties\"\n self.T = np.zeros(shape = (2,10))\n self.RH = np.zeros(shape = (2,10))\n # 8 inside,9 outside \n self.qcond = np.zeros(shape = (2,8))\n self.qconv = np.zeros(shape = (2,8))\n self.qrad = np.zeros(shape = (2,8))\n self.j = np.zeros(shape = (2,8))\n self.g = np.zeros(shape = (2,8))\n self.alpha = np.zeros(shape = (2,8))\n deltaT = 300\n RH_in = 0.6\n fg.set_initial_conditions(self.geometry.properties['t_air_inside'],\n 278,\n RH_in,self.T,self.RH , self.geometry.properties['t_air'],self.g,\n self.geometry.properties['sky_temp'])\n self.T, self.j, self.g, self.alpha, self.qrad, self.qconv = fg.solver_T(self.T,self.qrad,self.qconv,self.alpha,self.j,self.g,self.em,self.tr,\n self.geometry.properties['wind_speed'],\n self.F,self.geometry.properties['heat_flux'],1,1.0,self.area,\n self.geometry.properties['rho'],self.geometry.properties['cp'],\n self.Vol,self.geometry.properties['degree_window'],deltaT)", 
"def optionHelp(self):\n return {}", "def __init__(self, **kwargs):\n\n self.is_complete = False\n\n # See if this is a tanh model calculation\n is_phenom = self.is_phenom = self._check_if_phenom(**kwargs)\n\n if 'problem_type' not in kwargs:\n kwargs['problem_type'] = 101\n\n self.kwargs = kwargs\n\n # Print info to screen\n if self.pf['verbose']:\n print_sim(self)", "def config():\n experiment_dir = './experiments'\n simulation_steps = 1000\n device = 'cpu'\n path_to_molecules = os.path.join(experiment_dir, 'data/ethanol.xyz')\n simulation_dir = os.path.join(experiment_dir, 'simulation')\n training_dir = os.path.join(experiment_dir, 'training')\n model_path = os.path.join(training_dir, 'best_model')\n overwrite = True", "def setup(self):\r\n pass", "def show_help():\n\tprint \"This is the Phototime script to manage pictures taken by date\"\n\tprint \"The list of valid parameters are :\"\n\tprint \"--ftype=ftype Enter the filetype\"\n\tprint \"--path=filepath Specify the path to the director containing the pictures\"\n\tprint \"Invoke this script thus :\"\n\tprint \"python phototime.py --ftype=filetype --path=filepath\\n\\n\"", "def details(self):\n pass", "def get_data_config(self):\n conf_map = {}\n\n if self.alien_alg.currentIndex() == 1:\n conf_map['alien_alg'] = '\"block_aliens\"'\n if len(self.aliens.text()) > 0:\n conf_map['aliens'] = str(self.aliens.text()).replace('\\n', '')\n if self.alien_alg.currentIndex() == 2:\n conf_map['alien_alg'] = '\"alien_file\"'\n if len(self.alien_file.text()) > 0:\n conf_map['alien_file'] = '\"' + str(self.alien_file.text()) + '\"'\n elif self.alien_alg.currentIndex() == 3:\n conf_map['alien_alg'] = '\"AutoAlien1\"'\n if len(self.AA1_size_threshold.text()) > 0:\n conf_map['AA1_size_threshold'] = str(self.AA1_size_threshold.text())\n if len(self.AA1_asym_threshold.text()) > 0:\n conf_map['AA1_asym_threshold'] = str(self.AA1_asym_threshold.text())\n if len(self.AA1_min_pts.text()) > 0:\n conf_map['AA1_min_pts'] = str(self.AA1_min_pts.text())\n if len(self.AA1_eps.text()) > 0:\n conf_map['AA1_eps'] = str(self.AA1_eps.text())\n if len(self.AA1_amp_threshold.text()) > 0:\n conf_map['AA1_amp_threshold'] = str(self.AA1_amp_threshold.text())\n if self.AA1_save_arrs.isChecked():\n conf_map['AA1_save_arrs'] = \"True\"\n if len(self.AA1_expandcleanedsigma.text()) > 0:\n conf_map['AA1_expandcleanedsigma'] = str(self.AA1_expandcleanedsigma.text())\n\n if len(self.amp_intensity.text()) > 0:\n conf_map['amp_threshold'] = str(self.amp_intensity.text())\n if len(self.binning.text()) > 0:\n conf_map['binning'] = str(self.binning.text()).replace('\\n', '')\n if len(self.center_shift.text()) > 0:\n conf_map['center_shift'] = str(self.center_shift.text()).replace('\\n', '')\n if len(self.adjust_dimensions.text()) > 0:\n conf_map['adjust_dimensions'] = str(self.adjust_dimensions.text()).replace('\\n', '')\n\n return conf_map", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def __simSetup(self):\n self.__prime_ip = [(io[0], '$', io[2]) for io in self.listPrimeIos(True) if io[1] == 'i']\n\n # setting primary output values to None\n for prime_op in self.__prime_op:\n self.dGrph[prime_op][1] = None\n\n # setting cfg_blck output values to None\n blck_ids = [blck[0] for blck in self.listCfgBlcks()]\n for cfg_id in blck_ids:\n self.dGrph[cfg_id][1][1] = None\n \n # setting ari_blck output values to None\n blck_ids = [blck[0] for blck in self.listAriBlcks()]\n for ari_id in blck_ids:\n self.dGrph[ari_id][1][0][1] = None\n 
self.dGrph[ari_id][1][1][1] = None\n self.dGrph[ari_id][1][2][1] = None\n \n # setting tribuf output values to None\n blck_ids = [blck[0] for blck in self.listTribufs()]\n for tri_id in blck_ids:\n self.dGrph[tri_id][2][1] = None\n \n # setting gate output values to None\n blck_ids = [blck[0] for blck in self.listGates()]\n for gate_id in blck_ids:\n self.dGrph[gate_id][1][2] = None", "def updateParameters(self):\r\n\r\n\t\tif self.approach.altered:\r\n\t\t\tself.transform.enabled = True\r\n\r\n\t\t\tif self.approach.value == 'Locations in the DEM generated from field observations':\r\n\t\t\t\tself.predefined_pattern.enabled = False\r\n\t\t\t\tself.pattern_workspace.enabled = False\r\n\t\t\t\tself.point_matrix_size.enabled = True\r\n\t\t\t\tself.point_vectors.enabled = True\r\n\t\t\t\tself.mapping_field.enabled = True\r\n\t\t\t\tself.move_to_max.enabled = True\r\n\t\t\t\tself.output_sim_matrix.enabled = True\r\n\t\t\t\tself.mh_dil_val.enabled = False\r\n\r\n\t\t\t\tself.mh_iteration.enabled = False\r\n\t\t\t\tself.mh_iteration.value = False\r\n\t\t\t\tself.output_table.enabled = False\r\n\t\t\t\tself.output_raster_workspace.enabled = False\r\n\t\t\t\tself.output_raster_workspace.value = ''\r\n\r\n\t\t\telif self.approach.value == 'Locations in the DEM versus pre-defined pattern':\r\n\t\t\t\tself.predefined_pattern.enabled = True\r\n\t\t\t\tself.point_matrix_size.enabled = True\r\n\t\t\t\tself.point_vectors.enabled = True\r\n\t\t\t\tself.mapping_field.enabled = True\r\n\t\t\t\tself.move_to_max.enabled = True\r\n\t\t\t\tself.mh_dil_val.enabled = True\r\n\t\t\t\tself.mh_iteration.enabled = True\r\n\t\t\t\tself.output_table.enabled = True\r\n\t\t\t\tself.output_sim_matrix.enabled = False\r\n\t\t\t\tself.output_sim_matrix.value = ''\r\n\t\t\t\tself.output_raster_workspace.enabled = False\r\n\t\t\t\tself.output_raster_workspace.value = ''\r\n\r\n\t\t\telse: # seek pre-defined pattern in DEM\r\n\t\t\t\tself.predefined_pattern.enabled = True\r\n\t\t\t\tself.point_matrix_size.enabled = True\r\n\t\t\t\tself.mh_iteration.enabled = True\r\n\t\t\t\tself.output_raster_workspace.enabled = True\r\n\t\t\t\tself.point_vectors.enabled = False\r\n\t\t\t\tself.point_vectors.value = ''\r\n\t\t\t\tself.mapping_field.enabled = False\r\n\t\t\t\tself.move_to_max.enabled = False\r\n\t\t\t\tself.move_to_max.value = False\r\n\t\t\t\tself.mh_dil_val.enabled = True\r\n\t\t\t\tself.output_sim_matrix.enabled = False\r\n\t\t\t\tself.output_sim_matrix.value = ''\r\n\t\t\t\tself.output_table.enabled = False\r\n\t\t\t\tself.output_table.value = ''\r\n\r\n\t\tif self.mh_iteration.altered:\r\n\r\n\t\t\tif self.mh_iteration.value is True:\r\n\t\t\t\tself.mh_dil_start.enabled = True\r\n\t\t\t\tself.mh_dil_stop.enabled = True\r\n\t\t\t\tself.mh_dil_step.enabled = True\r\n\t\t\t\tself.mh_dil_val.enabled = False\r\n\t\t\t\tself.mh_dil_val.value = 1\r\n\r\n\t\t\telse:\r\n\t\t\t\tif self.approach.value == 'Locations in the DEM generated from field observations':\r\n\t\t\t\t\tself.mh_dil_val.enabled = False\r\n\t\t\t\t\tself.mh_dil_val.value = 1\r\n\t\t\t\telse:\r\n\t\t\t\t\tself.mh_dil_val.enabled = True\r\n\r\n\t\t\t\tself.mh_dil_start.enabled = False\r\n\t\t\t\tself.mh_dil_stop.enabled = False\r\n\t\t\t\tself.mh_dil_step.enabled = False\r\n\t\t\t\tself.mh_dil_start.value = 0.01\r\n\t\t\t\tself.mh_dil_stop.value = 1\r\n\t\t\t\tself.mh_dil_step.value = 0.1\r\n\r\n\t\tif self.move_to_max.altered:\r\n\t\t\tif self.move_to_max.value is True:\r\n\t\t\t\tself.move_to_max_distance.enabled = 
True\r\n\t\t\telse:\r\n\t\t\t\tself.move_to_max_distance.enabled = False\r\n\t\t\t\tself.move_to_max_distance.value = 3\r\n\r\n\t\tif self.transform.altered:\r\n\t\t\tif self.transform.value == 'Work directly on the elevation matrix':\r\n\t\t\t\tself.size_of_the_cell.enabled = False\r\n\t\t\telif self.transform.value == 'Perform a local translation':\r\n\t\t\t\tself.size_of_the_cell.enabled = False\r\n\t\t\telif self.transform.value == 'Compute slopes' or self.transform.value == \\\r\n\t\t\t\t\t'Compute slopes and perform local translation':\r\n\t\t\t\tself.size_of_the_cell.enabled = True\r\n\r\n\t\tif self.predefined_pattern.altered:\r\n\t\t\tif self.predefined_pattern.value == 'Custom pattern':\r\n\t\t\t\tself.pattern_workspace.enabled = True\r\n\r\n\t\t\t\tself.mh_iteration.value = False\r\n\t\t\t\tself.mh_iteration.enabled = False\r\n\t\t\t\tself.mh_dil_start.enabled = False\r\n\t\t\t\tself.mh_dil_stop.enabled = False\r\n\t\t\t\tself.mh_dil_step.enabled = False\r\n\t\t\t\tself.mh_dil_start.value = 0.01\r\n\t\t\t\tself.mh_dil_stop.value = 1\r\n\t\t\t\tself.mh_dil_step.value = 0.1\r\n\t\t\t\tself.mh_dil_val.enabled = False\r\n\t\t\t\tself.mh_dil_val.value = 1\r\n\t\t\telse:\r\n\t\t\t\tself.pattern_workspace.enabled = False", "def show_info(self):\n print 'Querying the station for the configuration...'\n config = self.station.getConfig()\n for key in sorted(config):\n print '%s: %s' % (key, config[key])", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass" ]
[ "0.6185093", "0.60498106", "0.60332525", "0.6013268", "0.59627753", "0.592616", "0.5911552", "0.58811486", "0.58507645", "0.58053464", "0.5802036", "0.5802036", "0.5802036", "0.5802036", "0.57227117", "0.56944853", "0.56828725", "0.56805164", "0.5645888", "0.56457156", "0.56414586", "0.5622991", "0.56153584", "0.5611901", "0.5602003", "0.5601346", "0.5598582", "0.5569052", "0.5552781", "0.55505633", "0.55475575", "0.55431455", "0.55341345", "0.55330473", "0.55258894", "0.5510652", "0.55067503", "0.54990447", "0.5496476", "0.5495971", "0.54871565", "0.54751706", "0.5472065", "0.5452694", "0.5447673", "0.54474217", "0.54393023", "0.54373556", "0.5428729", "0.5428141", "0.54281324", "0.5427374", "0.54271406", "0.5415695", "0.54122007", "0.5407447", "0.5406158", "0.5406158", "0.54011595", "0.5398101", "0.53967094", "0.5394056", "0.5382471", "0.53805506", "0.5374554", "0.5374497", "0.53712535", "0.53687537", "0.5367182", "0.53569365", "0.5352889", "0.5351297", "0.53505003", "0.53395194", "0.53395194", "0.5335506", "0.5330032", "0.53235316", "0.53229964", "0.5322841", "0.5321675", "0.5320614", "0.5318581", "0.5317784", "0.5317784", "0.5317784", "0.5317784", "0.53174406", "0.53174216", "0.5317186", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056", "0.5317056" ]
0.0
-1
details about dark field image
def __init__(self, basePath, darkframePath=None, flip_image_across_axis=None, show_image=False, save_image=False, save_img_type='.tif', savePath=None, savename=None, save_plot=False): self.basePath = basePath img, mean, std = calculate_darkfield(self.basePath, darkframePath=darkframePath, flip_image_axes=flip_image_across_axis, show_image=show_image, save_image=save_image, save_img_type=save_img_type, savePath=savePath, savename=savename, save_plot=save_plot) self.img = img self.mean = mean self.std = std
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dark(s='dark'):\n s = s.strip()[:80] #truncate to 80 char to fit in FITS header\n print camera.SetShutter(2)\n camera.status.imgtype = 'DARK'\n camera.status.object = s\n camera.status.update()", "def im_show(image_path):\n img = cv.imread(image_path, cv.IMREAD_ANYDEPTH)\n cv.namedWindow('image', cv.WINDOW_NORMAL)\n ret, threshed = cv.threshold(img, 0, 2 ** 16, cv.THRESH_BINARY)\n print(ret)\n print(threshed.shape, threshed.dtype)\n cv.imshow('image', threshed)\n cv.waitKey(0)\n cv.destroyAllWindows()", "def getimage(self):", "def image_info(img):\n\tprint(img.format)\n\tprint(img.size)\n\tprint(img.mode)", "def test_check_image_color(self):\n result = analyzer.check_image_color(\"tests/test_files/sample.jpg\")\n self.assertEqual(result, \"light\")", "def image(self,v):\n self.set('heightfield.image',v)\n #assert fileExists(environment.makeFilePath(v)), \"Warning: HeightField's image file, {}, not found in images folder.\".format(v) \n return self", "def get_dark_channel(self,img, *, size):\n #Extract the dark/hazy part from the image\n minch = np.amin(img, axis=2)\n box = cv2.getStructuringElement(cv2.MORPH_RECT, (size // 2, size // 2))\n return cv2.erode(minch, box)", "def small_image(self):\n pass", "def print_image_info(input_image):\n print()\n print(\"Basic Information on image: {}\".format(input_image.filename))\n print(\"Format: {}\".format(input_image.format))\n print(\"Mode: {}\".format(input_image.mode))\n print(\"Size: {}\".format(input_image.size))\n print(\"Width: {}\".format(input_image.width))\n print(\"Height: {}\".format(input_image.height))\n print(\"Palette: {}\".format(input_image.palette))\n print()", "def on_image(self, image):", "def get_image_attributes(self, element):", "def image_cb(self, msg):\n self.has_image = True\n self.camera_image = msg\n self.get_light_state()", "def darkText(img):\n kernel = np.ones((30, 30), np.uint8) \n img_orig = cv2.morphologyEx(img, cv2.MORPH_BLACKHAT, kernel)\n \n TH = 150\n img_orig[(img_orig[:,:,0] < TH) | (img_orig[:,:,1] < TH) | (img_orig[:,:,2] < TH)] = (0,0,0)\n \n img_orig = closing(img_orig, size=(1, int(img.shape[1] / 8)))\n \n return (cv2.cvtColor(img_orig, cv2.COLOR_BGR2GRAY) != 0).astype(np.uint8)", "def detail(self):\n return self.uniform(\"detail\",\n self.img_scale * .05,\n self.img_scale * .2)", "def state_img(self):\n if self.master.v_state == 'victory':\n return self.vict_img\n elif self.master.v_state == 'draw':\n return self.draw_img\n if self.master.v_state == 'defeat':\n return self.def_img", "def do_info (self, line) :\n\t\tprint\n\t\tprint get_info_string( self.__image )\n\t\tprint", "def get_image(self):\n if self._image is None:\n image_data = np.load(self.image_file)\n if not isinstance(image_data, np.ndarray):\n image_data = image_data['arr_0']\n self.meta_data = ImageWrapper.load_metadata(self.image_file+\".meta\")\n exposure_time = self.meta_data['exposure_time_us'] * 1e-6\n dark_level = float(self.meta_data['black_level'])\n # saturation_mask = image_data.max(axis=2) >= 4094\n image_data = np.clip((image_data.astype(np.float32) - dark_level),\n a_min=0.0, a_max=None) / exposure_time\n if self.original_vignetting is not None:\n image_data = image_data / self.original_vignetting\n if self.crop is not None:\n image_data = image_data[\n self.crop[1,0]:self.crop[1,1],\n self.crop[0,0]:self.crop[0,1]\n ]\n # saturation_mask = saturation_mask[\n # self.crop[1,0]:self.crop[1,1],\n # self.crop[0,0]:self.crop[0,1]\n # ]\n if self.down_sample is not None:\n image_data = cv2.resize(\n image_data,\n 
dsize=None,\n fx=1./self.down_sample,\n fy=1./self.down_sample,\n interpolation=cv2.INTER_AREA\n )\n # saturation_mask = cv2.resize(\n # saturation_mask,\n # dsize=None,\n # fx=1./self.down_sample,\n # fy=1./self.down_sample,\n # interpolation=cv2.INTER_AREA\n # )\n if self.reup_sample is not None:\n image_data = cv2.resize(\n image_data,\n dsize=None,\n fx=self.reup_sample,\n fy=self.reup_sample,\n interpolation=cv2.INTER_CUBIC\n )\n # saturation_mask = cv2.resize(\n # saturation_mask,\n # dsize=None,\n # fx=self.reup_sample,\n # fy=self.reup_sample,\n # interpolation=cv2.INTER_CUBIC\n # )\n image = torch.tensor(np.transpose(image_data, (2,0,1)), dtype=torch.float32, device=self.device)\n # saturation_mask = torch.tensor(saturation_mask, dtype=torch.float32, device=self.device)\n if not self.lazy:\n self._image = image\n # self._saturation_mask = saturation_mask\n else:\n image = self._image\n # saturation_mask = self._saturation_mask\n\n return image#, saturation_mask", "def generate_image_info(image):\n image = ee.Image(image)\n image_vis = image.visualize(**{\n 'min': image_min,\n 'max': image_max,\n 'palette': image_palette\n })\n\n print(image_min, image_max)\n\n if 'hillshade' in r and r['hillshade']:\n image_vis = hillshade(image_vis,\n image.subtract(image_min).divide(ee.Image.constant(image_max).subtract(image_min)),\n True)\n\n m = image_vis.getMapId()\n\n mapid = m.get('mapid')\n token = m.get('token')\n\n url = 'https://earthengine.googleapis.com/map/{mapid}/{{z}}/{{x}}/{{y}}?token={token}'.format(\n mapid=mapid,\n token=token\n )\n\n result = {\n 'mapid': mapid,\n 'token': token,\n 'url': url\n }\n return result", "def check_image_color(image):\n\n def check_color(i, j, k):\n \"\"\" Function used only for DEBUGGING\"\"\"\n img.show()\n image = Image.new(\"RGB\", (200, 200), (int(Y), int(Y), int(Y)))\n image.show()\n image = Image.new(\"RGB\", (200, 200), (int(i), int(j), int(k)))\n image.show()\n\n if not os.path.isfile(image):\n return \"Image not found\"\n\n def calculate_bgr(data):\n average_color_per_row = numpy.average(data, axis=0)\n average_color = numpy.average(average_color_per_row, axis=0)\n return tuple(average_color)\n\n def calculate_y(r, g, b):\n alpha = 0.299\n betta = 0.587\n gamma = 0.114\n return alpha * r + betta * g + gamma * b\n\n # split the image for four squares calucate averate pixel for them and take higest value\n # blure image and save to /Library/Caches as com.apple.desktop.admin.png\n # in case using blur tool --> blur = cv2.blur(img,(5,5))\n try:\n img_cv_data = cv2.imread(image)\n B, G, R = calculate_bgr(img_cv_data)\n Y = calculate_y(B, G, R)\n height, width = img_cv_data.shape[:2]\n except Exception as err:\n print(f\"[ERROR] {err} with image: {image}\")\n return \"Error parsing image\"\n\n # image detection\n if Y < 72.0:\n _type = \"dark\"\n elif Y >= 73.0 and Y <= 108.0:\n _type = \"evening\"\n else:\n _type = \"light\"\n\n return _type", "def show_image(self):\n cv2.imshow('Image', self.__diff_image())\n cv2.waitKey()", "def show_image_ref():\n return get_image_ref()", "def image(self):\n return self._image", "def ff_correct_image(image):\n pass", "def ff_correct_image(image):\n pass", "def getImage(self):\n return self.get('heightfield.image')", "def set_image(self):\r\n return loader.GFX['title_box']", "def main():\n original = SimpleImage('images/mt-rainier.jpg')\n original.show()\n reflected = make_reflected('images/mt-rainier.jpg')\n reflected.show()", "def set_image(self):\r\n return loader.GFX['instructions_box']", "def 
read_image(img):\n out = Image.open(img)\n return Technicolor(out)", "def darker(image):\r\n # Demonstrate looping over all the pixels of an image,\r\n # changing each pixel to be half its original intensity.\r\n for pixel in image:\r\n pixel.red = pixel.red // 2\r\n pixel.green = pixel.green // 2\r\n pixel.blue = pixel.blue // 2", "def get_light_state(self):\n\n cv_image = self.bridge.imgmsg_to_cv2(self.camera_image, \"bgr8\")\n\n #Get classification\n tl_image_rgb, color_index = self.light_classifier.get_classification(cv_image)\n tl_cv_image = cv2.cvtColor(tl_image_rgb, cv2.COLOR_RGB2BGR)\n try:\n self.tl_detected_image_pub.publish(self.bridge.cv2_to_imgmsg(tl_cv_image, \"bgr8\"))\n except CvBridgeError as e:\n print(e)", "def large_image(self):\n pass", "def show_image(self):\n cv2.imshow(self.config.DISPLAY_NAME, self.image)", "def _process_img_semantic(self, sensor_data):\n sensor_data.convert(self.cc)\n img = np.array(sensor_data.raw_data).reshape((self.img_y, self.img_x, 4))\n img = img[:, :, :3] # sensor is actualy rgba, we dont need alpha values\n self.semantic = img # need to scale rgb values to be {0,1}", "def _imshow_dtm(image):\n import copy\n import matplotlib as mpl\n from matplotlib.colors import Normalize\n import plottool as pt\n UNKNOWN = -32767\n vmin = image[image != UNKNOWN].min()\n vmax = image.max()\n norm = Normalize(vmin=vmin, vmax=vmax)\n cmap = copy.copy(mpl.cm.get_cmap('viridis'))\n cmap.set_bad((0, 0, 0))\n pt.imshow(image, cmap=cmap, norm=norm, fnum=1)", "def __diff_image(self):\n img = cv2.imread(self.imagefile()).copy()\n Reference.__draw_bugs(img, self.__true_positives, False, 1)\n Reference.__draw_bugs(img, self.__false_negatives, (0, 255, 0))\n Reference.__draw_bugs(img, self.__false_positives, (0, 0, 255))\n return img", "def img_disp(name,img):\n cv2.imshow(name,img.astype(int)/255.0)\n cv2.waitKey()", "def process(self, image):", "def ibl_options_panel(self, context):\r\n \r\n AM = context.window_manager.asset_m\r\n node_group = bpy.context.scene.world.node_tree.nodes\r\n layout = self.layout\r\n \r\n box = layout.box()\r\n row = box.row()\r\n row.alignment = 'CENTER'\r\n row.label(\"IMAGE\")\r\n row = box.row(align = True)\r\n row.label(\"Rotation:\")\r\n col = row.column()\r\n col.prop(node_group[\"Mapping\"], \"rotation\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Projection:\")\r\n row.prop(AM, \"projection\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Blur:\")\r\n row.prop(node_group[\"ImageBlur\"].inputs[1], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Visible:\")\r\n row.prop(bpy.context.scene.world.cycles_visibility, \"camera\")\r\n row = box.row(align = True)\r\n row.label(\"Transparent:\")\r\n row.prop(bpy.context.scene.cycles, \"film_transparent\")\r\n \r\n \r\n \r\n box = layout.box()\r\n row = box.row(align = True)\r\n row.label(\"Gamma:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[0], \"default_value\", text = \"\")\r\n \r\n box = layout.box()\r\n row = box.row()\r\n row.alignment = 'CENTER'\r\n row.label(\"LIGHT\")\r\n row = box.row(align = True)\r\n row.label(\"Strength:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[2], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Saturation:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[3], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Hue:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[4], \"default_value\", text = \"\")\r\n row = 
box.row(align = True)\r\n row.label(\"Mix Hue:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[5], \"default_value\", text = \"\")\r\n \r\n box = layout.box()\r\n row = box.row()\r\n row.alignment = 'CENTER'\r\n row.label(\"GLOSSY\")\r\n row = box.row(align = True)\r\n row.label(\"Strength:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[7], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Saturation:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[8], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Hue:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[9], \"default_value\", text = \"\")\r\n row = box.row(align = True)\r\n row.label(\"Mix Hue:\")\r\n row.prop(node_group[\"AM_IBL_Tool\"].inputs[10], \"default_value\", text = \"\")\r\n \r\n layout.operator(\"wm.save_ibl_settings\", text = \"Save settings\", icon = 'FILE_TICK')", "def image_path_changed(self):\r\n #load the image, if no path given open dialog\r\n if self.ui.imagePath.text() == \"\":\r\n self.image_file_dialog()\r\n \r\n #catch wrong path and permission errors\r\n try:\r\n img = Image.open(str(self.ui.imagePath.text()))\r\n except (IOError, OSError) as e:\r\n logging.error(\"failed to open file: \"+str(e))\r\n return ()\r\n\r\n #check the format of the loaded image and adjust the input field accordingly\r\n logging.debug(\"image mode: \"+img.mode)\r\n self.ui.channel_selection.clear()\r\n \r\n if img.mode == \"RGB\":\r\n self.ui.channel_selection.addItem(\"red\",0)\r\n self.ui.channel_selection.addItem(\"green\",1)\r\n self.ui.channel_selection.addItem(\"blue\",2)\r\n \r\n #set the phi0 input\r\n for field in [self.ui.phi0Ch1,self.ui.phi0Ch2,self.ui.phi0Ch3]:\r\n field.setMaximum(255)\r\n field.setMinimum(0)\r\n field.setEnabled(True)\r\n self.ui.phi0LabelCh1.setText(\"red:\")\r\n self.ui.phi0LabelCh2.setText(\"green:\")\r\n self.ui.phi0LabelCh3.setText(\"blue:\")\r\n elif img.mode == \"L\" or img.mode == \"I\":\r\n #none because there is no third dimension in greyscale\r\n self.ui.channel_selection.addItem(\"grey\",None)\r\n modeMax = {\"L\":255,\"I\":2**31}\r\n modeMin = {\"L\":0,\"I\":-2**31}\r\n \r\n \r\n #set the phi0 input\r\n self.ui.phi0Ch1.setEnabled(True)\r\n self.ui.phi0Ch1.setMaximum(modeMax[img.mode])\r\n self.ui.phi0Ch1.setMinimum(modeMin[img.mode])\r\n self.ui.phi0Ch2.setDisabled(True)\r\n self.ui.phi0Ch3.setDisabled(True)\r\n \r\n self.ui.phi0LabelCh1.setText(\"phi0:\")\r\n else:\r\n logging.warning(\"unsupported image mode \"+img.mode+\r\n \" (check pillow docs for details) \"+\r\n \"expected RGB or greyscale image, proceed with caution\")\r\n\r\n #create np array from image and rotate if needed\r\n if self.settings[\"rotate on load\"]:\r\n self.npImg = np.rot90(np.array(img))\r\n else:\r\n self.npImg = np.array(img)\r\n \r\n #mirror the image if desired\r\n if self.settings[\"mirror on load\"]:\r\n self.npImg = np.fliplr(self.npImg)\r\n\r\n \r\n \r\n\r\n \r\n #check for dpi info\r\n try:\r\n dpi = img.info['dpi']\r\n \r\n #try string conversion and tuple first and then deal with type of number\r\n if type(dpi) == str:\r\n try:\r\n dpi = float(dpi)\r\n except ValueError:\r\n logging.warning(\"Can not parse dpi info: \"+dpi)\r\n dpi = 0\r\n elif type(dpi) == tuple:\r\n if len(dpi) == 1:\r\n dpi = dpi[0]\r\n elif len(dpi) == 2:\r\n if dpi[0] == dpi[1]:\r\n dpi = dpi[0]\r\n else:\r\n logging.warning(\"different DPI for the two dimensions found \" +\r\n repr(dpi)+ \" THIS IS NOT SUPPORTED\")\r\n dpi = 0\r\n else:\r\n logging.warning(\"dpi 
has more than 2 dimensions\" +\r\n repr(dpi)+ \" DONT KNOW WHAT TO DO\")\r\n dpi=0\r\n \r\n try:\r\n dpi = float(dpi)\r\n if not dpi.is_integer():\r\n logging.warning(\"non integer DPI of {:.4f} is not supported\".format(dpi))\r\n except TypeError: #unkown type of dpi\r\n logging.warning(\"Can not parse dpi info: \"+repr(dpi))\r\n dpi = 0\r\n except KeyError:\r\n logging.debug(\"no dpi info\")\r\n dpi=0\r\n\r\n logging.debug(\"loaded image of dimensions: \"+str(self.npImg.shape)+\r\n \" and type: \"+str(self.npImg.dtype)+\r\n \", with DPI: \"+str(dpi))\r\n \r\n if dpi > 0:\r\n self.ui.DPI.setValue(int(dpi))\r\n \r\n #adjust UI elements to image properties\r\n self.ui.x0.setMaximum(self.npImg.shape[1])\r\n self.ui.x0.setValue(0)\r\n self.ui.y0.setMaximum(self.npImg.shape[0])\r\n self.ui.y0.setValue(0)\r\n self.ui.x1.setMaximum(self.npImg.shape[1])\r\n self.ui.x1.setValue(self.npImg.shape[1])\r\n self.ui.y1.setMaximum(self.npImg.shape[0])\r\n self.ui.y1.setValue(self.npImg.shape[0])\r\n\r\n #plot the image\r\n self.subplot.imshow(self.npImg,cmap=\"gray\")\r\n self.canvas.draw()", "def __enhance_image(self, img):\n\n blue = self.g.clahe.apply(img[:,:,0])\n green = self.g.clahe.apply(img[:,:,1])\n red = self.g.clahe.apply(img[:,:,2])\n img[:,:,0] = blue\n img[:,:,1] = green\n img[:,:,2] = red\n return img", "def set_image(self):\r\n return loader.GFX['loadgamebox']", "def show_image(image):\r\n plt.imshow(image, cmap='gray')\r\n plt.show()", "def image_cb(self, msg):\n\n # Save the camera image\n self.camera_image = msg\n\n # I sufficient information is available...\n if not None in (self.camera_image, self.waypoint_tree, self.lights):\n\n # Find index and color state of next light\n light_wp, state = self.process_traffic_lights()\n\n # If the light is green...\n if state == TrafficLight.GREEN:\n\n # Publish sentinel indicatig no red light\n self.upcoming_red_light_pub.publish(Int32(-1))\n\n else:\n\n # Publish the traffic light index\n self.upcoming_red_light_pub.publish(Int32(light_wp))", "def build_filler_images(self):", "def getTasseledCap(img):", "def image(self, verbose=False):\n\n return self._action(FP_ModuleMorphism.image, verbose)", "def show_env(self, img):\n plt.figure(1)\n plt.subplot(111)\n plt.imshow(img, interpolation=\"nearest\")\n plt.show()", "def test_Image():\n assert Image(cur, \"Simple_Linear\").detect_image() == True\n assert Image(cur, \"Logistic_Linear\").detect_image() == False\n assert Image(cur, \"Simple_Linear\").date == \"2021-04-20\"\n assert Image(cur, \"Breslow-Day_Test\").source == \"Course BIOSTAT703 slide\"", "def detect_fields(img):\n lines = np.array_split(img, 5, axis=1)\n fields = [np.array_split(line, 5, axis=0) for line in lines]\n return fields", "def read_img(img): #X\n im = plt.imread(img)\n im = im[:, :, :3]\n if im.max()>200:\n im = im/255.\n return rgb_to_hsv(im)-0.5", "def process_image(self):\n pass", "def pixel_type(self):\n pass", "def __init__(self, machine):\n super().__init__(machine)\n self.features['has_rgb_dmd'] = True", "def scalarInfo(img, cnt):\n\tm = cntInfo(img, cnt)\n\td = {\"perimeter\":m[\"perimeter\"], \"oreientation\":m[\"orientation\"], \"solidity\":m[\"solidity\"],\"height\":m[\"height\"], \"extent\":m[\"extent\"], \"aspect ratio\":m[\"aspect ratio\"], \"area\":m[\"area\"], \"sum intensity\":m[\"sum intensity\"], \"width\":m[\"width\"], \"equivalent diameter\": m[\"equivalent diameter\"], \"mean intensity\": m[\"mean intensity\"]}\n\treturn d", "def display(self, image):\n raise NotImplementedError()", "def 
image_info(image, task_state, video_state):\n image_info = 'Frame {}/{} ({})'.format(video_state.image_idx + 1, video_state.num_frames, video_state.get_image_name())\n cv2.putText(image, image_info, (5, 15), cv2.FONT_HERSHEY_SIMPLEX, 0.5, video_state.color, 1)\n\n label_info = []\n if len(video_state.labels) > 0:\n label_info = ['{}'.format(a) for (f, a) in video_state.labels.items() if video_state.get_image_name().split('.')[0] == f]\n if len(label_info) == 0:\n label_info = ['None']\n for i, row in enumerate(label_info):\n cv2.putText(image, row, (5, 35 + i * 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, video_state.color, 1)\n cv2.imshow('Video', image)\n if video_state.look_ahead == 0: # no lookahead\n cv2.destroyWindow('Foresight')\n cv2.destroyWindow('Hindsight')\n elif video_state.look_ahead == 1: # only foresight\n foresight(video_state)\n elif video_state.look_ahead == 2: # foresight and hindsight\n foresight(video_state)\n hindsight(video_state)", "def met(r):\n image_url = r.get(\"image\")\n if image_url is None:\n if r.get(\"source\") is not None:\n image_url = r.get(\"source\").get(\"href\")\n image_name = r.get(\"name\")\n image_artist = r.get(\"Who\")\n return image_url, image_name, image_artist", "def _state_main(self, gui):\n gui.entry.wait_variable(gui.entry_sv)\n\n '''Clean string'''\n files = literal_eval(gui.entry_sv.get())\n\n '''Remove previous images'''\n if hasattr(gui, \"panel\"):\n gui.panel.destroy()\n\n '''Load each image'''\n for file_name in files:\n file_name = file_name.replace(\"{\", \"\").replace(\"}\", \"\")\n # image = tk.PhotoImage(file=file_name)\n if \".CR2\" in file_name:\n '''Rawpy implementation'''\n file_image = rawpy.imread(file_name)\n file_image = file_image.postprocess()\n '''Rawkit implementation'''\n '''file_image = Raw(file_name)\n file_image = np.array(file_image.to_buffer())'''\n '''OpenCV implementation'''\n '''file_image = cv2.imread(file_name)'''\n else:\n file_image = Image.open(file_name)\n '''image = file_image.resize((500, 500), Image.ANTIALIAS)\n image = ImageTk.PhotoImage(image)\n gui.panel = tk.Label(gui.root, image=image)\n gui.panel.image = image\n gui.panel.pack()'''\n # panel.grid(row=2)\n\n image_data = np.array(file_image)\n image_data = cv2.cvtColor(image_data, cv2.COLOR_RGB2GRAY)\n '''print(image_data.shape)\n print(image_data)\n print(len(image_data))\n print(len(image_data[0]))'''\n returned_image = Image.fromarray(image_data)\n '''cv2.imshow(\"Gray\", image_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Gray\")'''\n\n '''enhanced_contrast = ImageEnhance.Contrast(Image.fromarray(file_image))\n enhanced_image = enhanced_contrast.enhance(255)\n enhanced_data = np.array(enhanced_image)\n plot_functions.imshow(enhanced_image)\n plot_functions.show()'''\n\n # color_space = cv2.cvtColor(image_data, cv2.COLOR_RGB2HSV)\n # print(color_space)\n \n '''Create mask for white-ish pixels'''\n '''lower_background = np.array([150, 150, 150])\n upper_background = np.array([255, 255, 255])\n print(image_data)\n white_mask = cv2.inRange(image_data, lower_background, upper_background)\n white_mask = cv2.morphologyEx(white_mask, cv2.MORPH_OPEN, np.ones((3,3),np.uint8))\n white_mask = cv2.morphologyEx(white_mask, cv2.MORPH_DILATE, np.ones((3, 3), np.uint8))\n white_mask = white_mask / 255'''\n\n '''Create mask for black-ish pixels'''\n '''lower_background = np.array([0, 0, 0])\n upper_background = np.array([25, 25, 25])\n black_mask = cv2.inRange(image_data, lower_background, upper_background)\n black_mask = cv2.morphologyEx(black_mask, cv2.MORPH_OPEN, 
np.ones((3, 3), np.uint8))\n black_mask = cv2.morphologyEx(black_mask, cv2.MORPH_DILATE, np.ones((3, 3), np.uint8))\n black_mask = black_mask / 255'''\n\n '''Add masks together'''\n '''background_mask = white_mask\n # Ensure no value is above 1\n background_mask = np.clip(background_mask, 0, 1)'''\n \n copied_image_data = np.asarray(returned_image).copy()\n # background_mask = np.logical_not(background_mask)\n '''for row_index, [mask_row, image_row] in enumerate(zip(background_mask, copied_image_data)):\n # place black pixel on corresponding masked pixels\n # copied_image_data[row_index] = np.array([image_row[pixel] * int(mask_row[pixel]) for pixel in range(len(mask_row))])\n # make pixel fully white on corresponding masked pixels\n copied_image_data[row_index] = np.array([np.array([255, 255, 255]) if int(mask_row[pixel]) else image_row[pixel] for pixel in range(len(mask_row))])'''\n\n '''Turn removed pixels red'''\n '''mask_image = Image.fromarray(copied_image_data)\n plot_functions.imshow(mask_image)\n plot_functions.show()'''\n trapezoid_data = copied_image_data.copy()\n\n enhanced_contrast = ImageEnhance.Contrast(Image.fromarray(trapezoid_data))\n enhanced_image = enhanced_contrast.enhance(255)\n trapezoid_data = np.array(enhanced_image)\n\n '''Detect lines'''\n edges = cv2.Canny(trapezoid_data, 75, 150)\n lines = cv2.HoughLinesP(edges, 1, np.pi / 180, 100, maxLineGap=1000)\n # print(lines)\n for line in lines:\n x1, y1, x2, y2 = line[0]\n if y1 == y2:\n cv2.line(copied_image_data, (x1, y1), (x2, y2), (255, 255, 255), 1)\n\n '''Trapezoid attempt'''\n\n # filters image bilaterally and displays it\n bilatImg = cv2.bilateralFilter(trapezoid_data, 5, 175, 175)\n\n # finds edges of bilaterally filtered image and displays it\n edgeImg = cv2.Canny(bilatImg, 75, 200)\n\n # gets contours (outlines) for shapes and sorts from largest area to smallest area\n contours, hierarchy = cv2.findContours(edgeImg, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n contours = sorted(contours, key=cv2.contourArea, reverse=True)\n\n # drawing red contours on the image\n for con in contours:\n cv2.drawContours(trapezoid_data, con, -1, (255, 255, 255), 3)\n\n '''Detect corners'''\n dst = cv2.cornerHarris(edges, 30, 31, 0.001)\n dst = cv2.dilate(dst, None)\n ret, dst = cv2.threshold(dst, 0.01 * dst.max(), 255, 0)\n dst = np.uint8(dst)\n\n # find centroids\n ret, labels, stats, centroids = cv2.connectedComponentsWithStats(dst)\n # define the criteria to stop and refine the corners\n criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100,\n 0.001)\n corners = cv2.cornerSubPix(edges, np.float32(centroids), (5, 5),\n (-1, -1), criteria)\n\n good_corners = []\n for corner in corners:\n if (corner[1] < 1000) & (corner[1] > 650) & (corner[0] > 250) & (corner[0] < 2250):\n good_corners.append(corner)\n cv2.circle(edges, (corner[0], corner[1]), 10, (255, 255, 255))\n\n print(good_corners)\n if len(good_corners) >= 3:\n corner_combos = itertools.combinations(good_corners, 3)\n elif len(good_corners) > 1:\n corner_combos = itertools.combinations(good_corners, 2)\n\n best_corner_combo = None\n best_coef = np.inf\n for corner_combo in corner_combos:\n regression = LinearRegression().fit(np.array([corner[0] for corner in corner_combo]).reshape(-1, 1),\n np.array([corner[1] for corner in corner_combo]))\n if np.abs(regression.coef_) < best_coef:\n best_coef = np.abs(regression.coef_)\n best_corner_combo = np.array([corner[1] for corner in corner_combo])\n\n y_edge = int(round(np.mean(best_corner_combo)))\n edges = 
edges[y_edge:3000, 200:2200]\n copied_image_data = copied_image_data[y_edge:2500, 200:2200]\n trapezoid_data = trapezoid_data[y_edge:2500, 200:2200]\n\n # and double-checking the outcome\n cv2.imshow(\"linesEdges\", edges)\n cv2.imshow(\"linesDetected\", copied_image_data)\n cv2.imshow(\"Contours check\", trapezoid_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Contours check\")\n\n # find the perimeter of the first closed contour\n perim = cv2.arcLength(contours[0], True)\n # setting the precision\n epsilon = 0.02 * perim\n # approximating the contour with a polygon\n approxCorners = cv2.approxPolyDP(contours[0], epsilon, True)\n # check how many vertices has the approximate polygon\n approxCornersNumber = len(approxCorners)\n\n for corners in approxCorners:\n cv2.circle(trapezoid_data, (corners[0], corners[1]), radius=10, color=(255, 255, 255), thickness=-1)\n cv2.imshow(\"Vertex position\", trapezoid_data)\n cv2.waitKey()\n cv2.destroyWindow(\"Vertex position\")\n cv2.imshow(\"linesEdges\", edges)\n cv2.imshow(\"linesDetected\", copied_image_data)\n cv2.waitKey(0)\n cv2.destroyAllWindows()", "def show_field(self, vehicles, type):\n\n # starting pixels x = 0, y = 0 on field image\n start_x = 78\n start_y = 45\n\n # block pixel width is slightly different per field size\n if self.size == 6:\n block_width = 72\n elif self.size == 9:\n block_width = 69\n elif self.size == 12:\n block_width = 68.5\n\n field = plt.imread(f\"data/RushHourImages/RushHour{self.size}.jpg\")\n fig, ax = plt.subplots()\n plt.imshow(field)\n plt.axis('off')\n\n for vehicle in vehicles:\n if vehicle.orientation == 'H':\n x = start_x + (vehicle.x * block_width)\n y = start_y + (vehicle.y * block_width)\n if vehicle.length == 2:\n car = plt.imread(f\"data/RushHourImages/Car{vehicle.id}.png\")\n else:\n car = plt.imread(f\"data/RushHourImages/Truck{vehicle.id}.png\")\n\n # truck: the image coordinate is his middle, which changes with the length of the car\n x += 40\n\n if vehicle.orientation == 'V':\n x = start_y + (vehicle.x * block_width)\n y = start_x + (vehicle.y * block_width)\n if vehicle.length == 2:\n car = plt.imread(f\"data/RushHourImages/Car-rotated{vehicle.id}.png\")\n else:\n car = plt.imread(f\"data/RushHourImages/Truck-rotated{vehicle.id}.png\")\n y += 40\n\n if self.size == 6:\n imagebox = OffsetImage(car, zoom=0.6)\n elif self.size == 9:\n imagebox = OffsetImage(car, zoom=0.4)\n elif self.size == 12:\n imagebox = OffsetImage(car, zoom=0.3)\n\n imagebox.image.axes = ax\n xy = (x, y)\n ab = AnnotationBbox(imagebox, xy, frameon=False)\n ax.add_artist(ab)\n\n if type == True:\n plt.show(block=False)\n plt.pause(0.001)\n plt.close()\n else:\n plt.show()", "def miscinfo(self):\n return _image.image_miscinfo(self)", "def print_info(self, i):\n\n im_size = self.image_size(i)\n print 'The path of the image is: {}'.format(self.image_path_at(i))\n print 'width: {}, height: {}'.format(im_size[0], im_size[1])\n \n attr_i = self.gtdb['attr'][i, :]\n print 'The attributes are: {}'.format(','.join([self._classes[i] for i in np.where(attr_i==1)[0]]))", "def view(self):\n window = tk.Tk()\n label = tk.Label(window)\n label.pack()\n img = self.get_tkimage()\n label[\"image\"] = label.img = img\n window.mainloop()", "def brightness(self):\n _LOGGER.error(\"inside brightness\")\n url = self.urlx + '/dimstate'\n headers = {'x-ha-access': 'raspberry',\n 'content-type': 'application/json'}\n\n response = get(url, headers=headers)\n _LOGGER.error(response.text)\n\n json_data = json.loads(response.text)\n 
_LOGGER.error(json_data)\n\n state = int(int(json_data['dimState'])*1.5)\n\n # if int(self._dimmer) < 170:\n self._dimmer = state\n\n return self._dimmer", "def load_image(self, image_id):\n # Load image\n# print(self.image_info[image_id]['path'])\n image = cv2.imread(self.image_info[image_id]['path'],cv2.IMREAD_GRAYSCALE) \n image = image[:,:, np.newaxis] #Add 1 dimension for grayscale images\n return image", "def see_image(self, idx, show=True):\n true_label = self.true_targets[idx]\n img, label, _ = self.__getitem__(idx) # img has channel as 1st dim\n img = np.transpose(img.numpy(), (1, 2, 0)) # channel as last dim\n if show:\n plt.imshow(img)\n plt.title(f\"Label: {self.classes_labels[true_label]}\")\n plt.show()\n else:\n return img, label, true_label", "def show(type,img):\n # print(img)\n cv2.imshow(type, img)\n cv2.waitKey()", "def add_field(self, img_dict):\n for k in img_dict.keys():\n assert k in self.bands, \"Celeste model doesn't support band %s\"%k\n self.field_list.append(Field(img_dict))", "def _getGUIImage(self): \n # read the system of your computer\n\n image = ImagePIL.fromarray(self.cv_image)\n\n size = round(image.size[0]/2), round(image.size[1]/2)\n\n image.thumbnail(size, ImagePIL.ANTIALIAS)\n image = ImageTkPIL.PhotoImage(image)\n # self.panel = tki.Label(image=image)\n self.panel.config(image=image)\n self.panel.image = image", "def get_classification(self, image):\n #TODO implement light color prediction\n t = rospy.get_time()\n if(0):\n print(\"Saving\")\n filename = self.filepath + \"%f.png\" % t\n cv2.imwrite(filename, image)\n return TrafficLight.UNKNOWN\n else:\n ifStop, _ = redlight_detection(image)\n StrTime = \"%f: \"%t\n if(ifStop):\n print(StrTime + \"Red Light Detected.\")\n return TrafficLight.RED\n else:\n print(StrTime + \"No Red Light.\")\n return TrafficLight.GREEN", "def showImage(self, image):\n \n self.image = img", "def image(self) -> str:\n return pulumi.get(self, \"image\")", "def imdisplay(filename, representation):\n image = read_image(filename, representation)\n\n if representation == GRAY_OUT:\n plt.imshow(image, cmap='gray')\n else:\n plt.imshow(image)\n\n plt.show()", "def image(self): # type: () -> str\n return self.config['Image']", "def extract_feat(self, imgs):\n pass", "def extract_feat(self, imgs):\n pass", "def img_show(img, counter, mode, RGB):\n plt.figure(counter)\n plt.axis('off')\n if not RGB:\n img_aux = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = img_aux\n if mode is None:\n plt.imshow(img)\n else:\n plt.imshow(img, cmap=mode)\n plt.show()\n return counter + 1", "def print_brightness(image):\n target = image.copy()\n for y in range(len(image)):\n for x in range(len(image[y])):\n rgb = image[y, x]\n target[y, x] = brightness(rgb)\n\n return target", "def image_info(path):\n global working_img\n working_img = Image.open(path)\n print('=======================================')\n print(f'이미지 파일 이름:{working_img.filename}')\n print(f'이미지 파일 파일 형식:{working_img.format}')\n print(f'이미지 용량:{working_img.size}')\n print(f'이미지 색상모드:{working_img.mode}')\n print(f'이미지 크기:{working_img.width}x{working_img.height}')", "def display(self):\n display(self.image)", "def get_dark_images(new_path, dataframe):\n\n image_list = [i for i in dataframe['image']]\n return [1 if np.mean(np.array(Image.open(new_path + image))) == 0 else 0 for image in image_list]", "def image(self):\n return self.any_image(-1)", "def __init__(self, parent=None):\n self.image_float_fg = None\n self.image_float_bg = None\n self.image_8bit_fg = None\n self.image_8bit_bg = 
None\n self.image_unchanged_fg = None\n self.image_unchanged_bg = None\n self.blur = -1\n self.closing = -1\n self.thresh = -1\n self.ready = True", "def image(self):\n return self.datasource.data[\"values\"]", "def do_full(self, image,hsv,upper,lower,debug=False):\n single_color_img = self.extract_single_color_range(image,hsv,lower,upper)\n if debug:\n # cv2.imshow('single_color_img',single_color_img)\n cv2.imwrite('debug_pics/single_color_img.jpg',single_color_img)\n single_channel = self.threshold_image(single_color_img,debug)\n if debug:\n # cv2.imshow('single_channel',single_channel)\n cv2.imwrite('debug_pics/single_channel.jpg',single_channel)\n cont,hierarchy = self.contours(single_channel,debug)\n\n if debug:\n for i,cnt in enumerate(cont):\n cv2.drawContours(single_channel,cont,i,(0,0,255),2)\n if debug: cv2.imwrite('debug_pics/contours.jpg',single_channel) #cv2.imshow('contours',single_channel)\n\n return self.get_bricks(cont)", "def stain_image(image, num_stains, color):", "def analyze(self):\n try:\n self.options[self.multi_image][1]()\n except:\n raise Exception(\"Multi Image Option not defined.\")\n\n self.image = self.data / self.exposure\n\n background = self.min_val = np.min(self.image[:511,:511])\n self.max_val = np.max(self.image[:511,:511])\n # stats.mode returns modal value = value that occours most often\n #background = stats.mode(im[:50,:50].ravel())[0][0]\n\n intensity = self.image.sum() - background*np.size(self.image)\n\n #results.append((self.index, intensity, background))\n self.index =+ 1", "def get_classification(self, image):\n\n temp = cv2.cvtColor(cv2.GaussianBlur(image,(5,5),0), cv2.COLOR_BGR2HSV)\n\n maskR = cv2.inRange(temp, np.array([0, 195, 240]), np.array([5, 215, 255]))\n maskY = cv2.inRange(temp, np.array([28, 195, 240]), np.array([35, 215, 255]))\n maskG = cv2.inRange(temp, np.array([60, 195, 240]), np.array([67, 215, 255]))\n\n filt_r = cv2.bitwise_and(temp,temp, mask= maskR)\n filt_y = cv2.bitwise_and(temp,temp, mask= maskY)\n filt_g = cv2.bitwise_and(temp,temp, mask= maskG)\n\n # Bitwise-AND mask and original image\n self.debug_im1 = filt_r\n self.debug_im2 = filt_y\n self.debug_im3 = filt_g\n status = TrafficLight.UNKNOWN\n\n if np.sum(maskR>10):\n print('detected red')\n status = TrafficLight.RED\n elif np.sum(maskY>10):\n print('detected yellow')\n status = TrafficLight.YELLOW\n elif np.sum(maskG>10):\n print('detected green')\n status = TrafficLight.GREEN\n\n # self.debug()\n return status", "def update_image(self):\n if self.filenames:\n pos = self.slider.value()\n proj, flat, dark, theta = dx.read_aps_32id(self.filenames, proj=(pos, pos+1))\n if self.ffc_correction:\n image = proj[0,:,:].astype(np.float)/flat[0,:,:].astype(np.float)\n else:\n image = proj[0,:,:].astype(np.float)\n self.image_item.setImage(image)", "def plot_field(mut_sample, field, lm, th=0.75,\n names=['blue', 'green', 'orange', 'purple'], ax=None,\n image=None, grid_mm2=None, n_factors=None, n_wt=2, flip=False, scale=15):\n if image is None:\n image = mut_sample._scaffold_image\n\n if grid_mm2 is None:\n grid_mm2 = (mut_sample.get_img_size(mut_sample.image)[0] \\\n * pixel2um / field.shape[1]) ** 2 / 1e6\n\n if n_factors is None:\n n_factors = field.shape[-1]\n\n f = field.mean(0)\n l = lm.mean(0)\n\n fmap = (f[:, :, :n_factors - 2]).argmax(2)\n fn = (cv.blur(l, (3, 3)) / grid_mm2 < 300)\n if type(th) is not list:\n fn |= (f[:, :, n_factors - 2:]).sum(2) > th\n elif type(th) is list:\n for i, t in enumerate(th):\n fn[(f[:, :, :n_factors - 2]).argmax(2) == i] |= ((f[:, :, 
n_factors - 2:]).sum(2) > t)[\n (f[:, :, :n_factors - 2]).argmax(2) == i]\n c = [get_cmap(cmaps_global[n])(150) for n in names] + [(1, 1, 1, 1)] * n_wt\n\n img = image\n img = (img / img.max() * 255).astype(np.uint8)\n s = img.shape\n s = tuple([int(x) for x in list(s)[::-1]])\n p35, p90 = np.percentile(img, (35, 90))\n processed_img = exposure.rescale_intensity(img, in_range=(p35, p90))\n\n b = cv.resize(processed_img, s)[::-1, :] / 255.\n b = np.maximum(np.minimum(b, 1), 0)\n Fc = np.array([c[int(i)] for i in fmap.flatten()]).reshape((*fmap.shape, -1)).transpose((1, 0, 2))[::-1, :, :3]\n Fc[fn.T[::-1, :], :] = 1.0\n out = (cv.resize(Fc, s) * b.reshape(*b.shape, 1))\n if flip:\n out = out[::-1, :]\n\n if ax is not None:\n ax.imshow(out)\n ax.plot([s[0] * 0.95,\n s[0] * 0.95 - 2.5e3 / 0.325 / scale],\n [s[1] * (.95),\n s[1] * (.95)], color='white', lw=3)\n ax.set_axis_off()\n else:\n plt.imshow(out)\n plt.plot([s[0] * 0.95,\n s[0] * 0.95 - 2.5e3 / 0.325 / scale],\n [s[1] * (.95),\n s[1] * (.95)], color='white', lw=3)\n plt.axis('off')", "def screeninfo(self):\n\t\tDevice().capture_screenshot()\n\t\tresolution = (self.width, self.height)\n\t\tdroid = AQMdroid('image.png', resolution, self.filename)\n\t\t\n\t\ttry:\n\t\t\tdroid.getorigin()\n\t\texcept Exception as e:\n\t\t\tScriptGen(self.filename).log_checker(self.log_handler)\n\t\t\tScriptGen(self.filename).log_checker(self.generate_log_file)\n\t\t\tprint \"\\nExit Point Triggered.\"\n\t\t\tsys.exit()", "def _color_info_func(self):\n\n if self.rgb is not None:\n return self._color_info_text()\n else:\n return None", "def imageinfo(self, *args, **kwargs):\n return self.logger.log(logging.INFO-1, *args, **kwargs)", "def _show_rgb(self):\n R, G, B = self._rgb_frames()\n image = numpy.dstack((R, G, B))\n imageItem = self.parent.image.getImageItem()\n imageItem.updateImage(image)", "def __init__(self, image_path, color = 'g'):\n self.image_path = image_path\n self.color = color", "def observation(self, img):\r\n img = img[25:200]\r\n img = cv2.resize(img, self.img_size[1:])\r\n if not self.color:\r\n img = img.mean(-1, keepdims=True)\r\n\r\n return img.transpose([2, 0, 1]) / 255", "def test_simple(self):\n image = self.design.layout.layers[0].images[0]\n assert len(image.traces) == 2", "def info(self):\n\n\t\tprint(\"Pixels on a side: {0}\".format(self.data.shape[0]))\n\t\tprint(\"Pixel size: {0}\".format(self.resolution))\n\t\tprint(\"Total angular size: {0}\".format(self.side_angle))\n\t\tprint(\"lmin={0:.1e} ; lmax={1:.1e}\".format(self.lmin,self.lmax))", "def draw_info(self, img, age_threshold=8):\n self.n_vehicles = 0\n for detection in self.detections:\n if len(detection.last_boxes) > age_threshold:\n self.n_vehicles += 1\n img = detection.draw(img, thick=2, color=(255, 50, 0))\n\n cv2.putText(img, 'Vehicles in sight: %s' % self.n_vehicles, (50, 150), cv2.FONT_HERSHEY_SIMPLEX, 1,\n (255, 255, 255), 2)\n\n return img", "def show_image(image):\n print('-' * (len(image) + 4))\n for line in image:\n print('| ', end='')\n for ch in line:\n char = '#' if ch is True else '.'\n print(char, end='')\n print(' |')\n print('-' * (len(image) + 4))" ]
[ "0.6423711", "0.6173894", "0.6126788", "0.61006874", "0.60167235", "0.5981014", "0.58750916", "0.5868398", "0.58221865", "0.57751364", "0.57402325", "0.567587", "0.56675595", "0.566325", "0.5617019", "0.55742323", "0.5573085", "0.5560732", "0.5554698", "0.5546894", "0.5545018", "0.5532914", "0.5524726", "0.5524726", "0.5512801", "0.5512393", "0.55057794", "0.5500888", "0.5487857", "0.5472957", "0.5470064", "0.54579425", "0.5432044", "0.54265547", "0.5407431", "0.5403262", "0.53985757", "0.5394725", "0.5388212", "0.5380554", "0.5377197", "0.53759307", "0.5373136", "0.5356523", "0.5355322", "0.5343407", "0.53392583", "0.5335792", "0.5329747", "0.5313772", "0.5311743", "0.53038245", "0.52876794", "0.52719593", "0.52693176", "0.5266868", "0.52378565", "0.52366924", "0.5219785", "0.52194685", "0.52118766", "0.52112347", "0.5209508", "0.5203178", "0.51952183", "0.51912475", "0.51890516", "0.51831794", "0.5182562", "0.5181613", "0.51791155", "0.51739705", "0.51736426", "0.5169367", "0.5168155", "0.5168155", "0.51623577", "0.5159512", "0.515717", "0.51564515", "0.5155946", "0.51547205", "0.5147068", "0.51469433", "0.5146821", "0.514233", "0.5139081", "0.5139016", "0.51380116", "0.5136174", "0.51318294", "0.5131796", "0.5130698", "0.5128652", "0.5127339", "0.5126224", "0.511562", "0.5115511", "0.5115414", "0.5108756" ]
0.57655007
10
describes the microscope setup
def __init__(self, type, objective, illumination, ccd): self.type = type # e.g. Olympus iX73 self.objective = objective self.illumination = illumination self.ccd = ccd
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n ...", "def setup(self):\n pass", "def setup( self ):", "def setup(self):\n pass # pragma: no cover", "def setup(self):\r\n pass", "def setup(self):\n\t\tpass", "def setup(self) -> None:", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):\n pass", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup():\n pass", "def setup(self, rc):\n pass", "def setup(self) -> None:\n pass", "def setup(self) -> None:\n pass", "def setup(self) -> None:\n pass", "def setup(self,**kwargs):\n pass", "def _setup(self):", "def _setup(self):", "def analysis_setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self, *args, **kwargs):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def setup_dev():\n setup_general()", "def setup():\n setFormat()\n setFilename()\n setScreenMode()", "def setup(self): \n pass", "def setup(bot):\n bot.add_cog(Info(bot))", "def _setup(self) -> None:\n\t\treturn", "def setup(bot):\n bot.add_cog(Help(bot))", "def setup(bot: Bot) -> None:\n bot.add_cog(Armory(bot))", "def setup(self, args={}):\n\n return Status.RUN", "def setup_method(self):\n self.hass = get_test_home_assistant()\n\n self.config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\", \"name\": \"test local\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n self.endpoint_url = f\"https://westus.{mf.FACE_API_URL}\"", "def main():\n setup(**setup_params)", "def setup(bot):\n bot.add_cog(TruthOrDareCmd(bot))", "def setup(bot: util.CustomBot):\r\n bot.add_cog(Info(bot))", "def _setup(self):\n raise NotImplementedError()", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def setup(self) -> None:\n mlflow.set_tracking_uri('file://' + hutils.get_original_cwd() + '/mlruns')\n if self.log_mlflow:\n mlflow.set_experiment(self.config.runner.exp_name)\n \n if self.log_mlflow:\n self.log_parameters(self.config)\n mlflow.log_param('node', os.uname()[1])", "def setup(bot: Bot) -> None:\n bot.add_cog(Help(bot))", "def SetupEnvironment(self):\n pass", "def setup(self):\n\n if self.user is 'Daisy':\n import socket\n host = socket.gethostname()\n\n simName = self.name_prefix[:self.name_prefix.find('_')]\n\n if 'ursa' in host:\n self.raw_sim_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback\n self.caesar_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/home/rad/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/home/dleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n elif 'flatironinstitute.org' or 'worker' in host:\n self.raw_sim_dir = '/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback # dummy\n self.caesar_dir = 
'/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/mnt/ceph/users/daisyleung/simba/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/mnt/home/daisyleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n else:\n raise NotImplementedError", "def setUp(self):\n lang = self._sim_lang\n self._simulator = self._find_resource(\n f\"drake/examples/hardware_sim/hardware_sim_{lang}\")\n self._example_scenarios = self._find_resource(\n \"drake/examples/hardware_sim/example_scenarios.yaml\")\n self._test_scenarios = self._find_resource(\n \"drake/examples/hardware_sim/test/test_scenarios.yaml\")\n self._default_extra = {\n # For our smoke test, exit fairly quickly.\n \"simulation_duration\": 0.0625,\n }", "def setup(self, *args, **kwargs):\n return True", "def Setup(self):\n return True", "def setup(self):\n self.machine = Machine(['a', 'b', 'c', '_'])", "def setup(bot: Bot) -> None:\n bot.add_cog(VoiceGate(bot))", "def _setup(app_obj):", "def Setup(self):\n raise NotImplementedError(\n 'No runtime setup defined for %s' % self.__class__.__name__)", "def setup(self):\n self.log.debug('upm - in upm setup()')\n # Add resource setup code here", "def setup(self):\n super(__class__, self).setup()\n # construct command line call\n setup_script = '%s/tfMRI.py' % \\\n os.environ['ABCDTASKPREPDIR']\n arg1 = self.kwargs['path']\n arg2 = self.kwargs['sourcedata_root']\n arg3 = self.kwargs['subject']\n arg4 = self.kwargs['session']\n anat_metadata = self.config.get_bids('t1w_metadata')\n # get make/software information\n make = anat_metadata['Manufacturer']\n if make == 'GE':\n reg = re.compile(r'.*(DV2[56]).*')\n software_version = reg.match(anat_metadata[\n 'SoftwareVersions']).group(1)\n else:\n software_version = 'NA'\n cmd = ' '.join((setup_script, arg1, arg2, arg3, arg4, make,\n software_version))\n print(cmd)\n\n log_dir = self._get_log_dir()\n out_log = os.path.join(log_dir, self.__class__.__name__ + '_setup.out')\n err_log = os.path.join(log_dir, self.__class__.__name__ + '_setup.err')\n result = self.call(cmd, out_log, err_log)", "def setup(self):\n\n self.testInst = pysat.Instrument('pysat', 'testing2D_xarray',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'profiles'\n self.test_val_length = 15\n\n return", "def setup(self, registers):\n \"\"\" tasks before any generation functions are called \"\"\"\n pass", "def setup(self, run, run_id):\n\n raise NotImplementedError", "def setup(bot):\n bot.add_cog(Miniscape(bot))", "def setup(self):\n ### Set Names\n # Name of the pipeline reduction step\n self.name='sortobs'\n # Shortcut for pipeline reduction step and identifier for\n # saved file names.\n self.procname = 'RAW'\n # Set Logger for this pipe step\n self.log = logging.getLogger('pipe.step.%s' % self.name)\n ### Set Parameter list\n # Clear Parameter list\n self.paramlist = []\n # Append Parameters\n self.paramlist.append(['pattern', '(^.+_([gri]-band|oiii|sii|clear|h-alpha))',\n 'Regex pattern used to get name by matching name_filter'])\n # Confirm end of setup\n self.log.debug('Setup: done')", "def setup(self):\n raise NotImplementedError(\"Need to be implemented in subclasses\")", "def config (self):\n import wikicode\n class Config (wikicode.extension):\n def run (self):\n self.send_page (\"Generic DC Setup\")\n wikicode.run_extension (Config)", "def init():\n\n return \"Welcome to SIX SIGMA, this api is only available to SIX SIGMA 
developers\"", "def setups():\n setups = []\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F2 = dict()\n kotani2017_F2['name'] = 'kotani2017_F2'\n kotani2017_F2['piltemplate'] = kotani2017_F2_pil\n kotani2017_F2['pilparams'] = [None]\n kotani2017_F2['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F2['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=1'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.5'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.05')]\n kotani2017_F2['reporter'] = 'D'\n kotani2017_F2['exp_results'] = [(7733, 7.42), (11333, 6.18), (25533, 1.40)]\n setups.append(kotani2017_F2)\n\n\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F3 = dict()\n kotani2017_F3['name'] = 'kotani2017_F3'\n kotani2017_F3['piltemplate'] = kotani2017_F3_pil\n kotani2017_F3['pilparams'] = [None]\n kotani2017_F3['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F3['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.001')]\n kotani2017_F3['reporter'] = 'D'\n kotani2017_F3['exp_results'] = [(21220, 7.72), (64203, 3.12), (86996, 0.69)]\n setups.append(kotani2017_F3)\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F4 = dict()\n kotani2017_F4['name'] = 'kotani2017_F4'\n kotani2017_F4['piltemplate'] = kotani2017_F4_pil\n kotani2017_F4['pilparams'] = [None]\n kotani2017_F4['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F4['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.001'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0')]\n kotani2017_F4['reporter'] = 'D'\n kotani2017_F4['exp_results'] = [(6815, 6.06), (9004, 4.78), (10278, 4.03), (10795, 3.73)]\n setups.append(kotani2017_F4)\n\n return setups", "def __init__(self):\n self.label = \"Get SDM parameters\"\n self.description = \"This tool is used to view the Environment and SDM modeling parameters that have been set by the user. All of the values reported by this tool must be set to values specific to the model to be made. Using the ESRI default values will cause SDM to fail. 
If the Environment is not completely set, then an error message stating \\\"Improper SDM setup\\\" will occur. The successful running of this tool does not assure that the setup is correct; only that the default values have been changed. See the Environment Settings section of the Help file for Calculate Weights for the details.\"\n\n self.canRunInBackground = False\n self.category = \"Utilities\"", "def setup(self):\n self.testInst = pysat.Instrument('pysat', 'testing2D_xarray',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'variable_profiles'\n self.test_val_length = 15\n\n return", "def configure(self):", "def configure(self):", "def configure(self):", "def configure(self):", "def setup(bot: Red):\n bot.add_cog(Welcome(bot))", "def setup(self):\n raise NotImplemented", "def setup(self):\n \n # Define ui file to be used as a graphical interface\n # This file can be edited graphically with Qt Creator\n # sibling_path function allows python to find a file in the same folder\n # as this python module\n self.ui_filename = sibling_path(__file__, \"ant_watch_plot.ui\")\n \n #Load ui file and convert it to a live QWidget of the user interface\n self.ui = load_qt_ui_file(self.ui_filename)\n\n # Measurement Specific Settings\n # This setting allows the option to save data to an h5 data file during a run\n # All settings are automatically added to the Microscope user interface\n self.settings.New('save_video', dtype = bool, initial = False)\n self.settings.New('track_ant',dtype = bool, initial = False)\n self.settings.New('pixel_size', dtype = float, initial = 0.05547850208, ro = True)\n self.settings.New('binning', dtype = int, initial = 16, ro = True)\n self.settings.New('threshold', dtype = int, initial = 85, ro = False)\n self.settings.New('proportional', dtype = float, initial = 0.12, ro = False)\n self.settings.New('integral', dtype = float, initial = 0, ro = False)\n self.settings.New('derivative', dtype = float, initial = 0.05, ro = False)\n \n # x and y is for transmitting signal\n self.settings.New('x',dtype = float, initial = 32, ro = True, vmin = 0, vmax = 63.5)\n self.settings.New('y',dtype = float, initial = 32, ro = True, vmin = 0, vmax = 63.5)\n \n # Define how often to update display during a run\n self.display_update_period = 0.01\n \n \n # Convenient reference to the hardware used in the measurement\n self.track_cam = self.app.hardware['track_cam']\n self.wide_cam = self.app.hardware['wide_cam']\n self.recorder = self.app.hardware['flirrec']\n self.daqmotor = self.app.hardware['daqmotor']\n \n #setup experiment condition\n self.track_cam.settings.frame_rate.update_value(50)\n self.track_cam.read_from_hardware()", "def setUp(self):\n\n self.niceArgV = (\"--long Alpha -n Beta \"\n \"--shortless Gamma -f --myflag \"\n \"--myparam Tofu\").split()\n\n self.nice = WellBehaved()", "def setup(self):\n env_name = rospy.get_param('ros_gym/environment_name')\n max_episode_steps = rospy.get_param('ros_gym/max_episode_steps')\n self.task_env = self.register_env(env_name, max_episode_steps)\n\n self.agent = \\\n AgentBase.get_agent(rospy.get_param('~agent'), env=self.task_env)\n rospy.loginfo('Using agent of type: {}'.format(self.agent.name))\n\n # Set the logging system\n rospack = rospkg.RosPack()\n pkg_path = rospack.get_path('ros_gym')\n outdir = pkg_path + '/training_results'\n self.task_env = wrappers.Monitor(self.task_env, outdir, force=True)", "def setUp(self):\n self.sampler = {\n \"name\": \"samplername\",\n 
\"backend_name\": \"\",\n \"backend_header\": \"\",\n \"backend_prefix\": \"\",\n \"backend_suffix\": \"\",\n \"backend_footer\": \"\",\n \"ncores\": 2,\n \"threads_per_core\": 1,\n \"omp_enabled\": True,\n \"papi_enabled\": True,\n \"papi_counters_max\": 2,\n \"papi_counters_avail\": (\"C1\", \"C2\", \"C3\"),\n \"kernels\": {\"dgemm\": (\n 'dgemm', 'char*', 'char*', 'int*', 'int*', 'int*', 'double*',\n 'double*', 'int*', 'double*', 'int*', 'double*', 'float*',\n 'int*'\n )},\n \"nt_max\": random.randint(1, 10),\n \"exe\": \"x\"\n }\n self.i = Symbol(\"i\")\n self.j = Symbol(\"j\")\n self.k = Symbol(\"k\")\n self.ns = [random.randint(1, 100) for _ in range(5)]", "def setup(self):\n self.ae = None", "async def setup(self, ctx):\n pass", "def setup(self):\n\n self.insts = []\n self.testInst = pysat.Instrument('pysat', 'testing2D',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 1, 3))\n self.insts.append(self.testInst)\n self.insts.append(self.testInst)\n\n self.dname = 'series_profiles'\n self.test_vals = np.arange(50) * 1.2\n\n self.testC = pysat.Constellation(instruments=self.insts)\n\n return", "def setup(self, app_args):\n raise NotImplementedError", "def setup(bot):\n bot.add_cog(Session(bot))", "def setup(self):\n self.testInst = pysat.Instrument('pysat', 'testing2D',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'series_profiles'\n self.test_vals = np.arange(50) * 1.2\n\n return", "def setup(self):\n # define misfit function and adjoint source generator\n self.misfit = getattr(misfit, PAR.MISFIT)\n self.adjoint = getattr(adjoint, PAR.MISFIT)\n\n # define seismic data reader and writer\n self.reader = getattr(readers, PAR.READER)\n self.writer = getattr(writers, PAR.WRITER)\n\n # prepare channels list\n self.channels = []\n for char in PAR.CHANNELS:\n self.channels += [char]", "def setup(self):\n self.log.debug('RFSwitch - in RFSwitch setup()')\n # Add resource setup code here\n print(\"Calling RFSwitch:setup\")" ]
[ "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6309029", "0.6301397", "0.6280084", "0.62563354", "0.6255936", "0.6254392", "0.62412417", "0.6226122", "0.6226122", "0.6226122", "0.6226122", "0.6211043", "0.6092844", "0.6092844", "0.6092844", "0.6092844", "0.60887855", "0.6072536", "0.60481685", "0.60481685", "0.60481685", "0.6027901", "0.59710795", "0.59710795", "0.594678", "0.59327334", "0.59327334", "0.590857", "0.5900024", "0.5900024", "0.5900024", "0.5900024", "0.5900024", "0.5888943", "0.5883497", "0.58645135", "0.5853421", "0.58309644", "0.5743328", "0.57386583", "0.5695076", "0.56922555", "0.568683", "0.56477123", "0.56318593", "0.5595929", "0.55615574", "0.5559562", "0.55465704", "0.5542546", "0.5536642", "0.5526248", "0.5521963", "0.55142236", "0.55005157", "0.55004907", "0.5480602", "0.54739803", "0.5466162", "0.5460451", "0.5457194", "0.54565465", "0.5455007", "0.5451243", "0.54417694", "0.54380345", "0.54363555", "0.5420823", "0.54123545", "0.5389246", "0.53889674", "0.53877455", "0.53877455", "0.53877455", "0.53877455", "0.53865206", "0.53865176", "0.53808963", "0.5380804", "0.5373419", "0.5368176", "0.5363429", "0.5348206", "0.5341226", "0.53347284", "0.5334573", "0.53303355", "0.53116655", "0.53114647" ]
0.0
-1
describe the CCD class
def __init__(self, exposure_time, img_acq_rate, EM_gain, name='iXon Ultra 897', img_acq_type='emcdd', darkfield=None, binning=None, vertical_pixel_shift_speed=0.5e-6, horizontal_pixel_shift_speed=0.1e-6, horizontal_pixel_shift_rate_bits=14, frame_transfer=True, crop_mode=False, acquisition_mode='kinetic', triggering='internal', readout_mode='image', pixels=512, pixel_size=16e-6): self.name = name self.img_acq_type = img_acq_type self.exposure_time = exposure_time self.img_acq_rate = img_acq_rate self.em_gain = EM_gain self.darkfield = darkfield self.binning = binning # supporting camera acquisition settings self.vpss = vertical_pixel_shift_speed self.hpss = horizontal_pixel_shift_speed self.hpss_bits = horizontal_pixel_shift_rate_bits self.frame_transfer = frame_transfer self.crop_mode = crop_mode self.acquisition_mode = acquisition_mode self.triggering = triggering self.readout_mode = readout_mode if isinstance(pixels, int): self.pixels = (pixels, pixels) else: self.pixels = pixels self.pixel_size = pixel_size self.image_area = (self.pixels[0]*pixel_size, self.pixels[1]*pixel_size)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe(self) -> str:", "def describe(self):\n raise NotImplementedError()", "def describe(self):\n raise NotImplementedError()", "def description(self):\n return '%s and a CD' % self.component.description()", "def describe(self):\n print(self.description)", "def describe(self):\n print(self.description)", "def describe():", "def describe(self) -> str:\n return self.__class__.__name__", "def description(self):", "def detailedInfo(cls):\n return 'tbd'", "def detailedInfo(cls):\n return 'tbd'", "def describe(self):\n return str(self)", "def description(self):\n pass", "def description(self):\n pass", "def get_description():\n desc = {\"description\": __doc__, \"data\": True, \"cache\": 600}\n today = datetime.date.today()\n desc[\"arguments\"] = [\n dict(\n type=\"csector\",\n name=\"csector\",\n default=\"IA\",\n label=\"Select state/sector to plot\",\n ),\n dict(\n type=\"date\",\n name=\"sdate\",\n default=f\"{today.year}/01/01\",\n label=\"Start Date:\",\n min=\"2000/01/04\",\n max=today.strftime(\"%Y/%m/%d\"),\n ),\n dict(\n type=\"date\",\n name=\"edate\",\n default=today.strftime(\"%Y/%m/%d\"),\n label=\"End Date:\",\n min=\"2000/01/04\",\n max=today.strftime(\"%Y/%m/%d\"),\n ),\n dict(\n type=\"select\",\n name=\"d\",\n default=\"0\",\n options=PDICT,\n label=\"Select Drought Classification (at and above counted):\",\n ),\n dict(\n type=\"select\",\n name=\"w\",\n default=\"percent\",\n options=PDICT2,\n label=\"How to express time for plot:\",\n ),\n dict(type=\"cmap\", name=\"cmap\", default=\"plasma\", label=\"Color Ramp:\"),\n ]\n return desc", "def describe(self):\r\n print( self.name + \" is here!\" )\r\n print( self.description )", "def describe(self):\n\n print(\"Correlation length: {0}\".format(self.cl))\n print(\"icoordchange: {0}\".format(self.icoordchange))\n print(\"ispec: {0}\".format(self.ispec))\n print(\"ireg: {0}\".format(self.ireg))\n print(\"Domain: x-axis: from {0} to {1} with {2} steps of {3}\".format(self.xori, self.xend,\n self.nx, self.dx))\n print(\"Domain: y-axis: from {0} to {1} with {2} steps of {3}\".format(self.yori, self.yend,\n self.ny, self.dy))\n print(\"Exclusion value: {0}\".format(self.valex))\n print(\"Signal-to-noise ratio: {0}\".format(self.snr))\n print(\"Variance of the background field: {0}\".format(self.varbak))", "def describe(self):\n return ''", "def __description__(self):\r\n return id(self)", "def desc(self):\n raise NotImplementedError(\"Subclass must implement abstract method\")", "def description(cls) -> str:\n\n return cls.__doc__ or \"\"", "def _describe(self) -> Dict[str, Any]:", "def describe(self) -> Text:\n return self.__repr__()", "def describe(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n return response\n class_info = {'properties': self.list_properties(class_specific=False),\n 'description': self.description,\n 'uri': self.uri,\n 'label': self.label,\n 'displayName':self.displayName,\n 'curie': self.name,\n 'used_by': self.used_by(),\n 'child_classes': self.child_classes,\n 'parent_classes': self.parent_classes,\n 'ancestor_classes': self.ancestor_classes,\n 'descendant_classes': self.descendant_classes,\n 'validation': self.validation}\n return class_info", "def method_description(self):\n pass", "def get_coulomb_info(self):\n return", "def description(self):\n return '%s and a disk' % self.component.description()", "def description(cls):\n return \"QDMTK Provider\"", "def details(self):\n raise NotImplementedError()", "def describe(self):\n return \"The method describe() is 
not implemented\"", "def inspect(self):\n print(cf_inspect(self))", "def description(self) -> str:\n pass", "def description():", "def description(self) -> str:\r\n raise NotImplementedError", "def description(self) -> str:\r\n raise NotImplementedError", "def description(self) -> str:\r\n raise NotImplementedError", "def test_class_doc(self):\n self.assertTrue(len(City.__doc__) > 0)", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def get_description():\n desc = {\"description\": __doc__, \"data\": True}\n desc[\"arguments\"] = [\n dict(\n type=\"station\",\n name=\"station\",\n default=\"IATDSM\",\n label=\"Select Station:\",\n network=\"IACLIMATE\",\n ),\n dict(\n type=\"select\",\n name=\"var\",\n default=\"spi\",\n options=PDICT,\n label=\"Select which metric to plot:\",\n ),\n dict(\n type=\"select\",\n name=\"c\",\n default=\"ncei91\",\n options=PDICT2,\n label=\"Which climatology to use for averages:\",\n ),\n dict(\n type=\"int\",\n name=\"days\",\n default=90,\n label=\"Over how many trailing days to compute the metric?\",\n ),\n ]\n return desc", "def get_description(cls) -> str:\n return cls.__doc__ or \"\"", "def testDesc(self):\n\n self.assertTrue(\n hasattr(self.cd, 'desc')\n )\n\n self.assertEqual(\n [],\n self.cc.desc\n )", "def Description(self) -> str:", "def Description(self) -> str:", "def details(self):\n pass", "def get_description(self):", "def test_ccds(self):\n #TODO write ccds tests", "def describe_carver(self) -> str:\r\n description = f\"Model: {self.name_carver}\\n\" \\\r\n f\"Klasse: Caver\\n\" \\\r\n f\"Marke: {self.brand}\\n\" \\\r\n f\"Carver-Typ: {self.typ}\\n\" \\\r\n f\"Länge: {self.length} cm.\"\r\n return description", "def show_class_details(name, f):\n print '%s:' % name\n print '\\tobject:', f\n print '\\t__name__:', \n try:\n print f.__name__\n except AttributeError:\n print '(no __name__)'\n print '\\t__doc__', repr(f.__doc__)\n return", "def description(self):\n d = self.definition\n if isinstance(d, evaluate.InstanceElement):\n d = d.var\n if isinstance(d, evaluate.parsing.Name):\n d = d.parent\n\n if isinstance(d, evaluate.Array):\n d = 'class ' + d.type\n elif isinstance(d, (parsing.Class, evaluate.Class, evaluate.Instance)):\n d = 'class ' + str(d.name)\n elif isinstance(d, (evaluate.Function, evaluate.parsing.Function)):\n d = 'def ' + str(d.name)\n elif isinstance(d, evaluate.parsing.Module):\n # only show module name\n d = 'module %s' % self.module_name\n elif self.is_keyword:\n d = 'keyword %s' % d.name\n else:\n d = d.get_code().replace('\\n', '')\n return d", "def info(self):", "def info(self):", "def describe(self):\n\n ret = []\n ret.append(\"Functional ID: %s\" % self._number)\n ret.append(\"Functional Name: %s\" % self._xc_func_name)\n ret.append(\"Attributes:\")\n ret.append(\" Name: %s\" % self._name)\n ret.append(\" Kind: %d\" % self._kind)\n ret.append(\" Family: %d\" % self._family)\n ret.append(\"Citations:\")\n for x in self._refs:\n ret.append(\" \" + x)\n\n return \"\\n\".join(ret)", "def c(self):\n pass", "def c(self):\n pass", "def get_description(self):\n raise NotImplementedError", "def get_description(self):\n pass", "def __repr__(cls):\n return cls.__name__", "def _get_desc(self):\n return self.__desc", "def get_description():\n desc = {\"description\": __doc__}\n sts = utc() - timedelta(hours=26)\n ets = utc() - timedelta(hours=2)\n desc[\"arguments\"] = [\n {\n \"type\": 
\"datetime\",\n \"name\": \"sts\",\n \"default\": sts.strftime(\"%Y/%m/%d %H00\"),\n \"label\": \"Start Timestamp (UTC):\",\n \"min\": \"1986/01/01 0000\",\n },\n {\n \"type\": \"datetime\",\n \"name\": \"ets\",\n \"default\": ets.strftime(\"%Y/%m/%d %H00\"),\n \"label\": (\n \"End Timestamp [inclusive] (UTC), \"\n \"interval must be less than 4 days\"\n ),\n \"min\": \"1986/01/01 0000\",\n },\n {\n \"type\": \"select\",\n \"options\": PDICT,\n \"default\": \"min\",\n \"name\": \"w\",\n \"label\": \"Which statistic to compute\",\n },\n {\n \"type\": \"csector\",\n \"name\": \"csector\",\n \"default\": \"IA\",\n \"label\": \"Select state/sector\",\n },\n {\n \"type\": \"select\",\n \"options\": PDICT2,\n \"default\": \"user\",\n \"label\": \"Plotting mode (user defined color-ramp or freezing)\",\n \"name\": \"mode\",\n },\n {\n \"type\": \"cmap\",\n \"name\": \"cmap\",\n \"default\": \"gnuplot2\",\n \"label\": \"Color Ramp:\",\n },\n ]\n return desc", "def __str__(self):\n return self.__class__.__name__ + '\\n' + self.__class__.__doc__", "def __repr__(self):\n doc_string = \"# %s class description:\\n%s\\n# Instance attributes:\\n\" % (self.__class__, self.__doc__)\n # write each argument with its value\n properties = dir(self)\n for elem in properties:\n if not elem.startswith(\"_\"):\n doc_string += \"\\t%s:%s\\n\" % (elem, self.__getattribute__(elem))\n return doc_string", "def test_doc_class(self):\n expected = 'City class handles all application cities'\n actual = City.__doc__\n self.assertEqual(expected, actual)", "def explain(self):", "def _get_description_diagnostics(self) -> ExpectationDescriptionDiagnostics:\n\n camel_name = self.__class__.__name__\n snake_name = camel_to_snake(self.__class__.__name__)\n docstring, short_description = self._get_docstring_and_short_description()\n\n return ExpectationDescriptionDiagnostics(\n **{\n \"camel_name\": camel_name,\n \"snake_name\": snake_name,\n \"short_description\": short_description,\n \"docstring\": docstring,\n }\n )", "def __str__(self):\n return f\"{self._desc:16s}\"", "def __repr__(self):\n return self.description", "def descriptorType(self): # real signature unknown; restored from __doc__\n pass", "def getDescription(self):\n raise NotImplementedError", "def n_cs(self):\n pass", "def __init__(self, type, objective, illumination, ccd):\n self.type = type # e.g. 
Olympus iX73\n self.objective = objective\n self.illumination = illumination\n self.ccd = ccd", "def _description(self):\n return None", "def describe(self):\n print(\"Number of nodes: {0}\".format(self.nnodes))\n print(\"Number of interfaces: {0}\".format(self.ninterfaces))\n print(\"Number of elements: {0}\".format(self.nelements))", "def get_description():\n raise NotImplementedError", "def test_docstring(self):\n self.assertTrue(len(City.__doc__) > 1)\n self.assertTrue(len(City.__init__.__doc__) > 1)\n self.assertTrue(len(City.__str__.__doc__) > 1)\n self.assertTrue(len(City.save.__doc__) > 1)\n self.assertTrue(len(City.to_dict.__doc__) > 1)", "def describe(self):\r\n mdataset_description = {\r\n 'kind': \"HConteiner\",\r\n 'compliance': self._compliance,\r\n 'has_encryption': self.has_encryption,\r\n 'encryption': self._encryption,\r\n 'sensitive': self._sensitive,\r\n 'license': self._license,\r\n }\r\n verbose_event()\r\n return mdataset_description", "def get_description():\n desc = {\"description\": __doc__, \"data\": True}\n desc[\"arguments\"] = [\n dict(\n type=\"station\",\n name=\"station\",\n default=\"IATDSM\",\n label=\"Select Station\",\n network=\"IACLIMATE\",\n ),\n dict(\n type=\"select\",\n name=\"var\",\n default=\"precip\",\n label=\"Which Variable:\",\n options=PDICT,\n ),\n ]\n return desc", "def define_description(self):\n self._description = 'NODDI-based processing of DWI datasets.'", "def check_from_class(self):\n context = TestContext(session_context=ducktape_mock.session_context(),\n cls=DummyTest, function=DummyTest.test_class_description)\n assert context.description == \"class description\"", "def get_description(self):\r\n return self.__description", "def test():\n\n # get the measure trait class\n from p2.traits.Measure import Measure as measure\n\n\n # a client\n class Component:\n \"\"\"\n Simple class with a measure\n \"\"\"\n\n # declare a measure\n attr = measure()\n\n\n # get the attribute; careful not to trigger the descriptor behavior\n attr = Component.__dict__[\"attr\"]\n # verify it's a measure\n assert isinstance(attr, measure)\n # verify it has the right category name\n assert attr.category == \"measure\"\n # and that the trait predicates have the right values\n assert attr.isBehavior == False\n assert attr.isDerivation == False\n assert attr.isMeasure == True\n assert attr.isProperty == False\n assert attr.isFacility == False\n # all done\n return attr", "def help_show(self):\n print(\"print an instance based on the class name and id\")", "def __str__(self):\n return (self._name + \", \" + self._type + \" in \" + self._const)", "def type(cls):", "def info(self):\n return 'an instance of {cls}'.format(cls=self.instance_class.__name__)", "def __str__(self):\r\n \r\n for att in self.__dict__.keys():\r\n print '%s: %r' % (att, getattr(self, att))\r\n \r\n return 'Completeness class object attributes'", "def get_cdt(self):\n return None if self.is_raw() else self.structure.compounddatatype", "def get_description(self) -> str:\n pass", "def attributes(self):", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def describe(self, *args, **kwargs):\n\t\treturn self.data.describe(*args, **kwargs)", "def 
type(self):\n pass", "def type(self):\n pass", "def type(self):\n pass", "def info(self) -> dict:", "def description(self):\n return 'computer'", "def describe(self):\n self.separator()\n print('File Name: ' + self.file_name)\n print('File create date: {}'.format(self.file_header['Creation Date']))\n print('Batch Count: ' + str(self.file_control_record.get('Batch Count')))\n print('Total Debit Amount: ' +\n str(self.file_control_record.get('Total Debit Amount')))\n print(\"Total Credit Amount: \" +\n str(self.file_control_record.get(\"Total Credit Amount\")))\n self.separator()", "def describe(self):\n\n print(json.dumps({\n \"component_type\": self.COMPONENT_TYPE,\n \"output_fields\": _expand_output_fields(self.OUTPUT_FIELDS),\n \"options\": _serialize(self.OPTIONS)}))", "def testDesc(self):\n\n self.assertTrue(\n hasattr(self.cc, 'desc')\n )\n\n self.assertEqual(\n [],\n self.cc.desc\n )" ]
[ "0.6843358", "0.68146247", "0.68146247", "0.671449", "0.66597164", "0.66597164", "0.6572881", "0.6530383", "0.6361396", "0.6335927", "0.6335927", "0.6249203", "0.6237307", "0.6237307", "0.6230096", "0.6198816", "0.61736053", "0.6132944", "0.6123131", "0.6112898", "0.6087091", "0.60717195", "0.6062378", "0.6049518", "0.6034581", "0.60342115", "0.6009802", "0.6006704", "0.60066855", "0.59919596", "0.5978337", "0.59498", "0.5944143", "0.593333", "0.593333", "0.593333", "0.5895217", "0.58837616", "0.58837616", "0.58837616", "0.5852587", "0.5849905", "0.58259296", "0.58239096", "0.58239096", "0.58188295", "0.5817157", "0.5807138", "0.5802207", "0.5768449", "0.57606554", "0.5752631", "0.5752631", "0.5750003", "0.57394797", "0.57394797", "0.57329124", "0.5724363", "0.57147986", "0.5710615", "0.5701125", "0.5691647", "0.5686623", "0.5674501", "0.56608504", "0.56511", "0.5644922", "0.563866", "0.563769", "0.5636376", "0.5625269", "0.56203276", "0.56180614", "0.5612583", "0.5608164", "0.5607232", "0.55883026", "0.55397755", "0.55394894", "0.5511744", "0.5509874", "0.548514", "0.5475316", "0.5465765", "0.5464272", "0.54629904", "0.5462889", "0.5453456", "0.5453087", "0.54500836", "0.5446446", "0.5446446", "0.544433", "0.54431224", "0.54431224", "0.54431224", "0.54416627", "0.5428037", "0.54273164", "0.5426674", "0.54122365" ]
0.0
-1
this class holds images for the microgrid and performs pixel to micron scaling calculations
def __init__(self, gridPath=None, center_to_center_spacing=None, feature_width=None, grid_type='grid', show_grid=False): if gridPath is not None: self.gridPath = gridPath self.spacing = center_to_center_spacing self.width = feature_width self.grid_type = grid_type # find files in directory file_list = glob.glob(join(self.gridPath, 'grid*.tif')) if len(file_list) < 1: raise ValueError("No grid*.tif files found in {}".format(self.gridPath)) img_grid = np.zeros(shape=(512,512)) for f in file_list: img = io.imread(f, plugin='tifffile') if len(np.shape(img)) > 2: img = np.mean(img, axis=0) img_grid += img img_grid = img_grid / len(file_list) self.img_grid = img_grid if show_grid is True: fig, ax = plt.subplots() ax.imshow(img_grid, cmap='gray') ax.set_xlabel('pixels') ax.set_ylabel('pixels') plt.title('grid: 10 um Lines; 50 um Spacing') plt.show()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self):\n #calculate platescale of first input image\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.cd)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.pc)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n pscale = self.datain[0].header['PIXSCAL']\n #filtering out images which are too far away from the others\n #passing images added to a list of (image, WCS) tuples\n '''\n image_centers = []\n for f in self.datain:\n image_centers.append((f.header['CRVAL1'], f.header['CRVAL2']))\n filtered_datain = []\n dist_list = [[[0]*(len(image_centers)-1)]*len(image_centers)]\n for i in range(len(image_centers)):\n for j in range(len(image_centers)-1):\n dist_list[i][j+1] = np.sqrt((image_)**2+()**2)\n '''\n #calculations necessary for updating wcs information\n px = []\n py = []\n \n #in order to avoid NaN interactions, creating weight map\n weights=[]\n for f in self.datain:\n weights.append((np.where(np.isnan(f.image) == True, 0, 1)))\n \n for f in self.datain:\n px.extend(wcs.WCS(f.header).calc_footprint()[:,0])\n py.extend(wcs.WCS(f.header).calc_footprint()[:,1])\n x0 = (max(px)+min(px))/2.\n y0 = (max(py)+min(py))/2.\n sx = (max(px)-min(px))*np.cos(y0/180*np.pi) # arcsec\n sy = (max(py)-min(py)) # arcsec\n size = (sx*3600+self.getarg('pad')*2, sy*3600+self.getarg('pad')*2)\n xpix = size[0]//pscale\n ypix = size[1]//pscale\n cdelt = [pscale/3600.]*2\n \n #create self.dataout and give it a copy of an input's header\n self.dataout = DataFits(config = self.config)\n self.dataout.header = self.datain[0].header.copy()\n \n #update header wcs information\n self.log.info('Creating new WCS header')\n \n self.dataout.header['CRPIX1'] = xpix/2\n self.dataout.header['CRPIX2'] = ypix/2\n self.dataout.header['CRVAL1'] = x0\n self.dataout.header['CRVAL2'] = y0\n self.dataout.header['CD1_1'] = -cdelt[0]\n self.dataout.header['CD1_2'] = self.dataout.header['CD2_1'] = 0.\n self.dataout.header['CD2_2'] = cdelt[1]\n self.dataout.header['NAXIS1'] = int(xpix)\n self.dataout.header['NAXIS2'] = int(ypix)\n self.dataout.header['CTYPE1'] = 'RA---TAN-SIP'\n self.dataout.header['CTYPE2'] = 'DEC--TAN-SIP'\n self.dataout.header['RADESYS'] = 'ICRS'\n self.dataout.header['EQUINOX'] = 2000\n self.dataout.header['LATPOLE'] = self.datain[0].header['CRVAL2']\n self.dataout.header['LONPOLE'] = 180\n self.dataout.header['PIXASEC'] = pscale\n \n theta_rad = np.deg2rad(self.getarg('outangle'))\n rot_matrix = np.array([[np.cos(theta_rad), -np.sin(theta_rad)], \n [np.sin(theta_rad), np.cos(theta_rad)]])\n rot_cd = np.dot(rot_matrix, np.array([[self.dataout.header['CD1_1'], 0.],[0., self.dataout.header['CD2_2']]]))\n for i in [0,1]:\n for j in [0,1]:\n self.dataout.header['CD{0:d}_{1:d}'.format(i+1, j+1)] = rot_cd[i,j]\n \n #check drizzle arguments\n if self.getarg('kernel') == 'smoothing':\n kernel = 'lanczos3'\n elif self.getarg('kernel') in ['square', 'point', 'gaussian', 'tophat']:\n kernel = self.getarg('kernel')\n else:\n self.log.error('Kernel name not recognized, using default')\n kernel = 'square'\n if self.getarg('drizzleweights') == 'uniform':\n driz_wt = ''\n elif self.getarg('drizzleweights') in ['exptime', 'expsq']:\n driz_wt = self.getarg('drizzleweights')\n else:\n self.log.error('Drizzle weighting not recognized, using default')\n driz_wt = ''\n \n #create drizzle object and add input images\n fullwcs = wcs.WCS(self.dataout.header)\n self.log.info('Starting drizzle')\n driz = drz.Drizzle(outwcs = fullwcs, 
pixfrac=self.getarg('pixfrac'), \\\n kernel=kernel, fillval='10000', wt_scl=driz_wt)\n for i,f in enumerate(self.datain):\n self.log.info('Adding %s to drizzle stack' % f.filename)\n driz.add_image(f.imgdata[0], wcs.WCS(f.header), inwht=weights[i])\n \n try:\n fillval=float(self.getarg('fillval'))\n except:\n fillval=np.nan\n self.log.error('Fillvalue not recognized or missing, using default')\n \n #creates output fits file from drizzle output\n self.dataout.imageset(np.where(driz.outsci == 10000, fillval, driz.outsci))\n self.dataout.imageset(driz.outwht,'OutWeight', self.dataout.header)\n self.dataout.filename = self.datain[0].filename\n\n #add history\n self.dataout.setheadval('HISTORY','Coadd: %d files combined with %s kernel, pixfrac %f at %f times resolution' \\\n % (len(self.datain), kernel, self.getarg('pixfrac'), self.getarg('resolution')))", "def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)", "def scale(self):", "def _prepare_image(self, grid):\n grid = np.array(grid, dtype=np.uint8)\n\n width = int(grid.shape[1] * self.scale_percent)\n height = int(grid.shape[0] * self.scale_percent)\n grid = cv2.resize(grid, (width, height), interpolation=cv2.INTER_AREA)\n return grid", "def process(self, image):", "def __init__(self, data, pixscale = 7.77/43):\n self.data = data\n self.pixscale = pixscale", "def __getitem__(self, index):\n A_path = self.A_paths[index % self.A_size] # make sure index is within then range\n #if self.opt.serial_batches: # make sure index is within then range\n \n\n A_img = Image.open(A_path).convert('L')\n \n A = self.transform_A(A_img)\n # B20 = self.transform_B(B20_img)\n #B2 = self.transform_B(B2_img)\n\n\n index_B50 = index % self.B50_size\n B50_path = self.B50_paths[index_B50]\n B50_img = Image.open(B50_path).convert('L')\n B50 = self.transform_B(B50_img)\n\n\n\n index_B100 = index % self.B100_size\n B100_path = self.B100_paths[index_B100]\n B100_img = Image.open(B100_path).convert('L')\n B100 = self.transform_B(B100_img)\n\n index_B150 = index % self.B150_size\n B150_path = self.B150_paths[index_B150]\n B150_img = Image.open(B150_path).convert('L')\n B150 = self.transform_B(B150_img)\n\n\n \n\n index_m0 = 0\n m0_path = self.m0_paths[index_m0]\n m0_img = Image.open(m0_path).convert('L')\n m0 = self.transform_B(m0_img)\n \n index_m50 = 0\n m50_path = self.m50_paths[index_m50]\n m50_img = Image.open(m50_path).convert('L')\n m50 = self.transform_B(m50_img)\n\n index_m100 = 0\n m100_path = self.m100_paths[index_m100]\n m100_img = Image.open(m100_path).convert('L')\n m100 = self.transform_B(m100_img)\n\n index_m150 = 0\n m150_path = self.m150_paths[index_m150]\n m150_img = Image.open(m150_path).convert('L')\n m150 = self.transform_B(m150_img)\n\n\n\n return {'A': A, 'B50': B50,'B100': B100, 'B150': B150, 'A_paths': A_path, 'B50_paths': B50_path,'B100_paths': B100_path, 'B150_paths': B150_path, 'm0':m0, 'm50':m50,'m100':m100, 'm150':m150}", "def make_layers(self):\n w, h = self.image.get_size()\n shrink = pg.transform.smoothscale(self.image, (w//2, h//2))\n self.mid_image = tools.tile_surface((w,h), shrink, True)\n shrink = pg.transform.smoothscale(self.image, (w//4, h//4))\n self.base = tools.tile_surface(prepare.SCREEN_SIZE, shrink, True)", "def 
scaleLandsat(self,img):\n\t\tthermal = img.select(ee.List(['thermal'])).multiply(0.1)\n\t\tscaled = ee.Image(img).select(self.env.divideBands).multiply(ee.Number(0.0001))\n\t\t\n\t\treturn img.select([]).addBands(scaled).addBands(thermal)", "def compute_img(self):\r\n self.load_img()\r\n self.check_shape()\r\n self.convert_img()\r\n self.img_computed = True", "def main():\n test_image = load_image()\n\n pixelate_image(\n normalize_image(test_image)\n )\n pass", "def Rescale(self):\r\n picWidth,picHeight = self.oldSize = self.GetSizeTuple()\r\n bitmap = self.scaled = self.bitmap\r\n if not bitmap: return\r\n imgWidth,imgHeight = bitmap.GetWidth(),bitmap.GetHeight()\r\n if self.scaling == 2 or (self.scaling == 1 and (imgWidth > picWidth or imgHeight > picHeight)):\r\n image = bitmap.ConvertToImage()\r\n factor = min(1.0*picWidth/imgWidth,1.0*picHeight/imgHeight)\r\n newWidth,newHeight = int(factor*imgWidth),int(factor*imgHeight)\r\n self.scaled = image.Scale(newWidth,newHeight).ConvertToBitmap()\r\n #self.scaled = image.Scale(newWidth,newHeight,wx.IMAGE_QUALITY_HIGH ).ConvertToBitmap()\r", "def _image(self):\n print(\"imaging\")\n self.images.append(self.device_control.image())\n yield", "def rasterize(self):\n\n for primitive in self._scene:\n bbox = primitive[\"bounding_box\"]\n # Loop through all pixels\n # You MUST use bounding boxes in order to speed up this loop\n for w in range(bbox[0][0], bbox[1][0]):\n x = w + 0.5\n for h in range(bbox[0][1], bbox[1][1]):\n y = h + 0.5\n # First, we check if the pixel center is inside the primitive\n im_x, im_y = w, self._height - (h + 1)\n if inside(x, y, primitive):\n # apply affine xfrom if needed\n if \"xform\" in primitive.keys():\n result = np.matmul(primitive[\"xform\"],\n [[im_x], [im_y], [1]])\n im_x, im_y = int(result[0][0]), int(result[1][0])\n\n self._image[im_y, im_x] = primitive[\"color\"]\n # break\n # break\n # break", "def run(self):\n self.run_tasks()\n self.images = np.array(self.images)\n self.shapes.extend(self.images.shape[-2:])\n\n self.images = np.reshape(self.images, self.shapes)", "def _scale(self, image):\n\n if image.GetWidth() != self._width or image.GetHeight()!= self._height:\n image.Rescale(self._width, self._height)\n \n return image", "def __init__(self,scale):\n self.scale = scale", "def scale_image(self, pixels, size):\n x_min, x_max = np.amin(pixels[:,0]), np.amax(pixels[:,0])\n y_min, y_max = np.amin(pixels[:,1]), np.amax(pixels[:,1])\n z_min, z_max = np.amin(pixels[:,2]), np.amax(pixels[:,2])\n \n pixels[:,0] -= x_min \n pixels[:,1] -= y_min\n pixels[:,2] -= z_min\n \n x_max -= x_min\n y_max -= y_min\n z_max -= z_min\n \n scale_factor = size / max(x_max, y_max, z_max) \n # All points are now between [0..max]\n\n pixels *= scale_factor\n return pixels", "def generate_images_pred(self, inputs, outputs):\n for scale in self.scales:\n disp = outputs[(\"disp\", scale)]\n disp = F.interpolate(\n disp, [self.height, self.width], mode=\"bilinear\", align_corners=False)\n source_scale = 0\n\n _, depth = disp_to_depth(disp, self.min_depth, self.max_depth)\n\n outputs[(\"depth\", 0, scale)] = depth\n\n for i, frame_id in enumerate(self.frame_ids[1:]):\n\n T = outputs[(\"cam_T_cam\", 0, frame_id)]\n\n # from the authors of https://arxiv.org/abs/1712.00175\n # mean-normalized inverse depth from [62] to discourage shrinking of the estimated depth\n\n axisangle = outputs[(\"axisangle\", 0, frame_id)]\n translation = outputs[(\"translation\", 0, frame_id)]\n\n inv_depth = 1 / depth\n mean_inv_depth = inv_depth.mean(3, 
True).mean(2, True)\n\n T = transformation_from_parameters(\n axisangle[:, 0], translation[:, 0] * mean_inv_depth[:, 0], frame_id < 0)\n\n cam_points = self.backproject_depth[source_scale](\n depth, inputs[(\"inv_K\", source_scale)])\n pix_coords = self.project_3d[source_scale](\n cam_points, inputs[(\"K\", source_scale)], T)\n\n outputs[(\"sample\", frame_id, scale)] = pix_coords\n\n outputs[(\"color\", frame_id, scale)] = F.grid_sample(\n inputs[(\"color\", frame_id, source_scale)],\n outputs[(\"sample\", frame_id, scale)],\n padding_mode=\"border\")\n\n outputs[(\"color_identity\", frame_id, scale)] = \\\n inputs[(\"color\", frame_id, source_scale)]", "def process_image(img):\n img[0] = img[0] * 0.229\n img[1] = img[1] * 0.224\n img[2] = img[2] * 0.225\n img[0] += 0.485\n img[1] += 0.456\n img[2] += 0.406\n\n return img.cpu().numpy().transpose((1, 2, 0))", "def mask_the_images(working_path,set_name):\n\n file_list=glob('/media/talhassid/My Passport/haimTal/test_images_0b8afe447b5f1a2c405f41cf2fb1198e.npy')\n out_images = [] #final set of images for all patients\n for fname in file_list:\n out_images_per_patient = []\n print (\"working on file \", fname)\n imgs_to_process = np.load(fname.replace(\"lungmask\",\"images\")) # images of one patient\n masks = np.load(fname)\n for i in range(len(imgs_to_process)):\n mask = masks[i]\n img = imgs_to_process[i]\n new_size = [512,512] # we're scaling back up to the original size of the image\n img= mask*img # apply lung mask\n #\n # renormalizing the masked image (in the mask region)\n #\n new_mean = np.mean(img[mask>0])\n new_std = np.std(img[mask>0])\n #\n # Pulling the background color up to the lower end\n # of the pixel range for the lungs\n #\n old_min = np.min(img) # background color\n img[img==old_min] = new_mean-1.2*new_std # resetting backgound color\n img = img-new_mean\n img = img/new_std\n #make image bounding box (min row, min col, max row, max col)\n labels = measure.label(mask)\n regions = measure.regionprops(labels)\n #\n # Finding the global min and max row over all regions\n #\n min_row = 512\n max_row = 0\n min_col = 512\n max_col = 0\n for prop in regions:\n B = prop.bbox\n if min_row > B[0]:\n min_row = B[0]\n if min_col > B[1]:\n min_col = B[1]\n if max_row < B[2]:\n max_row = B[2]\n if max_col < B[3]:\n max_col = B[3]\n width = max_col-min_col\n height = max_row - min_row\n if width > height:\n max_row=min_row+width\n else:\n max_col = min_col+height\n #\n # cropping the image down to the bounding box for all regions\n # (there's probably an skimage command that can do this in one line)\n #\n img = img[min_row:max_row,min_col:max_col]\n mask = mask[min_row:max_row,min_col:max_col]\n if max_row-min_row <5 or max_col-min_col<5: # skipping all images with no god regions\n pass\n else:\n # moving range to -1 to 1 to accomodate the resize function\n mean = np.mean(img)\n img = img - mean\n min = np.min(img)\n max = np.max(img)\n img = img/(max-min)\n new_img = resize(img,[512,512], mode='constant')\n out_images_per_patient.append(new_img)\n\n id = re.sub(r'.*_images_(.*)\\.npy',r'\\1',fname)\n patient_images_and_id = (out_images_per_patient,id)\n out_images.append(patient_images_and_id)\n print (\"Delete files: {} \\n\\t {} \".format(fname,re.sub(\"lungmask\",\"images\",fname)))\n os.remove(fname)\n os.remove(fname.replace(\"images\",\"lungmask\")) # images of one patient\n\n\n np.save(working_path+\"{}Images.npy\".format(set_name),out_images)", "def update_image(self):\n if self.filenames:\n pos = self.slider.value()\n proj, 
flat, dark, theta = dx.read_aps_32id(self.filenames, proj=(pos, pos+1))\n if self.ffc_correction:\n image = proj[0,:,:].astype(np.float)/flat[0,:,:].astype(np.float)\n else:\n image = proj[0,:,:].astype(np.float)\n self.image_item.setImage(image)", "def __init__(self,\n directory,\n train=True,\n imsize=(256, 256),\n num_channels=3,\n scale=True,\n invert_white_images=True):\n\n # Sets all attributes.\n args, _, _, values = inspect.getargvalues(inspect.currentframe())\n values.pop(\"self\")\n for arg, val in values.items():\n setattr(self, arg, val)\n\n self.IMG_MAX = 255.0\n\n data_pattern = os.path.join(directory, \"**/images/*.png\")\n\n self.metadata_ = []\n self.masks_ = []\n self.metadata_columns = [\"image_id\", \"orig_shape\"]\n\n self.data_ic_ = ImageCollection(data_pattern)", "def normalise(image):", "def run(self):\n if not self.config.galfile_pixelized:\n raise ValueError(\"Code only runs with pixelized galfile.\")\n\n self.config.check_files(check_zredfile=False, check_bkgfile=True, check_bkgfile_components=False, check_parfile=True, check_zlambdafile=True)\n\n # Compute the border size\n\n self.config.border = self.config.compute_border()\n\n self.config.d.hpix = [self.pixel]\n self.config.d.nside = self.nside\n self.config.d.outbase = '%s_%d_%05d' % (self.config.outbase, self.nside, self.pixel)\n\n # Do the run\n self.config.start_file_logging()\n\n self.config.logger.info(\"Running runcat on pixel %d\" % (self.pixel))\n\n runcat = RunCatalog(self.config)\n if not os.path.isfile(runcat.filename):\n runcat.run(do_percolation_masking=self.config.runcat_percolation_masking)\n runcat.output(savemembers=True, withversion=True)\n\n self.config.stop_file_logging()", "def __init__(self, width, height, tilesize = 256, tileformat='jpg'):\n\n self.tilesize = tilesize\n self.tileformat = tileformat\n imagesize = (width, height)\n tiles = ( math.ceil( width / tilesize ), math.ceil( height / tilesize ) )\n\n # Size (in tiles) for each tier of pyramid.\n self.tierSizeInTiles = []\n self.tierSizeInTiles.push( tiles )\n\n # Image size in pixels for each pyramid tierself\n self.tierImageSize = []\n self.tierImageSize.append( imagesize );\n\n while (imagesize[0] > tilesize or imageSize[1] > tilesize ):\n imagesize = (math.floor( imagesize[0] / 2 ), math.floor( imagesize[1] / 2) )\n tiles = ( math.ceil( imagesize[0] / tilesize ), math.ceil( imagesize[1] / tilesize ) )\n self.tierSizeInTiles.append( tiles )\n self.tierImageSize.append( imagesize )\n\n self.tierSizeInTiles.reverse()\n self.tierImageSize.reverse()\n\n # Depth of the Zoomify pyramid, number of tiers (zoom levels)\n self.numberOfTiers = len(self.tierSizeInTiles)\n\n # Number of tiles up to the given tier of pyramid.\n self.tileCountUpToTier = []\n self.tileCountUpToTier[0] = 0\n for i in range(1, self.numberOfTiers+1):\n self.tileCountUpToTier.append(\n self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] + self.tileCountUpToTier[i-1]\n )", "def get_image_characteristics(self):\r\n self.image_height, self.image_width, self.image_channels = self.image.shape\r\n\r\n # Estimate the cell size to be around a ninth of the width of the screenshot area\r\n self.cell_size = int(self.image_width / 9) | 1\r\n\r\n # Cell size should be at most a ninth of the width and at least a twentieth of the width of the screenshot\r\n # Since a typical grid is 9x9, so it should be at most a ninth of the image width, and it shouldn't be too small\r\n self.min_cell_size = int(self.image_width / 20 * self.image_width / 20)\r\n self.max_cell_size = 
int(self.image_width / 9 * self.image_width / 9)", "def convert(self):\n self.tilewidth = int(self.tilewidth)\n self.tileheight = int(self.tileheight)\n self.width = int(self.width)\n self.height = int(self.height)\n self.pixel_width = self.width * self.tilewidth\n self.pixel_height = self.height * self.tileheight\n for layer in self.layers:\n self.named_layers[layer.name] = layer\n layer.opacity = float(layer.opacity)\n layer.x = int(layer.x)\n layer.y = int(layer.y)\n layer.width = int(layer.width)\n layer.height = int(layer.height)\n layer.pixel_width = layer.width * self.tilewidth\n layer.pixel_height = layer.height * self.tileheight\n layer.visible = bool(int(layer.visible))\n for tile_set in self.tile_sets:\n self.named_tile_sets[tile_set.name] = tile_set\n tile_set.spacing = int(tile_set.spacing)\n tile_set.margin = int(tile_set.margin)\n for img in tile_set.images:\n if img.trans:\n img.trans = (int(img.trans[:2], 16), int(img.trans[2:4], 16), int(img.trans[4:], 16))\n for obj_group in self.object_groups:\n obj_group.x = int(obj_group.x)\n obj_group.y = int(obj_group.y)\n obj_group.width = int(obj_group.width)\n obj_group.height = int(obj_group.height)\n for map_obj in obj_group.objects:\n map_obj.x = int(map_obj.x)\n map_obj.y = int(map_obj.y)\n map_obj.width = int(map_obj.width)\n map_obj.height = int(map_obj.height)", "def convert(self):\n self.tilewidth = int(self.tilewidth)\n self.tileheight = int(self.tileheight)\n self.width = int(self.width)\n self.height = int(self.height)\n self.pixel_width = self.width * self.tilewidth\n self.pixel_height = self.height * self.tileheight\n for layer in self.layers:\n self.named_layers[layer.name] = layer\n layer.opacity = float(layer.opacity)\n layer.x = int(layer.x)\n layer.y = int(layer.y)\n layer.width = int(layer.width)\n layer.height = int(layer.height)\n layer.pixel_width = layer.width * self.tilewidth\n layer.pixel_height = layer.height * self.tileheight\n layer.visible = bool(int(layer.visible))\n for tile_set in self.tile_sets:\n self.named_tile_sets[tile_set.name] = tile_set\n tile_set.spacing = int(tile_set.spacing)\n tile_set.margin = int(tile_set.margin)\n for img in tile_set.images:\n if img.trans:\n img.trans = (int(img.trans[:2], 16), int(img.trans[2:4], 16), int(img.trans[4:], 16))\n for obj_group in self.object_groups:\n obj_group.x = int(obj_group.x)\n obj_group.y = int(obj_group.y)\n obj_group.width = int(obj_group.width)\n obj_group.height = int(obj_group.height)\n for map_obj in obj_group.objects:\n map_obj.x = int(map_obj.x)\n map_obj.y = int(map_obj.y)\n map_obj.width = int(map_obj.width)\n map_obj.height = int(map_obj.height)", "def draw_image(self):\n \n pixel_array = self.imageprepare(self.image_path)\n newArr = self.reshape_pixel_array(pixel_array)\n plt.imshow(newArr, interpolation='nearest')\n plt.savefig('MNIST_IMAGE.png')#save MNIST image\n plt.show()#Show / plot that image", "def get_image(self):\n if self._image is None:\n image_data = np.load(self.image_file)\n if not isinstance(image_data, np.ndarray):\n image_data = image_data['arr_0']\n self.meta_data = ImageWrapper.load_metadata(self.image_file+\".meta\")\n exposure_time = self.meta_data['exposure_time_us'] * 1e-6\n dark_level = float(self.meta_data['black_level'])\n # saturation_mask = image_data.max(axis=2) >= 4094\n image_data = np.clip((image_data.astype(np.float32) - dark_level),\n a_min=0.0, a_max=None) / exposure_time\n if self.original_vignetting is not None:\n image_data = image_data / self.original_vignetting\n if self.crop is not None:\n 
image_data = image_data[\n self.crop[1,0]:self.crop[1,1],\n self.crop[0,0]:self.crop[0,1]\n ]\n # saturation_mask = saturation_mask[\n # self.crop[1,0]:self.crop[1,1],\n # self.crop[0,0]:self.crop[0,1]\n # ]\n if self.down_sample is not None:\n image_data = cv2.resize(\n image_data,\n dsize=None,\n fx=1./self.down_sample,\n fy=1./self.down_sample,\n interpolation=cv2.INTER_AREA\n )\n # saturation_mask = cv2.resize(\n # saturation_mask,\n # dsize=None,\n # fx=1./self.down_sample,\n # fy=1./self.down_sample,\n # interpolation=cv2.INTER_AREA\n # )\n if self.reup_sample is not None:\n image_data = cv2.resize(\n image_data,\n dsize=None,\n fx=self.reup_sample,\n fy=self.reup_sample,\n interpolation=cv2.INTER_CUBIC\n )\n # saturation_mask = cv2.resize(\n # saturation_mask,\n # dsize=None,\n # fx=self.reup_sample,\n # fy=self.reup_sample,\n # interpolation=cv2.INTER_CUBIC\n # )\n image = torch.tensor(np.transpose(image_data, (2,0,1)), dtype=torch.float32, device=self.device)\n # saturation_mask = torch.tensor(saturation_mask, dtype=torch.float32, device=self.device)\n if not self.lazy:\n self._image = image\n # self._saturation_mask = saturation_mask\n else:\n image = self._image\n # saturation_mask = self._saturation_mask\n\n return image#, saturation_mask", "def __init__(self, filename):\n #read the image, scale it and save it\n image = imread(filename)\n self.image = image\n self.scaled = image / 255\n #check if it is in color or grayscale\n if self.scaled.shape[-1] == 3:\n #compute its brightess matrix by averaging the RGB values at each pixel\n self.brightness = self.scaled.mean(axis = 2)\n self.flat_brightness = np.ravel(self.brightness)\n else:\n self.flat_brightness = np.ravel(self.scaled)", "def _preprocessing(self, path: str) -> np.array:\n if Checker.check_input_type_bool(path, 'nii'):\n image = sitk.ReadImage(path)\n self.space = image.GetSpacing()\n image = sitk.GetArrayFromImage(image).astype('float32')\n\n elif Checker.check_input_type_bool(path, 'npy'):\n image = np.load(path)\n self.space = [1., 1., 1.]\n warnings.warn(\n '.npy is not recommended as an image format.'\n 'Since spacing cannot be identified from .npy, spacing is set as [1., 1., 1.].', UserWarning)\n\n elif Checker.check_input_type_bool(path, 'dcm'):\n raise ValueError(\n '.dcm is not supported.'\n 'Please convert dcm dummies to nii format.')\n\n else:\n input_ext = path.split('.')[-1]\n raise ValueError(\n f'.{input_ext} format is not supported.')\n\n self.img_shape = image.shape\n\n # normalize\n windowing_range = [-40., 120.]\n windowing_min = windowing_range[0] - windowing_range[1] // 2\n windowing_max = windowing_range[0] + windowing_range[1] // 2\n image = ndimage.zoom(image, [.5, .5, .5], order=1, mode='constant')\n image = np.clip(image, windowing_min, windowing_max)\n image = (image - windowing_min) / (windowing_max - windowing_min)\n image = image[np.newaxis, ..., np.newaxis]\n return image", "def build_filler_images(self):", "def preprocess(path, path2 , scale):\n image = imread(path)\n label_ = imread(path2)\n\n #label_ = modcrop(label, scale)\n\n # Must be normalized\n input_ = image / 255.\n label_ = label_ / 255.\n\n #input_ = scipy.ndimage.interpolation.zoom(label_, (1./scale), prefilter=False)\n #input_ = scipy.ndimage.interpolation.zoom(input_, (scale/1.), prefilter=False)\n\n return input_, label_", "def __call__(self, results):\n if 'scale' not in results:\n if 'scale_factor' in results:\n img_shape = results['img'].shape[:2]\n scale_factor = results['scale_factor']\n assert 
isinstance(scale_factor, float)\n results['scale'] = tuple([int(x * scale_factor) for x in img_shape][::-1])\n else:\n self._random_scale(results)\n else:\n if not self.override:\n assert 'scale_factor' not in results, 'scale and scale_factor cannot be both set.'\n else:\n results.pop('scale')\n if 'scale_factor' in results:\n results.pop('scale_factor')\n self._random_scale(results)\n\n self._resize_img(results)\n self._resize_bboxes(results)\n self._resize_cbboxes(results)\n self._resize_masks(results)\n self._resize_seg(results)\n\n return results", "def __call__(self, results):\n\n if 'scale' not in results:\n if 'scale_factor' in results:\n img_shape = results['img'].shape[:2]\n scale_factor = results['scale_factor']\n assert isinstance(scale_factor, float)\n results['scale'] = tuple(\n [int(x * scale_factor) for x in img_shape][::-1])\n else:\n self._random_scale(results)\n else:\n if not self.override:\n assert 'scale_factor' not in results, (\n 'scale and scale_factor cannot be both set.')\n else:\n results.pop('scale')\n if 'scale_factor' in results:\n results.pop('scale_factor')\n self._random_scale(results)\n\n self._resize_img(results)\n self._resize_bboxes(results)\n self._resize_masks(results)\n self._resize_seg(results)\n return results", "def __call__(self, results):\n\n if 'scale' not in results:\n if 'scale_factor' in results:\n img_shape = results['img'].shape[:2]\n scale_factor = results['scale_factor']\n assert isinstance(scale_factor, float)\n results['scale'] = tuple(\n [int(x * scale_factor) for x in img_shape][::-1])\n else:\n self._random_scale(results)\n else:\n if not self.override:\n assert 'scale_factor' not in results, (\n 'scale and scale_factor cannot be both set.')\n else:\n results.pop('scale')\n if 'scale_factor' in results:\n results.pop('scale_factor')\n self._random_scale(results)\n\n self._resize_img(results)\n self._resize_bboxes(results)\n self._resize_masks(results)\n self._resize_seg(results)\n return results", "def __init__(self, n_pixels=(250,150),\n submarine=None,\n air_velocity=0.1,\n water_velocity=1.0,\n rock_velocity=10.0,\n **kwargs):\n\n GeneratedGalleryModel.__init__(self)\n\n self.air_velocity = air_velocity\n self.water_velocity = water_velocity\n self.rock_velocity = rock_velocity\n\n if len(n_pixels) not in [2,3]:\n raise ValueError('Submarine-sonar model only works for dimensions greater than 1.')\n\n if submarine is None:\n if len(n_pixels) == 2:\n submarine = default_submarine_2D\n else: # len(n_pixels) == 3\n submarine = default_submarine_3D\n self.submarine = submarine\n\n config_list = list()\n\n # Configure X direction\n x_lbc = kwargs['x_lbc'] if ('x_lbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n x_rbc = kwargs['x_rbc'] if ('x_rbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n\n xmin, xmax = 0.0, 2.5\n x_config = (xmin, xmax, x_lbc, x_rbc)\n config_list.append(x_config)\n\n\n if len(n_pixels) == 3:\n\n # If it is there, configure Y direction\n y_lbc = kwargs['y_lbc'] if ('y_lbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n y_rbc = kwargs['y_rbc'] if ('y_rbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n\n ymin, ymax = 0.0, 2.5\n y_config = (ymin, ymax, y_lbc, y_rbc)\n config_list.append(y_config)\n\n # Configure Z direction\n z_lbc = kwargs['z_lbc'] if ('z_lbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n z_rbc = kwargs['z_rbc'] if ('z_rbc' in list(kwargs.keys())) else PML(0.1, 100.0)\n\n zmin, zmax = 0.0, 1.5\n z_config = (zmin, zmax, z_lbc, z_rbc)\n config_list.append(z_config)\n\n domain = 
RectangularDomain(*config_list)\n\n mesh_args = [domain] + list(n_pixels)\n mesh = CartesianMesh(*mesh_args)\n\n\n self._mesh = mesh\n self._domain = mesh.domain\n\n # Set _initial_model and _true_model\n self.rebuild_models()", "def __init__(self, nb_sub_images, window_size, recovery, image_horiz_size):\n self.nb_sub_images = nb_sub_images\n self.window_size = window_size\n self.recovery = recovery\n self.image_horiz_size = image_horiz_size", "def __init__(self,datamask , h, w, upper, lower):\n self.datamask = datamask\n self.imgidmask = datamask[datamask.type == 'global'].reset_index(drop = True)\n self.h = h\n self.w = w\n self.upper = upper\n self.lower = lower\n self._birads_to_idxs = get_birad()\n self._densities_to_idxs = get_dens()\n self.tfms = get_transform(height = self.h, width =self.w)", "def process(self):\n self.output_image = cv.resize(\n self.input_image,\n (self.WIDTH, self.HEIHGT),\n )\n return self.output_image", "def montage(images, w_sub, h_sub, step):\n target = Image.new('RGB', (w_sub*step, h_sub*step))\n left = 0\n right = w_sub\n for i in range(len(images)):\n top=(i//step)*h_sub\n target.paste(images[i], (left, top, right, top+h_sub))\n if(i//step < (i+1)//step):#Check if this row is done\n left = 0#Reset the position in a row\n right = w_sub\n else: #Next picture\n left += w_sub\n right += w_sub\n quality_value = 100\n return target", "def __call__(self, results):\n # Image is bgr\n img = results['img'][..., ::-1]\n img = Image.fromarray(img)\n img = self.transform(img)\n img = np.asarray(img)\n img = img[..., ::-1]\n results['img'] = img\n return results", "def image(self, image):\n if isinstance(image, Icon):\n self._image = image.image\n elif isinstance(image, RDMs):\n avg_rdm = pool_rdm(image)\n image = avg_rdm.get_matrices()[0]\n self._image = image / np.max(image)\n if self.resolution is None:\n self._resolution = np.array(100)\n elif image is not None:\n self._image = image\n else:\n self._image = None\n self.recompute_final_image()", "def __init__(self,phosphene_resolution=(50,50), size=(480,480), jitter=0.35, intensity_var=0.9, aperture=.66, sigma=0.8, custom_grid=None):\n if custom_grid is None:\n self.phosphene_resolution = phosphene_resolution\n self.size = size\n self.phosphene_spacing = np.divide(size,phosphene_resolution)\n self.jitter = jitter\n self.intensity_var = intensity_var\n self.grid = self.create_regular_grid(self.phosphene_resolution,self.size,self.jitter,self.intensity_var)\n self.aperture = np.round(aperture*self.phosphene_spacing[0]).astype(int) #relative aperture > dilation kernel size\n else:\n self.grid = custom_grid\n self.aperture = aperture\n self.sigma = sigma\n self.dilation_kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE,(self.aperture,self.aperture))\n self.k_size = 11 #np.round(4*sigma+1).astype(int) # rule of thumb: choose k_size>3*sigma", "def transform(self, previousimage):", "def __call__(self, results):\n\n for key in results.get('seg_fields', []):\n if self.scale_factor != 1:\n results[key] = general_ocr.imrescale(\n results[key],\n self.scale_factor,\n interpolation='nearest',\n backend=self.backend)\n return results", "def run(self, image):\n # width, height = image.size\n # resize_ratio = 1.0 * self.INPUT_SIZE / max(width, height)\n # target_size = (int(resize_ratio * width), int(resize_ratio * height))\n target_size = (self.INPUT_SIZE, self.INPUT_SIZE)\n resized_image = image.convert('RGB').resize(target_size, Image.ANTIALIAS)\n net_image = resized_image\n if params.HZ_preprocess_activate:\n net_image = 
params.image_preprocess_func(resized_image)\n net_image = np.expand_dims(net_image, axis=-1)\n batch_seg_map = self.sess.run(\n self.OUTPUT_TENSOR_NAME,\n feed_dict={self.INPUT_TENSOR_NAME: [np.asarray(net_image)]})\n seg_map = batch_seg_map[0]\n return resized_image, seg_map", "def process_image(image):\n # Open the image using PIL\n pil_image = Image.open(image)\n \n # Resize the image to 256x256 while maintining aspect ratio\n if pil_image.width > pil_image.height:\n resize_dim = (int(pil_image.width*256 / pil_image.height), 256)\n else:\n resize_dim = (256, int(pil_image.height*256 / pil_image.width))\n \n pil_image = pil_image.resize(resize_dim)\n \n # Crop image to center 224 pixles\n crop_box_dim = 224\n left = (pil_image.width - crop_box_dim)/2\n top = (pil_image.height - crop_box_dim)/2\n right = pil_image.width - (pil_image.width - crop_box_dim)/2\n bottom = pil_image.height - (pil_image.height - crop_box_dim)/2\n pil_image = pil_image.crop((left, top, right, bottom))\n \n # Update color channels\n np_image = np.array(pil_image)\n np_image_means = np.array([0.485, 0.456, 0.406])\n np_image_stddev = np.array([0.229, 0.224, 0.225])\n np_image = (np_image/255 - np_image_means) / np_image_stddev\n \n # PIL images and numpy arrays have color channels in the 3rd dimension\n # Transpose them to first dimension to match what PyTorch expects\n np_image = np_image.transpose((2,0,1))\n\n return np_image", "def apply_model(self, original, t1, t2, resolution_scaling_factor=1):\n img = Image()\n img.time_stamp = t2\n\n if t1 == t2:\n img.initialize_with_image(original)\n return img\n\n calc_shift_fnc = self.calculate_shift\n orig_get_fnc = original.get\n interp_fnc = my_math.linear_interpolation\n\n def generate(y, x):\n \"\"\"Function describing the transformed image\"\"\"\n realy = y / resolution_scaling_factor\n realx = x / resolution_scaling_factor\n\n # move to time t2\n posy = y + calc_shift_fnc(realy, realx, t2, 0) - \\\n calc_shift_fnc(realy, realx, t1, 0)\n posx = x + calc_shift_fnc(realy, realx, t2, 1) - \\\n calc_shift_fnc(realy, realx, t1, 1)\n\n x_left = int(posx) # math.floor(pos[0])\n x_right = x_left + 1 # math.ceil(pos[0])\n y_down = int(posy) # math.floor(pos[1])\n y_up = y_down + 1 # math.ceil(pos[1])\n\n v11 = orig_get_fnc(y_down, x_left, resolution_scaling_factor)\n v12 = orig_get_fnc(y_down, x_right, resolution_scaling_factor)\n v21 = orig_get_fnc(y_up, x_left, resolution_scaling_factor)\n v22 = orig_get_fnc(y_up, x_right, resolution_scaling_factor)\n\n return interp_fnc(y_down, x_left, y_up, x_right, v11, v12, v21, v22,\n posy, posx)\n\n img.image_data = np.fromfunction(np.vectorize(generate),\n (original.shape()[0]*resolution_scaling_factor,\n original.shape()[1]*resolution_scaling_factor))\n\n if resolution_scaling_factor != 1:\n img.image_data = skimage.transform.resize(img.image_data,\n original.shape(),\n preserve_range=True)\n\n return img", "def __weights(self):\n r, c = np.mgrid[:self.size, :self.size] + 0.5\n rad = np.sqrt((r - self.size/2)**2 + (c - self.size/2)**2)\n img = np.zeros((self.size, self.size))\n rmin = np.sqrt(2) * 0.5 * self.damp * rad.max()\n rmax = np.sqrt(2) * 0.5 * rad.max()\n zone = np.logical_and(rad > rmin, rad < rmax)\n img[rad < rmin] = 1.0\n img[rad > rmax] = 0.0\n img[zone] = (rmax - rad[zone]) / (rmax - rmin)\n return img", "def __init__(self, initial_x:int, initial_y:int, width:int, height:int, power_type:str, time_to_live:int, debug:bool = False):\n\n #Call the superclass contructor\n super().__init__(initial_x, initial_y, width, 
height, PowerUp.sprites[power_type], debug)\n\n #Store variables\n self.power_type = power_type\n self.ttl = time_to_live\n\n #Scale the image\n self.scale(30,30)", "def Make_pixel_metric(data, folder, cadence, argsMV={'cmap':'cool', 'cbar':False}, nside=64, recov=7, nb_day=30, fps=20, SUB=320, BAND=['gri', 'griz', 'g', 'r', 'i', 'z'], FMT=['.:k', '.-k', '.:r', '.:g', '.:b', '.:y'], figsize=(16, 10)):\n\n argsMV['nest'] = True\n \n mjd_i = int(data['mjd'][0])\n mjd_f = int(data['mjd'][-1])\n\n t = np.arange(mjd_i, mjd_f)\n floor = np.floor(data['mjd']+5./24.)\n\n hpx0 = np.zeros(hp.nside2npix(nside)).astype(float) + recov\n tot_pxl = np.size(hpx0)\n f = FocPS()\n p = f.pixellize()\n\n HPX = []\n HPXs = []\n ADDpix = []\n t = t[:nb_day]\n\n MET = np.zeros((np.size(BAND), np.size(t)))\n\n for band in BAND:\n HPX.append(np.copy(hpx0))\n HPXs.append(np.copy(hpx0 * 0))\n ADDpix.append([])\n\n for k, ti in enumerate(t):\n I = np.where(floor == ti)\n d = data[I]\n\n plt.figure(figsize=figsize).suptitle('{} - [{}]'.format(cadence, ti))\n\n for i, band in enumerate(BAND):\n\n hpxN = make_hpmoll(d, HPX[i], recov, band, f, p, SUB + 1 + i, nside, argsMV)\n HPX[i] = np.copy(hpxN)\n\n MET[i, k] = np.size(np.where(HPX[i] != hp.UNSEEN)[0])\n\n fini = HPXs[i][hpxN == hp.UNSEEN]\n ADDpix[i] += list(fini[fini != 0])\n \n HPXs[i][hpxN == hp.UNSEEN] = 0\n HPXs[i][hpxN != hp.UNSEEN] += 1\n\n plt.savefig('Save_Metric/' + folder + cadence + '/fig/fig' + str(k) + '.png')\n plt.close()\n\n\n #Make the film with the figs\n path_folder = 'Save_Metric/' + folder + cadence + '/'\n create_film(nb_day, fps, path_folder+'fig/', prefixe='fig', extension='png')\n\n #Make duration activ pixel metric hsito\n plt.figure(figsize=figsize).suptitle('Count of duration of activ pixel')\n for i, band in enumerate(BAND):\n plt.subplot(SUB + 1 + i)\n plt.hist(ADDpix[i], 200, color='r')\n plt.yscale('log')\n plt.title(band)\n\n plt.savefig('Save_Metric/' + folder + cadence + '/Metric_duration_activ_pixel.png')\n\n #Make the %sky activ pixel metric\n plt.figure(figsize=figsize)\n\n for fmt, band, met in zip(FMT, BAND, MET):\n\n plt.plot(t, met/tot_pxl*100, fmt, label=band)\n\n plt.xlabel('Day')\n plt.ylabel('% of activ pixel in sky')\n plt.title('Metric of Activ pixel of ' + cadence)\n plt.legend()\n\n plt.savefig(path_folder + 'pc_activ_sky.png')\n\n #Save data for cadence set metric\n moy_pcs = np.zeros(np.size(BAND))\n moy_act = np.zeros(np.size(BAND))\n\n for i, met in enumerate(MET):\n moy_pcs[i] = np.mean(met)/tot_pxl*100\n moy_act[i] = np.mean(ADDpix[i])\n\n with open(path_folder + 'save_data.dat', 'rb') as f:\n DATA = pUk(f).load()\n\n DATA['moy_pcs'] = moy_pcs\n DATA['moy_act'] = moy_act\n\n with open(path_folder + 'save_data.dat', 'wb') as f:\n pPk(f).dump(DATA)", "def _process_output(self, result):\n\n scale_h, scale_w = self.last_scales\n\n if result.shape[-1] == 5: # format: [xmin, ymin, xmax, ymax, conf]\n return np.array([[scale_w, scale_h, scale_w, scale_h, 1.0]]) * result\n else: # format: [image_id, label, conf, xmin, ymin, xmax, ymax]\n scale_w *= self.input_width\n scale_h *= self.input_height\n out = np.array([[1.0, scale_w, scale_h, scale_w, scale_h]]) * result[0, 0, :, 2:]\n\n return np.concatenate((out[:, 1:], out[:, 0].reshape([-1, 1])), axis=1)", "def init(self):\n imageDim = u.getDimImage(self.length, 0, 0, 78) # 54.5, 42.3, 66.17\n self.imageInfo['ratio'] = u.getRatio(self.imageInfo['shape'],\n imageDim)\n\n self.measuring = pymeasuring.Measuring(self.imageInfo, self.length)\n\n # rospy.loginfo(\"dims of 
image [mm]: \" + str(imageDim))\n # rospy.loginfo(\"ratios [mm/px]: \" + str(self.imageInfo['ratio']))\n # rospy.loginfo(\"shape [px]: \" + str(self.imageInfo['shape']))\n rospy.loginfo('init of measuring object is complete.')", "def random_scale(im, inst_masks, mask, boxes, classes, scale):\n # scale = np.random.uniform(down, upper)\n h, w, c = im.shape\n if scale > 1:\n \"\"\"\"\"\"\n max_offx = (scale - 1.) * w\n max_offy = (scale - 1.) * h\n offx = int(np.random.uniform() * max_offx)\n offy = int(np.random.uniform() * max_offy)\n im = cv2.resize(im, (0, 0), fx=scale, fy=scale)\n mask = cv2.resize(mask, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_NEAREST)\n im = im[offy: (offy + h), offx: (offx + w)]\n mask = mask[offy: (offy + h), offx: (offx + w)]\n if inst_masks.size > 0:\n inst_masks = np.transpose(inst_masks, (1, 2, 0)) # to (h, w, n)\n inst_masks = cv2.resize(inst_masks, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_NEAREST)\n inst_masks = inst_masks[offy: (offy + h), offx: (offx + w)]\n try:\n if inst_masks.ndim > 2:\n inst_masks = np.transpose(inst_masks, (2, 0, 1)) # to (n, h, w)\n else:\n inst_masks = inst_masks.reshape((1, h, w))\n except ValueError:\n print (inst_masks.ndim, inst_masks.shape)\n raise\n else:\n inst_masks = np.zeros((0, h, w), inst_masks.dtype)\n else:\n \"\"\"\"\"\"\n canvas = np.zeros(im.shape, im.dtype) + np.array([103, 116, 123], im.dtype)\n canvas_mask = np.zeros(mask.shape, mask.dtype)\n max_offx = (scale - 1.) * w\n max_offy = (scale - 1.) * h\n offx = int(np.random.uniform() * max_offx)\n offy = int(np.random.uniform() * max_offy)\n im = cv2.resize(im, (0, 0), fx=scale, fy=scale)\n mask = cv2.resize(mask, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_NEAREST)\n h_, w_, _ = im.shape\n canvas[-offy: (-offy + h_), -offx: (-offx + w_)] = im\n canvas_mask[-offy: (-offy + h_), -offx: (-offx + w_)] = mask\n if inst_masks.size > 0:\n inst_masks = np.transpose(inst_masks, (1, 2, 0)) # to (h, w, n)\n canvas_instmask = np.zeros(inst_masks.shape, inst_masks.dtype)\n inst_masks = cv2.resize(inst_masks, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_NEAREST)\n if inst_masks.ndim == 2:\n inst_masks = inst_masks[:,:, np.newaxis]\n canvas_instmask[-offy: (-offy + h_), -offx: (-offx + w_)] = inst_masks\n canvas_instmask = np.transpose(canvas_instmask, (2, 0, 1)) # to (n, h, w)\n else:\n canvas_instmask = np.zeros((0, h, w), inst_masks.dtype)\n\n im, mask, inst_masks = canvas, canvas_mask, canvas_instmask\n\n boxes = _offset_boxes(boxes, im.shape, scale, [offx, offy], False)\n boxes, classes, inst_masks = _filter_invalid_boxes(boxes, classes, inst_masks, min_size=3)\n\n return im, inst_masks, mask, boxes, classes", "def __init__(self, width, height, tilesize=256, tileformat='jpg'):\n\n self.tilesize = tilesize\n self.tileformat = tileformat\n imagesize = (width, height)\n tiles = (math.ceil(width / tilesize), math.ceil(height / tilesize))\n\n # Size (in tiles) for each tier of pyramid.\n self.tierSizeInTiles = []\n self.tierSizeInTiles.append(tiles)\n\n # Image size in pixels for each pyramid tierself\n self.tierImageSize = []\n self.tierImageSize.append(imagesize)\n\n while (imagesize[0] > tilesize or imagesize[1] > tilesize):\n imagesize = (math.floor(imagesize[0] / 2), math.floor(imagesize[1] / 2))\n tiles = (math.ceil(imagesize[0] / tilesize), math.ceil(imagesize[1] / tilesize))\n self.tierSizeInTiles.append(tiles)\n self.tierImageSize.append(imagesize)\n\n self.tierSizeInTiles.reverse()\n self.tierImageSize.reverse()\n\n # Depth of the 
Zoomify pyramid, number of tiers (zoom levels)\n self.numberOfTiers = len(self.tierSizeInTiles)\n\n # Number of tiles up to the given tier of pyramid.\n self.tileCountUpToTier = []\n self.tileCountUpToTier[0] = 0\n for i in range(1, self.numberOfTiers+1):\n self.tileCountUpToTier.append(\n self.tierSizeInTiles[i-1][0] * self.tierSizeInTiles[i-1][1] +\n self.tileCountUpToTier[i-1]\n )", "def process_image(self):\n pass", "def set_scales(self):\r\n self.canvas.update()\r\n self.dxmin = self.dmargin\r\n self.dymin = self.dmargin\r\n self.dxmax = self.canvas.winfo_width() - self.dmargin - 1\r\n self.dymax = self.canvas.winfo_height() - self.dmargin - 1\r\n\r\n # Flip the Y coordinates to invert the result.\r\n if self.y_is_flipped:\r\n self.dymin, self.dymax = self.dymax, self.dymin\r\n\r\n self.xscale = (self.dxmax - self.dxmin) / (self.wxmax - self.wxmin)\r\n self.yscale = (self.dymax - self.dymin) / (self.wymax - self.wymin)\r\n\r\n # Calculate 1 pixel in world coordinates.\r\n self.xpix = 1 / self.xscale\r\n self.ypix = 1 / self.yscale", "def smaller(self):\n w1, h1 = float(self.imwidth), float(self.imheight)\n w2, h2 = float(self.__huge_size), float(self.__huge_size)\n aspect_ratio1 = w1 / h1\n aspect_ratio2 = w2 / h2 # it equals to 1.0\n if aspect_ratio1 == aspect_ratio2:\n image = Image.new('RGB', (int(w2), int(h2)))\n k = h2 / h1 # compression ratio\n w = int(w2) # band length\n elif aspect_ratio1 > aspect_ratio2:\n image = Image.new('RGB', (int(w2), int(w2 / aspect_ratio1)))\n k = h2 / w1 # compression ratio\n w = int(w2) # band length\n else: # aspect_ratio1 < aspect_ration2\n image = Image.new('RGB', (int(h2 * aspect_ratio1), int(h2)))\n k = h2 / h1 # compression ratio\n w = int(h2 * aspect_ratio1) # band length\n i, j, n = 0, 1, round(0.5 + self.imheight / self.__band_width)\n while i < self.imheight:\n print('\\rOpening image: {j} from {n}'.format(j=j, n=n), end='')\n band = min(self.__band_width, self.imheight - i) # width of the tile band\n self.__tile[1][3] = band # set band width\n self.__tile[2] = self.__offset + self.imwidth * i * 3 # tile offset (3 bytes per pixel)\n self.__image.close()\n self.__image = Image.open(self.path) # reopen / reset image\n self.__image.size = (self.imwidth, band) # set size of the tile band\n self.__image.tile = [self.__tile] # set tile\n cropped = self.__image.crop((0, 0, self.imwidth, band)) # crop tile band\n image.paste(cropped.resize((w, int(band * k) + 1), self.__filter), (0, int(i * k)))\n i += band\n j += 1\n print('\\r' + 30 * ' ' + '\\r', end='') # hide printed string\n return image", "def imgProcessing(self):\n if (self.image_width > 320):\n self.cv_image = imutils.resize(self.cv_image, width = 320)\n else:\n pass\n\n \"\"\" optional -- image-mirrored \"\"\"\n # self.cv_image = cv2.flip(self.cv_image, 1)", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = mmcv.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the mmcv.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = mmcv.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n interpolation=self.interpolation,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n 
dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def process_image(image_path):\n\n # open the image\n image = Image.open(image_path)\n \n # print(\"Original Image size: \", image.size)\n \n # first resize the images where the shortest side is 256 px\n width, height = image.size\n size = 256, 256\n \n newwidth, newheight = None, None\n \n # if the height is the shorter side\n if height < width:\n # find ratio between larger and smaller side\n ratio = float(width) / float(height)\n # resize smaller side to 256\n newheight = 256\n # resize larger side to 256 * ratio\n newwidth = int(floor(ratio * size[0])) \n # else, the width is the shorter side\n else:\n # find ratio between larger and smaller side\n ratio = float(height)/float(width)\n # resize smaller side to 256\n newwidth = 256\n # resize larger side to 256 * ratio\n newheight = int(floor(ratio * size[1]))\n \n \n # print(\"W: {}, H: {}\".format(newwidth, newheight))\n \n # resize the image\n image = image.resize((newwidth, newheight), Image.ANTIALIAS)\n\n # print(\"Resized Image (keep aspect ratio): \", image.size)\n \n # perform center crop\n # https://stackoverflow.com/questions/16646183/crop-an-image-in-the-centre-using-pil\n width, height = image.size # Get dimensions\n new_height, new_width = 224, 224\n \n left = (width - new_width)/2\n top = (height - new_height)/2\n right = (width + new_width)/2\n bottom = (height + new_height)/2\n\n image = image.crop((left, top, right, bottom))\n # print(\"cropped image size: \", image.size)\n \n # convert encoded color channels and convert to floats (divide by 255)\n np_image = np.array(image) / 255\n # print(np_image)\n \n # normalize\n mean = [0.485, 0.456, 0.406]\n std = [0.229, 0.224, 0.225]\n np_image = (np_image - mean) / std\n \n # finally, transpose\n # print(\"shape 1: \", np_image.shape)\n np_image = np_image.transpose((2, 0, 1))\n # print(\"transposed shape: \", np_image.shape)\n \n # Originally, I was returning a numpy array, as I thought these were the instructions, but\n # when trying to test, it would not work. 
\n # Found solution at: https://knowledge.udacity.com/questions/29173\n # We have to convert to a tensor before we return it\n return torch.Tensor(np_image)", "def _resize_img(self, results):\n for key in results.get('img_fields', ['img']):\n if self.keep_ratio:\n img, scale_factor = general_ocr.imrescale(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n # the w_scale and h_scale has minor difference\n # a real fix should be done in the general_ocr.imrescale in the future\n new_h, new_w = img.shape[:2]\n h, w = results[key].shape[:2]\n w_scale = new_w / w\n h_scale = new_h / h\n else:\n img, w_scale, h_scale = general_ocr.imresize(\n results[key],\n results['scale'],\n return_scale=True,\n backend=self.backend)\n results[key] = img\n\n scale_factor = np.array([w_scale, h_scale, w_scale, h_scale],\n dtype=np.float32)\n results['img_shape'] = img.shape\n # in case that there is no padding\n results['pad_shape'] = img.shape\n results['scale_factor'] = scale_factor\n results['keep_ratio'] = self.keep_ratio", "def generate_image(self, imagename, antenna='', cellsize='8arcmin', npix=512, niter=0, threshold='0Jy', weighting='uniform', start=200, stop=900, uvlength=0, uvsign='>', phasecenter='', gridmode='', wprojplanes=1024, script='clean', del_script=True):\n ct.imaging(self.ms, imagename, antenna=antenna, cellsize=cellsize, npix=npix, niter=niter, threshold=threshold, weighting=weighting, start=start, stop=stop, uvlength=uvlength, uvsign=uvsign, phasecenter=phasecenter, gridmode=gridmode, wprojplanes=wprojplanes, script=script, delete=del_script)", "def _scale_to_mbs_frame(self : \"animation\",\n img : \"np.ndarray\"\n ) -> \"np.ndarray\":\n xnew = img.shape[0] + self._mbs - img.shape[0]%self._mbs\n ynew = img.shape[1] + self._mbs - img.shape[1]%self._mbs\n return (255*resize(img, (xnew, ynew))).astype(np.uint8)", "def _preprocessing(\n self, \n path: str, \n img_type: str, \n min_percent: float = 40., \n max_percent: float = 98.5, \n out_min: int = 0, \n out_max: int = 1) -> torch.Tensor:\n\n read_data = nib.load(path)\n data = read_data.get_fdata().astype(np.float32)\n\n if img_type == 'T1':\n pass\n elif img_type == 'MRA':\n # windowing\n w_min = np.percentile(data, min_percent)\n w_max = np.percentile(data, max_percent)\n width = w_max - w_min + 1\n center = w_min + width / 2\n\n data = ((data - center) / width + 0.5) * (out_max - out_min)\n data = np.piecewise(data, [data <= out_min, data >= out_max],\n [out_min, out_max, lambda data: data])\n\n # ToTensor, unsqueezing\n data = torch.from_numpy(data)[np.newaxis, np.newaxis, ...]\n data = data.cuda()\n return data, read_data", "def transform_images(img1,img2):", "def transform_image(self):\n im = cv2.imread(\"result.png\", 0)\n im2 = cv2.resize(im, (28, 28))\n im = im2.reshape(28, 28, -1)\n im = im.reshape(1, 1, 28, 28)\n im = cv2.bitwise_not(im)\n im = im.reshape(28,28)\n \n with out:\n clear_output()\n \n # resize\n img = np.array(im)\n img = img.reshape(28*28,)\n \n #img = img/255.0\n \n return img", "def __init__(self,img_size=[1000,1000], real_size=[22,22]):\n\n super(map_sim, self).__init__()\n\n ## simulation variables..\n undersamp = 2\n self.index = 0\n self.x_pos = np.loadtxt('x_val')[::undersamp]\n self.y_pos = np.loadtxt('y_val')[::undersamp]\n\n ##\n\n self.img_size = img_size\n self.real_size = real_size \n\n self.img = np.zeros(img_size)\n \n self.dx = 1.*real_size[0]/img_size[0]\n self.dy = 1.*real_size[1]/img_size[1]\n \n #\n \n self.win = pg.GraphicsWindow()\n 
self.win.setWindowTitle('scatt anim')\n self.win.show()\n self.plot1 = self.win.addPlot()\n self.plot1.disableAutoRange()\n self.plot1.setRange(xRange=[0,self.img_size[0]], yRange=[0,self.img_size[1]])\n \n self.img1 = pg.ImageItem()\n self.plot1.addItem(self.img1)\n \n self.lut = plt.get_cmap('viridis').colors\n \n self.threadpool = QtCore.QThreadPool(parent=self.win)\n self.threadpool.setMaxThreadCount(1)\n self.update()", "def __init__(self, nx=1000, ny=1000, shift=True):\n # Note that numpy array translate to images in [y][x] order! \n self.nx = int(nx)\n self.ny = int(ny)\n self.image = numpy.zeros((self.ny, self.nx), 'float')\n self.yy, self.xx = numpy.indices(self.image.shape)\n self.padx = 0.0\n self.pady = 0.0\n self.xcen = round(self.nx/2.0)\n self.ycen = round(self.ny/2.0)\n self.fimage = None\n self.psd2d = None\n self.phasespec = None\n self.psd1d = None\n self.acf2d = None\n self.acf1d = None\n self.shift = shift\n return", "def prepare_map(self):\n for y, row in enumerate(self.contents):\n for x, tile in enumerate(row):\n bm = self.get_tile(tile)\n self.image[\n y * TILE_SIZE : (y + 1) * TILE_SIZE,\n x * TILE_SIZE : (x + 1) * TILE_SIZE,\n ] = bm", "def _process_img(self, img):\n\n # ==\n # Construct transforms\n trans_list = [transforms.Resize(self.img_size)]\n if self.grayscale:\n trans_list += [transforms.Grayscale(num_output_channels=1)]\n\n img_transforms = transforms.Compose(trans_list)\n\n # ==\n # Transform and output\n img = img_transforms(img)\n obs = np.array(img, dtype=np.float32)\n\n # Ensure channel is in first dimension (torch conv standard)\n if len(np.shape(obs)) == 2:\n obs = np.expand_dims(obs, axis=0)\n elif len(np.shape(obs)) == 3:\n # PIL have channel on dim 2, swap with dim 0\n obs = np.swapaxes(obs, 2, 0)\n pass\n else:\n raise RuntimeError\n\n # Scale values to [0, 1]\n if self.scale_observation:\n obs = obs / 255.0\n\n # (Optinal) Flatten to vector\n if self.flatten_obs:\n obs = obs.flatten()\n\n return obs", "def set_scale_factors_to_output_size(self):\n # Compute the scale_factor using rounded scaled image size.\n height = tf.shape(self._image)[0]\n width = tf.shape(self._image)[1]\n max_image_size = tf.to_float(tf.maximum(height, width))\n image_scale = tf.to_float(self._output_size) / max_image_size\n scaled_height = tf.to_int32(tf.to_float(height) * image_scale)\n scaled_width = tf.to_int32(tf.to_float(width) * image_scale)\n self._image_scale = image_scale\n self._scaled_height = scaled_height\n self._scaled_width = scaled_width", "def __processImage(self):\n userName = getpass.getuser()\n pathImage = os.path.expanduser('~/StructureImage')\n imageFile = pathImage + '/structure.png'\n try:\n print('read the image')\n binaryImage = sciimage.imread(imageFile, True)\n except FileNotFoundError:\n print('The image file or the directory does not exist.')\n except:\n print('Other errors happen.')\n ySize, xSize = binaryImage.shape\n xPosition = []; yPosition = []\n for i in sp.arange(ySize):\n for j in sp.arange(xSize):\n if (binaryImage[i, j] == 0.0):\n yPosition.append(i)\n xPosition.append(j)\n xPosition = np.array(xPosition); yPosition = np.array(yPosition)\n xMin = xPosition.min(); xMax = xPosition.max()\n yMin = yPosition.min(); yMax = yPosition.max()\n #redefine the domain\n if (self.duplicateDomain == \"'no'\"):\n self.effectiveDomain = binaryImage[yMin:(yMax + 1), xMin:(xMax + 1)]\n elif (self.duplicateDomain == \"'yes'\"):\n tmpDomain = binaryImage[yMin:(yMax + 1), xMin:(xMax + 1)]\n xDirectionNum = int(input(\"Number of duplication in x 
direction: \"))\n yDirectionNum = int(input(\"Number of duplication in y direction: \"))\n self.effectiveDomain = self.__expandImageDomain(tmpDomain, xDirectionNum, \\\n yDirectionNum)\n yDimension, xDimension = self.effectiveDomain.shape\n self.effectiveDomain[:, 0] = 0.; self.effectiveDomain[:, -1] = 0.\n tmpBufferLayer = np.zeros(xDimension, dtype = np.float64)\n tmpBufferLayer[:] = 255.\n for i in sp.arange(40):\n if (i < 20):\n self.effectiveDomain = np.vstack((tmpBufferLayer, self.effectiveDomain))\n else:\n self.effectiveDomain = np.vstack((self.effectiveDomain, tmpBufferLayer))", "def _process(self, data: np.ndarray) -> np.ndarray:\n return data[..., 1] * self.scale", "def _preprocessing(self, input_image):\n if self.resize:\n input_image = self._np_resize_image(input_image,\n self.input_size,\n dtype='int')\n image = self._np_transpose(input_image)\n image = self._np_normalize(image)\n image = self._np_flip_n_cat(image)\n return image", "def load_images_from_folder(folder, n_cases,patch_size, mask_path, mask_type, mask_name,normalize=False, imrotate=False):\n\n# # Initialize the arrays:\n# if imrotate: # number of images is 4 * n_im\n# bigy = np.empty((n_im * 4, 64, 64))\n# bigx = np.empty((n_im * 4, 64, 64, 2))\n# else:\n# bigy = np.empty((n_im, 64, 64))\n# bigx = np.empty((n_im, 64, 64, 2))\n\n# im = 0 # image counter\n bigy = []\n filenames = os.listdir(folder)\n\n for filename in filenames[n_cases[0]:n_cases[1]]:\n if not filename.startswith('.'):\n temp = loadmat(os.path.join(folder, filename))['res']\n print temp.shape\n # Clean the STONE sense recon data\n row, col = temp.shape\n temp = np.reshape(temp, (row, col, -1))\n #valid_mask = (np.abs(np.squeeze(temp[int(row/2), int(col/2), :])) != 0)\n #final_images = temp[:,:,valid_mask]\n final_images = temp\n \n# # Resize images\n #final_images = np.abs(final_images)\n final_images_resized = np.zeros((patch_size,patch_size,final_images.shape[2]))\n for i in range(final_images.shape[2]):\n final_images_resized[:,:,i] = cv2.resize(final_images[:,:,i], (patch_size,patch_size))\n \n# # Only take a small part of the data\n# final_images = final_images[140:180,140:180,:]\n \n# # Convert to abs values\n# final_images = np.abs(final_images)\n# \n# # Normalize based on single patient case\n# final_images = (final_images - np.mean(final_images)) / np.std(final_images)\n \n# bigy_temp = cv2.imread(os.path.join(folder, filename),\n# cv2.IMREAD_GRAYSCALE)\n \n \n bigy.append(final_images_resized)\n \n bigy = np.asarray(bigy)\n cases, row, col, imgs = bigy.shape\n bigy = np.transpose(np.reshape(np.transpose(bigy, (1,2,3,0)), (row, col, -1)), (2,0,1))\n \n # convert to k-space\n imgs, row, col = bigy.shape\n bigx = np.empty((imgs, row, col, 2))\n mask = read_mask(mask_path=mask_path,mask_type=mask_type,mask_name=mask_name,patch_size=patch_size,show_image=False)\n for i in range(imgs):\n bigx[i, :, :, :] = create_x(np.squeeze(bigy[i,:,:]),mask)\n \n # convert bigx from complex to abs values\n bigy = np.abs(bigy)\n \n# im += 1\n# if imrotate:\n# for angle in [90, 180, 270]:\n# bigy_rot = im_rotate(bigy_temp, angle)\n# bigx_rot = create_x(bigy_rot, normalize)\n# bigy[im, :, :] = bigy_rot\n# bigx[im, :, :, :] = bigx_rot\n# im += 1\n\n# if imrotate:\n# if im > (n_im * 4 - 1): # how many images to load\n# break\n# else:\n# if im > (n_im - 1): # how many images to load\n# break\n\n# if normalize:\n# bigx = (bigx - np.amin(bigx)) / (np.amax(bigx) - np.amin(bigx))\n\n return bigx, bigy", "def __call__(self, in_data):\n # There are five data 
augmentation steps\n # 1. Color augmentation\n # 2. Random expansion\n # 3. Random cropping\n # 4. Resizing with random interpolation\n # 5. Random horizontal flipping\n if self.count % 10 == 0 and self.count % self.batchsize == 0 and self.count != 0:\n self.i += 1\n i = self.i % len(self.dim)\n self.output_shape = (self.dim[i], self.dim[i])\n # print(self.count, self.i, self.output_shape)\n self.count += 1\n\n img, bbox, label = in_data\n\n # 1. Color augmentation\n img = random_distort(img, brightness_delta=32,\n contrast_low=0.5, contrast_high=1.5,\n saturation_low=0.5, saturation_high=1.5,\n hue_delta=25)\n\n # Normalize. range is [0, 1]\n img /= 255.0\n\n _, H, W = img.shape\n scale = np.random.uniform(0.25, 2)\n random_expand = np.random.uniform(0.8, 1.2, 2)\n net_h, net_w = self.output_shape\n out_h = net_h * scale # random_expand[0]\n out_w = net_w * scale # random_expand[1]\n if H > W:\n out_w = out_h * (float(W) / H) * np.random.uniform(0.8, 1.2)\n elif H < W:\n out_h = out_w * (float(H) / W) * np.random.uniform(0.8, 1.2)\n\n out_h = int(out_h)\n out_w = int(out_w)\n\n img = resize_with_random_interpolation(img, (out_h, out_w))\n bbox = transforms.resize_bbox(bbox, (H, W), (out_h, out_w))\n\n if out_h < net_h and out_w < net_w:\n img, param = expand(img, out_h=net_h, out_w=net_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n else:\n out_h = net_h if net_h > out_h else int(out_h * 1.05)\n out_w = net_w if net_w > out_w else int(out_w * 1.05)\n img, param = expand(img, out_h=out_h, out_w=out_w,\n fill=self.value, return_param=True)\n bbox = transforms.translate_bbox(\n bbox, y_offset=param['y_offset'], x_offset=param['x_offset'])\n\n img, param = crop_with_bbox_constraints(\n img, bbox, return_param=True,\n crop_height=net_h, crop_width=net_w)\n bbox, param = transforms.crop_bbox(\n bbox, y_slice=param['y_slice'], x_slice=param['x_slice'],\n allow_outside_center=False, return_param=True)\n label = label[param['index']]\n\n\n # 5. 
Random horizontal flipping # OK\n img, params = transforms.random_flip(\n img, x_random=True, return_param=True)\n bbox = transforms.flip_bbox(\n bbox, self.output_shape, x_flip=params['x_flip'])\n\n # Preparation for Yolov2 network\n bbox[:, ::2] /= self.output_shape[0] # y\n bbox[:, 1::2] /= self.output_shape[1] # x\n\n num_bbox = len(bbox)\n len_max = max(num_bbox, self.max_target)\n\n gmap = create_map_anchor_gt(bbox, self.anchors, self.output_shape,\n self.downscale, self.n_boxes, len_max)\n\n out_bbox = np.zeros((len_max, 4), dtype='f')\n out_bbox[:num_bbox] = bbox[:num_bbox]\n out_label = np.zeros((len_max), dtype='i')\n out_label[:num_bbox] = label\n\n gmap = gmap[:self.max_target]\n out_bbox = out_bbox[:self.max_target]\n out_label = out_label[:self.max_target]\n num_array = min(num_bbox, self.max_target)\n\n img = np.clip(img, 0, 1)\n return img, out_bbox, out_label, gmap, np.array([num_array], dtype='i')", "def __init__(self, path: Path, rescale_factor: float) -> None:\n # FIXME: Handle both png and jpg here\n file_types = [\"*.jpg\", \"*.png\"]\n image_files = [str(file) for type in file_types for file in path.rglob(type)]\n\n color_images = [cv2.imread(image) for image in image_files]\n grayscale_images = [cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) for image in color_images]\n\n self.high_res_images = [rescale_image(image, rescale_factor) for image in color_images]\n self.low_res_images = [rescale_image(image, rescale_factor) for image in grayscale_images]", "def __init__(self, array: ReadCache, y_scale: float, y_spacing: float, chan_map: ChannelMap,\n nav_trace: np.ndarray, x_scale: Union[float, np.ndarray]=1.0, load_channels: Union[list, str]='all',\n max_zoom: float=120.0, units: str='V'):\n\n nchan = array.shape[0]\n if isinstance(load_channels, str) and load_channels.lower() == 'all':\n load_channels = range(nchan)\n self.chan_map = chan_map\n elif len(load_channels) == len(chan_map):\n self.chan_map = chan_map\n elif len(load_channels) < len(chan_map):\n # \"load_channels\" indexes an array of recording channels\n # that may include grounded channels or other junk.\n raise NotImplementedError('cannot yet subset data channels')\n # new_cm = ChannelMap( [chan_map[i] for i in load_channels],\n # chan_map.geometry,\n # col_major=chan_map.col_major )\n # self.chan_map = new_cm\n else:\n raise ValueError('cannot map the listed channels')\n\n self.array = array\n self.y_scale = y_scale\n if isinstance(load_channels, str) and load_channels == 'all':\n load_channels = list(range(len(array)))\n self.load_channels = load_channels\n\n # The main window + layout\n self.win = pg.GraphicsLayoutWidget(border=(10, 10, 10))\n layout = self.win.ci\n # Adding columns to layout: just titles on the top row\n layout.addLabel('Array heatmap')\n layout.addLabel('Zoomed plot')\n layout.addLabel('|')\n # Next row has 1) the heatmap image with the colorbar widget\n layout.nextRow()\n sub_layout = layout.addLayout(colspan=1)\n self.img = pg.ImageItem(image=np.random.randn(*self.chan_map.geometry) * y_spacing) # * 1e6 / 2)\n cmap = pg.colormap.get('coolwarm', source='matplotlib')\n p_img = sub_layout.addPlot()\n self.p_img = p_img\n p_img.getViewBox().setAspectLocked()\n p_img.addItem(self.img)\n p_img.hideAxis('bottom')\n p_img.hideAxis('left')\n mid_x, top_y = self.chan_map.geometry[1] / 2.0, self.chan_map.geometry[0] + 2.0\n\n # add a text label on top of the box (\"anchor\" has text box is centered on its x, y position)\n self.frame_text = pg.TextItem('empty', anchor=(0.5, 0.5), color=(255, 255, 
255))\n self.frame_text.setPos(mid_x, top_y)\n # self.vb_img.addItem(self.frame_text)\n p_img.getViewBox().addItem(self.frame_text)\n p_img.getViewBox().autoRange()\n\n # colorbar\n self.cb = pg.ColorBarItem(limits=None, colorMap=cmap, hoverBrush='#EEEEFF80',\n rounding=10e-6, values=(-y_spacing, y_spacing))\n self.cb.getAxis('left').setLabel('')\n self.cb.getAxis('right').setLabel('Voltage', units='V')\n self.cb.setImageItem(self.img)\n sub_layout.addItem(self.cb)\n\n # 2) the stacked traces plot (colspan 2)\n axis = PlainSecAxis(orientation='bottom')\n self.p1 = layout.addPlot(colspan=2, row=1, col=1,\n axisItems={'bottom':axis})\n\n self.p1.enableAutoRange(axis='y', enable=True)\n self.p1.setAutoVisible(y=False)\n self.p1.setLabel('left', 'Amplitude', units=units)\n self.p1.setLabel('bottom', 'Time', units='s')\n\n # Next layout row has the navigator plot (colspan 3)\n layout.nextRow()\n\n # The navigator plot\n axis = HMSAxis(orientation='bottom')\n self.p2 = layout.addPlot(row=2, col=0, colspan=3,\n axisItems={'bottom':axis})\n self.p2.setLabel('left', 'Amplitude', units=units)\n self.p2.setLabel('bottom', 'Time')\n self.region = pg.LinearRegionItem() \n self.region.setZValue(10)\n\n # Add the LinearRegionItem to the ViewBox,\n # but tell the ViewBox to exclude this \n # item when doing auto-range calculations.\n self.p2.addItem(self.region, ignoreBounds=True)\n\n # Multiple curve set that calls up data on-demand\n self.curve_manager = CurveManager(plot=self.p1)\n curves = PlotCurveCollection(array, load_channels, x_scale, y_scale, y_spacing, False)\n x_scale = curves.dx\n # curves.setPen('w', width=1)\n self.curve_manager.add_new_curves(curves, 'all', set_source=True)\n # Set the heatmap to track these curves\n self.curve_manager.heatmap_name = 'all'\n\n initial_pts = 5000\n self.region.setRegion([0, initial_pts * x_scale])\n\n self.p1.setAutoVisible(y=True)\n self.p1.setXRange(0, initial_pts * x_scale)\n self.p1.vb.setLimits(maxXRange=max_zoom)\n\n\n # Selected curve & label set that calls up data on-demand\n labels = ['({}, {})'.format(i, j) for i, j in zip(*chan_map.to_mat())]\n selected_curves = LabeledCurveCollection(curves, labels, clickable=True)\n self.curve_manager.add_new_curves(selected_curves, 'selected')\n for text in selected_curves.texts:\n self.p1.addItem(text)\n\n # Add mean trace to bottom plot\n self.nav_trace = pg.PlotCurveItem(x=np.arange(len(nav_trace)) * x_scale, y=nav_trace)\n self.p2.addItem(self.nav_trace)\n self.p2.setXRange(0, min(5e4, len(nav_trace))*x_scale)\n self.p2.setYRange(*np.percentile(nav_trace, [1, 99]))\n \n # Set bidirectional plot interaction\n # need to hang onto references?\n self._db_cnx1 = DebounceCallback.connect(self.region.sigRegionChanged, self.update_zoom_callback)\n self._db_cnx2 = DebounceCallback.connect(self.p1.sigXRangeChanged, self.update_region_callback)\n\n # Do navigation jumps (if shift key is down)\n self.p2.scene().sigMouseClicked.connect(self.jump_nav)\n\n # Do fine interaction in zoomed plot with vertical line\n self.vline = pg.InfiniteLine(angle=90, movable=False)\n self.p1.addItem(self.vline)\n self.p1.scene().sigMouseMoved.connect(self.fine_nav)\n \n # final adjustments to rows: args are (row, stretch)\n # TODO: deprecation warning here -- do not understand why\n self.win.centralWidget.layout.setRowStretchFactor(0, 0.5)\n self.win.centralWidget.layout.setRowStretchFactor(1, 5)\n self.win.centralWidget.layout.setRowStretchFactor(2, 2.5)\n\n # a callable frame filter may be set on this object to affect frame display\n 
self.frame_filter = None\n\n # set up initial frame\n self.set_mean_image()", "def gen_grids(self):\n self.dx = self.grid_width / self.grid_resol\n self.dk = 2 * np.pi/self.grid_width\n self.grid_x_shifted = -self.grid_width/2 + self.dx * np.arange(0, self.grid_resol)\n self.grid_x = self.grid_x_shifted + self.grid_center\n self.grid_k = - (np.pi * self.grid_resol)/self.grid_width + self.dk * np.arange(0, self.grid_resol)\n self.grid_k = np.roll(self.grid_k, int((self.grid_resol)/2))\n self.grid_kin = np.square(self.h)/ (2*self.m) * np.square(self.grid_k)", "def normalize_images(image_sitk):\n\n max = 400\n min = -1000\n\n image_np = sitk.GetArrayFromImage(image_sitk)\n\n # Normalization\n image_np = (image_np - min)/(max - min)\n image_np[image_np > 1] = 1\n image_np[image_np < 0] = 0\n\n # Convert back to SITK\n out_image_sitk = sitk.GetImageFromArray(image_np)\n out_image_sitk.CopyInformation(image_sitk)\n\n return out_image_sitk", "def tf_pil(self):\n im_w, im_h = self.img.size\n self.orig_width = im_w\n self.orig_height = im_h\n if im_w == self.netw and im_h == self.neth:\n self.scale = None\n self.pad = None\n return self.img\n\n # Rescaling\n if im_w / self.netw >= im_h / self.neth:\n self.scale = self.netw / im_w\n else:\n self.scale = self.neth / im_h\n if self.scale != 1:\n resample_mode = Image.NEAREST #Image.BILINEAR if self.scale > 1 else Image.ANTIALIAS\n self.img = self.img.resize((int(self.scale*im_w), int(self.scale*im_h)), resample_mode)\n im_w, im_h = self.img.size\n\n if im_w == self.netw and im_h == self.neth:\n self.pad = None\n return self.img\n\n # Padding\n img_np = np.array(self.img)\n channels = img_np.shape[2] if len(img_np.shape) > 2 else 1\n pad_w = (self.netw - im_w) / 2\n pad_h = (self.neth - im_h) / 2\n self.pad = (int(pad_w), int(pad_h), int(pad_w+.5), int(pad_h+.5))\n self.img = ImageOps.expand(self.img, border=self.pad, fill=(self.fill_color,)*channels)\n return self.img", "def preprocess(path, scale=3):\n image = imread(path, is_grayscale=True)\n label_ = modcrop(image, scale)\n\n # Must be normalized\n \n label_ = label_ / 255.\n \n\n\n input_ = scipy.ndimage.interpolation.zoom(label_, (1. 
/ scale), prefilter=False)\n input_ = scipy.ndimage.interpolation.zoom(input_, (scale / 1.), prefilter=False)\n\n return input_, label_", "def scales(self):\n scales = queryMultiAdapter((self, self.request), name=\"images\")\n return scales", "def process_files(self):\n for filename in self.temp_directory.iterdir():\n im = Image.open(str(filename))\n scaled = im.resize((640, 480))\n scaled.save(str(filename))", "def __init__(self):\n # Effective batch size\n self.BATCH_SIZE = self.IMAGES_PER_GPU * self.GPU_COUNT\n\n # Input image size\n if self.IMAGE_RESIZE_MODE == \"crop\":\n self.IMAGE_SHAPE = np.array([self.IMAGE_MIN_DIM, self.IMAGE_MIN_DIM,\n self.IMAGE_CHANNEL_COUNT])\n else:\n self.IMAGE_SHAPE = np.array([self.IMAGE_MAX_DIM, self.IMAGE_MAX_DIM,\n self.IMAGE_CHANNEL_COUNT])\n\n # Image meta data length\n # See compose_image_meta() for details\n self.IMAGE_META_SIZE = 1 + 3 + 3 + 4 + 1 + self.NUM_CLASSES", "def resizeContours(self):\n scale = 500/self.images.shape[1]\n print('Scaling images by {} for display'.format(scale))\n self.lumenCopy = (self.lumen[0][:], self.lumen[1][:])\n self.plaqueCopy = (self.plaque[0][:], self.plaque[1][:])\n self.stentCopy = (self.stent[0][:], self.stent[1][:])\n self.lumen = self.resize(self.lumen, scale)\n self.plaque = self.resize(self.plaque, scale)\n self.stent = self.resize(self.stent, scale)", "def image_cube(self, i=0, channels=None, cell=1.0, imagesize=4096, pol='i'):\n\n # select integration and reduce pol axis\n if ((pol == 'i') | (pol == 'I')):\n if len(trackdata) == 2:\n print 'Making Stokes I image as mean of two pols...'\n else:\n print 'Making Stokes I image as mean over all pols. Hope that\\'s ok...'\n tr=self.data.mean(axis=3)[0]\n elif isinstance(pol, types.IntType):\n print 'Making image of pol %d' % (pol)\n tr=self.data[0,:,:,pol]\n \n # res and size in aipy units (lambda)\n # size is pixel scale (cell size)\n size=1/n.radians(cell/60.0)\n # full field\n res=size/imagesize\n fov = n.degrees(1./res)*3600. # field of view in arcseconds\n\n if channels is None:\n channels=range(self.nchan)\n\n # form channel dependent uvw\n u_ch = n.outer(self.u[i], self.freq/self.freq_orig[0])\n v_ch = n.outer(self.v[i], self.freq/self.freq_orig[0])\n w_ch = n.outer(self.w[i], self.freq/self.freq_orig[0])\n\n # make image\n image=n.zeros((len(channels),size/res,size/res))\n for c,ic in zip(channels,xrange(len(channels))):\n print 'Creating image for channel %d...' 
% c\n ai = aipy.img.Img(size=size, res=res)\n uvw_new, tr_new = ai.append_hermitian( (u_ch[:,c], v_ch[:,c], w_ch[:,c]), tr[:,c])\n ai.put(uvw_new, tr_new)\n image[ic] = ai.image(center = (size/res/2, size/res/2))\n\n self.ai=ai\n self.image_center=(size/res/2, size/res/2)\n return image", "def pnet_process(self, image, height, width):\n image = cv2.resize(image, (width, height)).astype(np.float32)\n image[:, :, 0] -= self.mean[0]\n image[:, :, 1] -= self.mean[1]\n image[:, :, 2] -= self.mean[2]\n image *= self.scale_factor\n image = np.transpose(image, (2, 0, 1))\n image = image.reshape((1, image.shape[0], image.shape[1], image.shape[2]))\n return image.copy()", "def ulab_bilinear_interpolation():\n GRID_DATA[1::2, ::2] = SENSOR_DATA[:-1, :]\n GRID_DATA[1::2, ::2] += SENSOR_DATA[1:, :]\n GRID_DATA[1::2, ::2] /= 2\n GRID_DATA[::, 1::2] = GRID_DATA[::, :-1:2]\n GRID_DATA[::, 1::2] += GRID_DATA[::, 2::2]\n GRID_DATA[::, 1::2] /= 2", "def _prepare_im(self, im):\n # Train and test setups differ\n train_size = cfg.TRAIN.IM_SIZE\n if \"train\" in self._split:\n # Scale and aspect ratio then horizontal flip\n im = transforms.random_sized_crop(im=im, size=train_size, area_frac=0.08)\n im = transforms.horizontal_flip(im=im, p=0.5, order=\"HWC\")\n else:\n # Scale and center crop\n im = transforms.scale(cfg.TEST.IM_SIZE, im)\n im = transforms.center_crop(train_size, im)\n # HWC -> CHW\n im = im.transpose([2, 0, 1])\n # [0, 255] -> [0, 1]\n im = im / 255.0\n # PCA jitter\n if \"train\" in self._split:\n im = transforms.lighting(im, 0.1, _EIG_VALS, _EIG_VECS)\n # Color normalization\n im = transforms.color_norm(im, _MEAN, _SD)\n return im", "def __init__(self):\n\n self._P = 0 # number of pixels\n self._x = 0.0 # x-coordinate of center of mass, i.e.\n # the avg x-coordinate\n self._y = 0.0 # y-coordinate of center of mass, i.e.\n # the avg y-coordinate", "def simple_image(self, i=0, c=0, cell=1.0, imagesize=4096):\n\n # select integration and channel\n track=n.rollaxis(self.data[i,:,c,:].reshape((self.nbl,1,1)),2)\n\n # take mean over frequency => introduces delay beam \n truearr = n.ones( n.shape(track) )\n falsearr = 1e-5*n.ones( n.shape(track) ) # need to set to small number so n.average doesn't go NaN\n weightarr = n.where(track != 0j, truearr, falsearr) # ignore zeros in mean across channels # bit of a hack \n track = n.average(track, axis=2, weights=weightarr)\n\n # select integration and reduce pol axis\n if ((pol == 'i') | (pol == 'I')):\n if len(trackdata) == 2:\n print 'Making Stokes I image as mean of two pols...'\n else:\n print 'Making Stokes I image as mean over all pols. Hope that\\'s ok...'\n tr=track.mean(axis=0)\n elif isinstance(pol, types.IntType):\n print 'Making image of pol %d' % (pol)\n tr=track[pol]\n \n # res and size in aipy units (lambda)\n # size is pixel scale (cell size)\n size=1/n.radians(cell/60.0)\n # full field\n res=size/imagesize\n fov = n.degrees(1./res)*3600. 
# field of view in arcseconds\n\n # form channel dependent uvw\n u_ch = n.outer(self.u[i], self.freq/self.freq_orig[0])\n v_ch = n.outer(self.v[i], self.freq/self.freq_orig[0])\n w_ch = n.outer(self.w[i], self.freq/self.freq_orig[0])\n \n # make image\n ai = aipy.img.Img(size=size, res=res)\n uvw_new, tr_new = ai.append_hermitian( (u_ch[:,c], v_ch[:,c], w_ch[:,c]), tr)\n ai.put(uvw_new, tr_new)\n image = ai.image(center = (size/res/2, size/res/2))\n\n self.ai=ai\n self.image_center=(size/res/2, size/res/2)\n \n return image", "def __init__(self,filename) :\n # create an MImage object\n self.image=om.MImage()\n # read from file MImage should handle errors for us so no need to check\n self.image.readFromFile(filename)\n # as the MImage class is a wrapper to the C++ module we need to access data\n # as pointers, to do this use the MScritUtil helpers\n self.scriptUtilWidth = om.MScriptUtil()\n self.scriptUtilHeight = om.MScriptUtil()\n\n # first we create a pointer to an unsigned in for width and height\n widthPtr = self.scriptUtilWidth.asUintPtr()\n heightPtr = self.scriptUtilHeight.asUintPtr()\n # now we set the values to 0 for each\n self.scriptUtilWidth.setUint( widthPtr, 0 )\n self.scriptUtilHeight.setUint( heightPtr, 0 )\n # now we call the MImage getSize method which needs the params passed as pointers\n #as it uses a pass by reference\n self.image.getSize( widthPtr, heightPtr )\n # once we get these values we need to convert them to int so use the helpers\n self.m_width = self.scriptUtilWidth.getUint(widthPtr)\n self.m_height = self.scriptUtilHeight.getUint(heightPtr)\n\n # now we grab the pixel data and store\n self.charPixelPtr = self.image.pixels()\n # query to see if it's an RGB or RGBA image, this will be True or False\n self.m_hasAlpha=self.image.isRGBA()\n # if we are doing RGB we step into the image array in 3's\n # data is always packed as RGBA even if no alpha present\n self.imgStep=4\n # finally create an empty script util and a pointer to the function\n # getUcharArrayItem function for speed\n scriptUtil = om.MScriptUtil()\n self.getUcharArrayItem=scriptUtil.getUcharArrayItem\n\n self.scriptUtilWidth = om.MScriptUtil()\n self.scriptUtilHeight = om.MScriptUtil()\n\n # first we create a pointer to an unsigned in for width and height\n widthPtr = self.scriptUtilWidth.asUintPtr()\n heightPtr = self.scriptUtilHeight.asUintPtr()", "def create_scale(tiff_fname, tfw_fname, scale_len=100):\n try:\n # Acquire image info\n im = Image.open(tiff_fname)\n print(\"Image has structure: {0}\".format(im.__str__()))\n\n # Acquire world info\n dim_ns, _, __, dim_ew, easting, northing = read_tfw_file(tfw_fname)\n\n # Check if NS and EW dimensions are equal (should be)\n assert(abs(dim_ns)==abs(dim_ew))\n\n # Decide line thickness\n line_width = int(np.ceil(im.height/300))\n\n # Infer number of pixels\n px_len = scale_len/dim_ns\n\n # Create PNG line of correct length\n print(\"Creating PNG Line of length ({0} meters/{1} pixels) to be separately overlaid on TIFF\".format(scale_len, px_len))\n\n # Correct Image Dimension Order: Vertical, Horizontal, Colour Channel\n # Locate the 'leftmost' segments of each scale tick-mark at positions of 0.2*(line length) intervals = 0.2*(px_len+line_width)\n line_len = px_len + line_width\n line_arr = np.array([[ (0,255) for horizontal_index in range(int(line_len))] for vertical_index in range(line_width)])\n ticks_arr = np.array([[ (0,0) for horizontal_index in range(int(line_len))] for vertical_index in range(line_width)])\n\n # Ticks at 0.2, 0.4, 0.6, 0.8 of 
full horizontal length\n print(\"Creating ticks_arr structure\")\n for vertical_index in range(line_width):\n for horizontal_index in np.round(px_len*np.arange(0.0,1.00001,0.2)).astype(int): # Use px_len here so that we can loop up to line_len = px_len + line_width\n for horizontal_offset in range(line_width):\n ticks_arr[vertical_index][horizontal_index + horizontal_offset][1] = 255\n #print(\"ticks_arr[vertical_index][horizontal_index + offset]: {0}\".format(ticks_arr[vertical_index][horizontal_index+horizontal_offset]))# = (255, 255, 255, 255)\n\n # The last pixel in the line is at 4407, so we need to count backwards from there\n scale_arr = np.concatenate((ticks_arr, line_arr), axis=0)\n\n # Colour channel options and bit depth\n chan_mode = 'LA'\n bit_depth = '8'\n mode = chan_mode + ';' + bit_depth\n line_png = png.from_array(scale_arr, mode=mode)\n line_png.save(str(sys.argv[1] + \".png\"))\n print(\"Line image written to file\")\n except Exception as e:\n print(\"Exception: {0}\".format(e))", "def GetScale(self):\n ...", "def setUp(self):\n self.gray_image = np.ndarray((100, 200), dtype=np.uint8)\n self.rgb_image = np.ndarray((100, 200, 3), dtype=np.uint8)", "def __init__(self):\n self.index = 'r11_07_06c'\n self.parameters = {'run_index': 'r11_07_06c',\n 'h_1': 0.25,\n 'rho_0': 1.150,\n 'rho_1': 1.100,\n 'rho_2': 1.000,\n 'alpha': 0.5,\n 'D': 0.4,\n 'H': 0.25,\n 'sample': 1.0,\n 'perspective': 'old'}\n self.run_data = {'run_index': 'r11_07_06c',\n 'l0x': 2796,\n 'l0y': 1151,\n 'lsx': 2793,\n 'lsy': 716,\n 'j10x': 210,\n 'j10y': 1165,\n 'j1sx': 208,\n 'j1sy': 727,\n 'leakage': -76,\n 'odd_1': 'n',\n 'j20x': 2728,\n 'j20y': 1086,\n 'j2sx': 2730,\n 'j2sy': 670,\n 'r0x': 1097,\n 'r0y': 1095,\n 'rsx': 1093,\n 'rsy': 683,\n 'odd_2': 'n'}\n self.raw_image = 'tests/data/synced/r11_07_06c/cam1/img_0001.jpg'\n self.bc_image = 'tests/data/bc/r11_07_06c/cam1/img_0001.jpg'\n self.processed_path = 'tests/data/processed_ref/r11_07_06c/cam1/img_0001.jpg'" ]
[ "0.69724894", "0.6320908", "0.63168067", "0.6197034", "0.61356825", "0.60902923", "0.60631675", "0.6053572", "0.6046153", "0.60447425", "0.6000556", "0.5958693", "0.5948496", "0.59227127", "0.59221554", "0.59041744", "0.5895156", "0.5886209", "0.58859426", "0.58822227", "0.58744276", "0.58667153", "0.5836203", "0.5829602", "0.5825212", "0.5813289", "0.58034253", "0.57985586", "0.57985586", "0.57810664", "0.5765299", "0.5745297", "0.5737539", "0.57211685", "0.570293", "0.570144", "0.5670872", "0.5670872", "0.566163", "0.5660783", "0.5658621", "0.56559527", "0.56441694", "0.56266373", "0.56253564", "0.5619724", "0.56190413", "0.56168896", "0.56140834", "0.561124", "0.56106436", "0.56035584", "0.56013507", "0.55999655", "0.55930835", "0.5580298", "0.55745953", "0.5568001", "0.5567907", "0.5567838", "0.55655426", "0.55488557", "0.55477923", "0.5543372", "0.5542656", "0.5541285", "0.5532558", "0.55300653", "0.55286574", "0.55268747", "0.55265373", "0.5524979", "0.5517898", "0.5515492", "0.55120826", "0.55113715", "0.5508524", "0.55079246", "0.55071723", "0.55014867", "0.5498586", "0.5491733", "0.54814523", "0.5481076", "0.54794097", "0.5478329", "0.54776394", "0.5477306", "0.54747885", "0.547459", "0.54732007", "0.54729414", "0.5472664", "0.547021", "0.54686624", "0.546641", "0.546246", "0.5461806", "0.54615027", "0.5457919", "0.54563695" ]
0.0
-1
the details of the fluorescent particles used
def __init__(self, name=None, materials=None,diameter=None,fluorescence_spectra=None, concentration=None, electrophoretic_mobility=None, zeta=None): self.name = name self.materials=materials self.concentration=concentration self.electrophoretic_mobility=electrophoretic_mobility self.zeta=zeta self.diameter=diameter if diameter: k_b = 1.3806e-23 T=298 mu=0.001 self.diffusivity = k_b*T/(6*np.pi*mu*diameter/2) self.fluorescence_spectra=fluorescence_spectra
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, particles):\n self.particles = particles", "def __len__(self):\n return self.nparticles", "def particles(self):\n if self.data_section is None:\n return None\n data_keys = self.data_section.keys()\n if bool(data_keys)==False:\n return None\n particles_section = self.data_section.values()[0].get('particles', None)\n if particles_section is None:\n return None\n return dict(\n (id, dict(species_id=spid, position=pos))\n for id, spid, pos in particles_section.value)", "def create_particles(self):\n # xf, yf = create_fluid_with_solid_cube()\n xf, yf = create_fluid()\n uf = np.zeros_like(xf)\n vf = np.zeros_like(xf)\n m = initialize_mass(xf, yf)\n rho = initialize_density_fluid(xf, yf)\n h = np.ones_like(xf) * self.hdx * self.dx\n fluid = get_particle_array_wcsph(x=xf, y=yf, h=h, m=m, rho=rho, u=uf,\n v=vf, name=\"fluid\")\n\n xt, yt = create_boundary(self.dx / 2.)\n ut = np.zeros_like(xt)\n vt = np.zeros_like(xt)\n m = np.ones_like(xt) * 1500 * self.dx * self.dx\n rho = np.ones_like(xt) * 1000\n h = np.ones_like(xt) * self.hdx * self.dx / 2.\n tank = get_particle_array_wcsph(x=xt, y=yt, h=h, m=m, rho=rho, u=ut,\n v=vt, name=\"tank\")\n\n return [fluid, tank]", "def x_add_particles():\n particle_count_list = np.zeros(7)", "def get_fluorescence(self):\n return self._lib.StGetFluorFlg()", "def get_observations(self):\n joint_states = self.joints_state\n self.force = self.wrench_stamped.wrench.force\n self.torque = self.wrench_stamped.wrench.torque\n self.static_taxel = self.tactile_static.taxels\n# dynamic_taxel= tactile_dynamic\n\n# print(\"[force]\", self.force.x, self.force.y, self.force.z)\n# print(\"[torque]\", self.torque.x, self.torque.y, self.torque.z)\n shp_joint_ang = joint_states.position[0]\n shl_joint_ang = joint_states.position[1]\n elb_joint_ang = joint_states.position[2]\n wr1_joint_ang = joint_states.position[3]\n wr2_joint_ang = joint_states.position[4]\n wr3_joint_ang = joint_states.position[5]\n\n shp_joint_vel = joint_states.velocity[0]\n shl_joint_vel = joint_states.velocity[1]\n elb_joint_vel = joint_states.velocity[2]\n wr1_joint_vel = joint_states.velocity[3]\n wr2_joint_vel = joint_states.velocity[4]\n wr3_joint_vel = joint_states.velocity[5]\n\n q = [shp_joint_ang, shl_joint_ang, elb_joint_ang, wr1_joint_ang, wr2_joint_ang, wr3_joint_ang]\n# print(\"q(observation):\", q)\n eef_x, eef_y, eef_z = self.get_xyz(q)\n self.end_effector = self.get_xyz(q)\n eef_x_ini, eef_y_ini, eef_z_ini = self.get_xyz(self.init_joint_pose2) \n\n delta_image_r, delta_image_l = self.get_image()\n self.cnn_image_r = agent.update_cnn(delta_image_r)\n self.cnn_image_l = agent.update_cnn(delta_image_l)\n self.cnn_image_r_list = self.cnn_image_r.tolist()\n self.cnn_image_l_list = self.cnn_image_l.tolist()\n print(\"r_list\", self.cnn_image_r_list)\n print(\"l_list\", self.cnn_image_l_list)\n\n observation = []\n# rospy.logdebug(\"List of Observations==>\"+str(self.observations))\n for obs_name in self.observations:\n if obs_name == \"shp_joint_ang\":\n observation.append((shp_joint_ang - self.init_joint_pose2[0]) * self.joint_n)\n elif obs_name == \"shl_joint_ang\":\n observation.append((shl_joint_ang - self.init_joint_pose2[1]) * self.joint_n)\n elif obs_name == \"elb_joint_ang\":\n observation.append((elb_joint_ang - self.init_joint_pose2[2]) * self.joint_n)\n elif obs_name == \"wr1_joint_ang\":\n observation.append((wr1_joint_ang - self.init_joint_pose2[3]) * self.joint_n)\n elif obs_name == \"wr2_joint_ang\":\n observation.append((wr2_joint_ang - self.init_joint_pose2[4]) * 
self.joint_n)\n elif obs_name == \"wr3_joint_ang\":\n observation.append((wr3_joint_ang - self.init_joint_pose2[5]) * self.joint_n)\n elif obs_name == \"shp_joint_vel\":\n observation.append(shp_joint_vel)\n elif obs_name == \"shl_joint_vel\":\n observation.append(shl_joint_vel)\n elif obs_name == \"elb_joint_vel\":\n observation.append(elb_joint_vel)\n elif obs_name == \"wr1_joint_vel\":\n observation.append(wr1_joint_vel)\n elif obs_name == \"wr2_joint_vel\":\n observation.append(wr2_joint_vel)\n elif obs_name == \"wr3_joint_vel\":\n observation.append(wr3_joint_vel)\n elif obs_name == \"eef_x\":\n observation.append((eef_x - eef_x_ini) * self.eef_n)\n elif obs_name == \"eef_y\":\n observation.append((eef_y - eef_y_ini) * self.eef_n)\n elif obs_name == \"eef_z\":\n observation.append((eef_z - eef_z_ini) * self.eef_n)\n elif obs_name == \"force_x\":\n observation.append((self.force.x - self.force_ini.x) / self.force_limit1 * self.force_n)\n elif obs_name == \"force_y\":\n observation.append((self.force.y - self.force_ini.y) / self.force_limit1 * self.force_n)\n elif obs_name == \"force_z\":\n observation.append((self.force.z - self.force_ini.z) / self.force_limit1 * self.force_n)\n elif obs_name == \"torque_x\":\n observation.append((self.torque.x - self.torque_ini.x) / self.torque_limit1 * self.torque_n)\n elif obs_name == \"torque_y\":\n observation.append((self.torque.y - self.torque_ini.y) / self.torque_limit1 * self.torque_n)\n elif obs_name == \"torque_z\":\n observation.append((self.torque.z - self.torque_ini.z) / self.torque_limit1 * self.torque_n)\n elif obs_name == \"image_cnn\":\n for x in range(0, 10):\n observation.append(self.cnn_image_r_list[0][x])\n# print(\"r_list\", self.cnn_image_r_list[0][x])\n for x in range(0, 10):\n observation.append(self.cnn_image_l_list[0][x])\n# print(\"l_list\", self.cnn_image_l_list[0][x])\n elif obs_name == \"static_taxel\":\n for x in range(0, 28):\n observation.append((self.static_taxel[0].values[x] - self.static_taxel_ini[0].values[x]) * self.taxel_n)\n for x in range(0, 28):\n observation.append((self.static_taxel[1].values[x] - self.static_taxel_ini[1].values[x]) * self.taxel_n)\n# elif obs_name == \"dynamic_taxel\":\n# observation.append(dynamic_taxel[0].values) * self.taxel_n\n# observation.append(dynamic_taxel[1].values) * self.taxel_n\n else:\n raise NameError('Observation Asked does not exist=='+str(obs_name))\n\n print(\"observation\", list(map(round, observation, [3]*len(observation))))\n# print(\"observation\", observation)\n\n return observation", "def __init__(self):\n self.pidDict = { # particle_name, pid\n \"total\" : 0,\n \"charged\" : 1,\n \"charged_eta\" : 2,\n \"pion\" : 6, # sum(7, 8, -7)\n \"pion_p\" : 7,\n \"pion_0\" : 8,\n \"pion_m\" : -7,\n \"kaon\" : 11, # sum(12, 13)\n \"kaon_p\" : 12,\n \"kaon_0\" : 13,\n \"anti_kaon\" : -11, # sum(-12, -13)\n \"kaon_m\" : -12,\n \"anti_kaon_0\" : -13,\n \"nucleon\" : 16, # sum(17, 18)\n \"proton\" : 17,\n \"neutron\" : 18,\n \"anti_nucleon\" : -16, # sum(-17, -18)\n \"anti_proton\" : -17,\n \"anti_neutron\" : -18,\n \"sigma\" : 21, # sum(22, 23, 24)\n \"sigma_p\" : 22,\n \"sigma_0\" : 23,\n \"sigma_m\" : 24,\n \"anti_sigma\" : -21,\n \"anti_sigma_p\" : -22,\n \"anti_sigma_0\" : -23,\n \"anti_sigma_m\" : -24,\n \"xi\" : 26, # sum(27, 28)\n \"xi_0\" : 27,\n \"xi_m\" : 28,\n \"anti_xi\" : -26,\n \"anti_xi_0\" : -27,\n \"anti_xi_m\" : -28,\n \"lambda\" : 31,\n \"anti_lambda\" : -31,\n \"omega\" : 36,\n \"anti_omega\" : -36,\n \"phi\" : 41,\n \"rho\" : 46, #sum(47, 48, -47)\n 
\"rho_p\" : 47,\n \"rho_0\" : 48,\n \"rho_m\" : -47,\n \"eta\" : 51,\n \"eta_prime\" : 52,\n \"gamma\" : 61,\n \"omega782\" : 65,\n \"eta\" : 71,\n \"etap\" : 72,\n }\n\n for aParticle in self.pidDict.keys():\n if self.pidDict[aParticle]>=0:\n self.pidDict[aParticle+\"_hydro\"] = self.pidDict[aParticle]+1000\n else:\n self.pidDict[aParticle+\"_hydro\"] = self.pidDict[aParticle]-1000\n if self.pidDict[aParticle]>=0:\n self.pidDict[aParticle+\"_thermal\"] = self.pidDict[aParticle]+2000\n else:\n self.pidDict[aParticle+\"_thermal\"] = self.pidDict[aParticle]-2000\n\n self.pidDict.update({\n \"photon_total\" : 9000,\n \"photon_total_eq\" : 9001,\n \"photon_QGP_tot\" : 9002,\n \"photon_QGP_eq\" : 9003,\n \"photon_HG_tot\" : 9004,\n \"photon_HG_eq\" : 9005,\n \"direct_gamma_shortdecay_hydro\" : 9006,\n \"decay_gamma_pi0_hydro\" : 9007,\n \"decay_gamma_eta_hydro\" : 9008,\n \"decay_gamma_omega_hydro\" : 9009,\n \"decay_gamma_phi_hydro\" : 9010,\n \"decay_gamma_etap_hydro\" : 9011,\n \"decay_gamma_Sigma0_hydro\" : 9012,\n })\n\n #UrQMD pid Dictionary, name conversion defined as in binUtility\n self.UrQMDpidDict = { #particle name, UrQMD id# : isospin*2000 + pid\n 2101 : \"pion_p\",\n -1899 : \"pion_m\",\n 101 : \"pion_0\",\n 1106 : \"kaon_p\",\n -894 : \"kaon_0\",\n -1106 : \"kaon_m\",\n 894 : \"anti_kaon_0\",\n 1001 : \"proton\",\n -999 : \"neutron\",\n -1001 : \"anti_proton\",\n 999 : \"anti_neutron\",\n 2040 : \"sigma_p\",\n -1960 : \"sigma_m\",\n 40 : \"sigma_0\",\n -2040 : \"anti_sigma_p\",\n 1960 : \"anti_sigma_m\",\n -40 : \"anti_sigma_0\",\n 1049 : \"xi_0\",\n -951 : \"xi_m\",\n -1049 : \"anti_xi_0\",\n 951 : \"anti_xi_m\",\n 27 : \"lambda\",\n -27 : \"anti_lambda\",\n 55 : \"omega\",\n -55 : \"anti_omega\",\n 109 : \"phi\",\n 102 : \"eta\",\n 107 : \"eta_prime\",\n 100 : \"gamma\",\n }\n\n #pdg pid Dictionary\n self.PDGpidDict = { #pdg id#, particle name\n 211 : \"pion_p\",\n -211 : \"pion_m\",\n 111 : \"pion_0\",\n 321 : \"kaon_p\",\n 311 : \"kaon_0\",\n -321 : \"kaon_m\",\n -311 : \"anti_kaon_0\",\n 2212 : \"proton\",\n 2112 : \"neutron\",\n -2212 : \"anti_proton\",\n -2112 : \"anti_neutron\",\n 3222 : \"sigma_p\",\n 3112 : \"sigma_m\",\n 3212 : \"sigma_0\",\n -3222 : \"anti_sigma_p\",\n -3112 : \"anti_sigma_m\",\n -3212 : \"anti_sigma_0\",\n 3322 : \"xi_0\",\n 3312 : \"xi_m\",\n -3322 : \"anti_xi_0\",\n -3312 : \"anti_xi_m\",\n 3122 : \"lambda\",\n -3122 : \"anti_lambda\",\n 3334 : \"omega\",\n -3334 : \"anti_omega\",\n 333 : \"phi\",\n 221 : \"eta\",\n 331 : \"eta_prime\",\n 22 : \"gamma\",\n }\n\n #particle mass Dictionary (unit in GeV)\n self.masspidDict = {\n \"pion\" : 0.13957,\n \"pion_p\" : 0.13957,\n \"pion_0\" : 0.13498,\n \"pion_m\" : 0.13957,\n \"kaon\" : 0.49368,\n \"kaon_p\" : 0.49368,\n \"kaon_0\" : 0.49765,\n \"anti_kaon\" : 0.49368,\n \"kaon_m\" : 0.49368,\n \"anti_kaon_0\" : 0.49765,\n \"nucleon\" : 0.93827,\n \"proton\" : 0.93827,\n \"neutron\" : 0.93957,\n \"anti_nucleon\" : 0.93827,\n \"anti_proton\" : 0.93827,\n \"anit_neutron\" : 0.93957,\n \"sigma\" : 1.18937,\n \"sigma_p\" : 1.18937,\n \"sigma_0\" : 1.19264,\n \"sigma_m\" : 1.19745,\n \"anti_sigma\" : 1.18937,\n \"anti_sigma_p\" : 1.18937,\n \"anti_sigma_0\" : 1.19264,\n \"anti_sigma_m\" : 1.19745,\n \"xi\" : 1.31483,\n \"xi_0\" : 1.31483,\n \"xi_m\" : 1.32131,\n \"anti_xi\" : 1.31483,\n \"anti_xi_0\" : 1.31483,\n \"anti_xi_m\" : 1.32131,\n \"lambda\" : 1.11568,\n \"anti_lambda\" : 1.11568,\n \"omega\" : 1.67243,\n \"anti_omega\" : 1.67243,\n \"rho\" : 0.77580,\n \"rho_p\" : 0.77580,\n \"rho_0\" : 0.77580,\n 
\"rho_m\" : 0.77580,\n \"phi\" : 1.01946,\n \"eta\" : 0.54775,\n \"eta_prime\" : 0.95778,\n \"gamma\" : 0.0,\n }\n for aParticle in self.masspidDict.keys():\n self.masspidDict[aParticle+\"_hydro\"] = self.masspidDict[aParticle]\n self.masspidDict[aParticle+\"_thermal\"] = self.masspidDict[aParticle]\n\n # charged hadrons list\n self.charged_hadron_list = [\n \"pion_p\", \"pion_m\", \"kaon_p\", \"kaon_m\", \"proton\", \"anti_proton\",\n \"sigma_p\", \"sigma_m\", \"anti_sigma_p\", \"anti_sigma_m\",\n \"xi_m\", \"anti_xi_m\"]", "def create_flux_vector_pms_gr(self):\n soma_prod = 0\n soma_inj = 0\n lim4 = 1e-4\n store_velocity = {}\n store_flux = {}\n for primal in self.primals:\n #1\n primal_id = self.mb.tag_get_data(self.primal_id_tag, primal, flat=True)[0]\n primal_id = self.ident_primal[primal_id]\n fine_elems_in_primal = self.mb.get_entities_by_handle(primal)\n for volume in fine_elems_in_primal:\n #2\n flux = {}\n velocity = {}\n kvol = self.mb.tag_get_data(self.perm_tag, volume).reshape([3, 3])\n centroid_volume = self.mesh_topo_util.get_average_position([volume])\n z_vol = self.tz - centroid_volume[2]\n adjs_vol = self.mesh_topo_util.get_bridge_adjacencies(volume, 2, 3)\n gid_vol = self.mb.tag_get_data(self.global_id_tag, volume, flat=True)[0]\n for adj in adjs_vol:\n #3\n gid_adj = self.mb.tag_get_data(self.global_id_tag, adj, flat=True)[0]\n if adj not in fine_elems_in_primal:\n #4\n pvol = self.mb.tag_get_data(self.pms_tag, volume, flat=True)[0]\n padj = self.mb.tag_get_data(self.pms_tag, adj, flat=True)[0]\n #3\n else:\n #4\n pvol = self.mb.tag_get_data(self.pcorr_tag, volume, flat=True)[0]\n padj = self.mb.tag_get_data(self.pcorr_tag, adj, flat=True)[0]\n #3\n kadj = self.mb.tag_get_data(self.perm_tag, adj).reshape([3, 3])\n centroid_adj = self.mesh_topo_util.get_average_position([adj])\n z_adj = self.tz - centroid_adj[2]\n direction = centroid_adj - centroid_volume\n unit = direction/np.linalg.norm(direction)\n #unit = vetor unitario na direcao de direction\n uni = self.unitary(direction)\n # uni = valor positivo do vetor unitario\n kvol = np.dot(np.dot(kvol,uni),uni)\n kadj = np.dot(np.dot(kadj,uni),uni)\n keq = self.kequiv(kvol, kadj)/(self.mi)\n keq2 = keq\n keq = keq*(np.dot(self.A, uni))\n pvol2 = self.mb.tag_get_data(self.pms_tag, volume, flat=True)[0]\n padj2 = self.mb.tag_get_data(self.pms_tag, adj, flat=True)[0]\n grad_p = (padj - pvol)/float(abs(np.dot(direction, uni)))\n grad_z = (z_adj - z_vol)/float(abs(np.dot(direction, uni)))\n grad_p2 = (padj2 - pvol2)/float(abs(np.dot(direction, uni)))\n q = (grad_p)*keq - grad_z*keq*self.gama\n print((grad_p)*keq)\n print(- grad_z*keq*self.gama)\n print(q)\n print(self.store_flux_pf_gr[volume][tuple(unit)])\n print('\\n')\n import pdb; pdb.set_trace()\n\n if gid_adj > gid_vol:\n v = -((grad_p2)*keq2 - grad_z*self.gama*keq2)\n else:\n v = -((grad_p2)*keq2 - grad_z*self.gama*keq2)\n\n flux[tuple(unit)] = q\n velocity[tuple(unit)] = v\n kvol = self.mb.tag_get_data(self.perm_tag, volume).reshape([3, 3])\n\n #2\n # print(gid_vol)\n # print(velocity)\n # print('\\n')\n # import pdb; pdb.set_trace()\n store_flux[volume] = flux\n self.mb.tag_set_data(self.flux_fine_pms_tag, volume, sum(flux.values()))\n # flt = sum(flux.values())\n # if volume not in self.wells_inj and volume not in self.wells_prod:\n # lim4 = 1e-7\n # if abs(flt) > lim4:\n # print(gid_vol)\n # print(flt)\n # import pdb; pdb.set_trace()\n # flt = sum(flux.values())\n store_velocity[volume] = velocity\n\n for volume in set(self.all_fine_vols) - set(self.wells):\n gid = 
self.mb.tag_get_data(self.global_id_tag, volume, flat=True)[0]\n values = store_flux[volume].values()\n if sum(values) > lim4:\n print('fluxo multiescala nao esta dando conservativo')\n print('gid:{0}'.format(gid))\n print(sum(values))\n import pdb; pdb.set_trace()\n\n with open('fluxo_multiescala_gr.txt', 'w') as arq:\n for volume in self.wells:\n gid = self.mb.tag_get_data(self.global_id_tag, volume, flat= True)[0]\n values = store_flux[volume].values()\n if volume in self.wells_inj:\n soma_inj += sum(values)\n else:\n soma_prod += sum(values)\n arq.write('gid:{0} , fluxo:{1}\\n'.format(gid, sum(values)))\n arq.write('\\n')\n arq.write('soma_inj:{0}\\n'.format(soma_inj))\n arq.write('soma_prod:{0}\\n'.format(soma_prod))\n\n return store_flux", "def num_particles(self) -> Tuple[int, int]:\n return (self.num_alpha, self.num_beta)", "def x(self):\n return UnweightedSamples(self.particles)", "def init_particles(self):\n \n # Each particle is a dimension-K vector. We generate each particle \n # uniformly at random from the space [0,1]^K. \n self.Particles = np.random.uniform(0, 1, (self.Npar, self.K))\n #print(\"Particles: \", self.Particles) \n return None", "def analyze(self,event):\n print \"\\n%s event %s %s\"%('-'*10,event.event,'-'*68)\n self.nevents += 1\n leptonic = False\n particles = Collection(event,'GenPart')\n #particles = Collection(event,'LHEPart')\n seeds = [ ] # seeds for decay chain\n chain = { } # decay chain\n print \" \\033[4m%7s %8s %8s %8s %8s %8s %8s %8s %9s %10s \\033[0m\"%(\n \"index\",\"pdgId\",\"moth\",\"mothid\",\"dR\",\"pt\",\"eta\",\"status\",\"prompt\",\"last copy\")\n for i, particle in enumerate(particles):\n mothidx = particle.genPartIdxMother\n if 0<=mothidx<len(particles):\n moth = particles[mothidx]\n mothpid = moth.pdgId\n mothdR = min(9999,particle.DeltaR(moth)) #particle.p4().DeltaR(moth.p4())\n else:\n mothpid = -1\n mothdR = -1\n eta = max(-9999,min(9999,particle.eta))\n prompt = hasbit(particle.statusFlags,0)\n lastcopy = hasbit(particle.statusFlags,13)\n print \" %7d %8d %8d %8d %8.2f %8.2f %8.2f %8d %9s %10s\"%(\n i,particle.pdgId,mothidx,mothpid,mothdR,particle.pt,eta,particle.status,prompt,lastcopy)\n if abs(particle.pdgId) in [11,13,15]:\n leptonic = True\n if mothidx in chain: # add to decay chain\n chain[mothidx].append(i)\n chain[i] = [ ] # daughters\n elif abs(particle.pdgId) in self.seedpids: # save as decay chain seed\n seeds.append(i)\n chain[i] = [ ] # daughters\n if leptonic:\n self.nleptons += 1\n print parsechain(particles,seeds,chain) # print decay chain", "def num_particles(self) -> int:\n return len(self.particles)", "def num_particles(self) -> Optional[Tuple[int, int]]:\n return None", "def _nelec(self):\n pd = self.particle_distribution(self._gam * mec2)\n return pd.to(1/mec2_unit).value", "def estimate(self):\n mu = self.mean()\n var = np.average((self.particles - mu) ** 2, weights=self.weights, axis=0)\n\n return mu, var", "def engine_and_general_info(self):\r\n pos,vel,esc_part, impact, wall_collision,mom = self.box_collision_info()\r\n tot_kin, kin_er = self.kinetic_energy()\r\n esc_mom, force = self.escaped_momentum()\r\n pres = self.pressure()\r\n tot_force = self.engine_boost()\r\n #force, acceleration, fuel = self.engine_boost()\r\n\r\n print\" Engine started and launched \"\r\n\r\n print \"###############################################\"\r\n print \" Engine status (Numerical values) \"\r\n print \"-----------------------------------------------\"\r\n print \"The amount of particle escaped %g\" %(esc_part)\r\n print 
\"Amount of particles collided with one wall %i\" %wall_collision\r\n print \"Momentum escaped %g kgm/s\" %(esc_mom)\r\n print \"Kinetic energy per particle %gj\" %(kin_er)\r\n print \"Total kinetic energy %gj\" %(tot_kin)\r\n print \"Pressure inside the engine is %f\" %(pres)\r\n print \"momentum on the wall %g\" %(mom)\r\n print \"total force %g\"%(tot_force)\r\n print \"###############################################\"\r\n print \" Launch info \"\r\n print \"-----------------------------------------------\"\r\n #print \"acceleration per engine %g m/s^2\" %(acceleration)\r\n #print \"force per engine %g N \" %(force)\r\n print \"################################################\"", "def getTallyParticles(self):\n\n\t\tparticleNames = []\n\n\t\tif self.typeNumber > 0:\n\t\t\tparticleNames.append(particleListShort[self.typeNumber]) \n\t\telse:\n\t\t\tfor i,name in enumerate(self.particleList):\n\t\t\t\ttry:\n\t\t\t\t\tif self.tallyParticles[i] == 1:\n\t\t\t\t\t\tparticleNames.append(self.particleList[i])\n\t\t\t\texcept:\n\t\t\t\t\tpass # For some reasons there can be less than 35 particles listed. Skip in case.\n\t\treturn particleNames", "def box_collision_info(self):\r\n position = np.zeros((self.Npart,3)) # antall part, dim, iterasjoner\r\n position[:,:] = np.random.uniform(0,1e-6, size = (self.Npart,3))\r\n velocity = np.zeros((self.Npart,3))\r\n velocity[:,:] = np.random.normal(0,self.sigma,size = (self.Npart,3))\r\n\r\n part_collided = 0\r\n part_escaped = 0\r\n momentum = 0\r\n\r\n print 'engine started'\r\n for i in xrange(1,self.n):\r\n #collision\r\n position += velocity*dt\r\n l_hole = position[:,0:2] > self.L/4\r\n h_hole = position[:,0:2] < (3*self.L)/4\r\n pos_xy = np.logical_and(l_hole, h_hole)\r\n pos_xy = np.logical_and(pos_xy[:,0], pos_xy[:,1])\r\n pos_z = position[:,2] < 0\r\n esc_part = np.logical_and(pos_z, pos_xy)\r\n\r\n #velocity[esc_part] = velocity[esc_part]\r\n part_escaped += np.sum(esc_part)\r\n\r\n for j in xrange(0,3):\r\n impact_wall_pos = np.logical_and(position[:,j] > 0,\r\n position[:,j] < self.L)\r\n velocity[np.logical_not(impact_wall_pos),j] = -velocity[\r\n np.logical_not(impact_wall_pos),j]\r\n\r\n\r\n if j == 0:\r\n part_collided += np.sum(np.logical_not(impact_wall_pos),j)\r\n momentum += np.sum(2*self.m*abs(velocity[np.logical_not(\r\n impact_wall_pos),j]))\r\n\r\n\r\n\r\n position[position < 0] = 0\r\n position[position >self.L] = self.L\r\n\r\n particle_collided = part_collided/2\r\n return position, velocity,part_escaped, impact_wall_pos, particle_collided, momentum", "def get_variables(self, z0, u_inf):\n # Get the ambient data from the CTD profile\n Ta, Sa, P = self.profile.get_values(z0, ['temperature', 'salinity',\n 'pressure'])\n rho = seawater.density(Ta, Sa, P)\n \n # Compute the properties of each dispersed-phase particle\n us = np.zeros(len(self.particles))\n rho_p = np.zeros(len(self.particles))\n m_p = np.zeros(len(self.particles))\n B_p = np.zeros(len(self.particles))\n for i in range(len(self.particles)):\n m0 = self.particles[i].m0\n T0 = self.particles[i].T0\n m_p[i] = np.sum(m0) * self.particles[i].nb0\n if m_p[i] > 0.:\n # Particles exist, get properties. 
Make sure the algorithm \n # uses the dirty bubble properties since this is supposed\n # to be the rise velocity averaged over the whole plume.\n us[i], rho_p[i]= self.particles[i].properties(m0, T0, P, Sa, \n Ta, np.inf)[0:2]\n B_p[i] = (rho - rho_p[i]) / rho * 9.81 * (m_p[i] / rho_p[i])\n else:\n # Particles dissolved, set to ambient conditions\n us[i] = 0.\n rho_p[i] = rho\n B_p[i] = 0.\n \n # Select the correct slip velocity\n u_slip = us[0]\n for i in range(len(self.particles) - 1):\n if B_p[i+1] > B_p[i]:\n u_slip = us[i+1]\n \n # Compute the total buoyancy flux\n B = np.sum(B_p)\n \n # Get the ambient buoyancy frequency\n N = self.profile.buoyancy_frequency(z0)\n \n # Return the governing parameters\n return (B, N, u_slip, u_inf)", "def energy(self):\n energy = -0.5*np.sum(self.phi)+0.5*np.sum(self.mass*np.sqrt(self.particles.momentum[:,0]**2+self.particles.momentum[:,1]**2)**2)\n return energy", "def __init__(self,E,px,py,pz):\n Particle.__init__(self)\n self.E=float(E)\n self.px=float(px)\n self.py=float(py)\n self.pz=float(pz)\n self.cal_pt()\n self.cal_phi()\n self.cal_eta()\n #self.cal_mass()\n #print self.E,self.px,self.py,self.pz\n #print self.pt,self.phi,self.eta", "def __len__(self):\n return len(self.particles.position[:,0])", "def flux_hack(self):\r\n return self.planes[1].galaxies[0].light_profiles[0].flux", "def analyze(self, event):\n\t\tJets = Collection(event, \"Jet\")\n\t\tjets = [j for j in Jets if j.pt >= 20]\n\t\tgenpart = Collection(event, \"GenPart\")\n\t\tgenParts = [l for l in genpart]\n\t\t# get the particles when they have a mother ---> getting the daughters only \n\t\tdaughters = [l for l in genpart if l.genPartIdxMother>= 0 ]\n\t\tevent.nIsr = 0\n\t\tfor jet in jets:\n\t\t\tif jet.pt <30.0: continue\n\t\t\tif abs(jet.eta )>2.4: continue\n\t\t\tmatched = False\n\t\t\tfor i,mc in enumerate(genParts):\n\t\t\t\t# if it's matched doesn't make sence to correct it\n\t\t\t\tif matched: break\n\t\t\t\t# check if it's quark from top or not\n\t\t\t\tif (mc.status!=23 or abs(mc.pdgId)>5): continue\n\t\t\t\tmomid = abs(genParts[mc.genPartIdxMother].pdgId)\n\t\t\t\tif not (momid==6 or momid==23 or momid==24 or momid==25 or momid>1e6): continue\n\t\t\t\tfor idau in range(len(daughters)) :\n\t\t\t\t\t# look for the products of the jet and match jet with gen daughters of the quark \n\t\t\t\t\tif i == daughters[idau].genPartIdxMother:\n\t\t\t\t\t\tdR = math.sqrt(deltaR2(jet.eta,jet.phi, daughters[idau].eta,daughters[idau].phi))\n\t\t\t\t\t\tif dR<0.3:\n\t\t\t\t\t\t\t# if matched escape\n\t\t\t\t\t\t\tmatched = True\n\t\t\t\t\t\t\tbreak\n\t\t\t# if not matched correct it \n\t\t\tif not matched:\n\t\t\t\tevent.nIsr+=1\n\t\t# fill the output with nisr\n\t\tself.out.fillBranch(\"nIsr\",event.nIsr)\n\t\tnISRweight = 1\n\t\t#https://indico.cern.ch/event/592621/contributions/2398559/attachments/1383909/2105089/16-12-05_ana_manuelf_isr.pdf\n\t\tISRweights_Mar17 = { 0: 1, 1 : 0.920, 2 : 0.821, 3 : 0.715, 4 : 0.662, 5 : 0.561, 6 : 0.511}\n\t\tISRweights_ICHEP16 = { 0: 1, 1 : 0.882, 2 : 0.792, 3 : 0.702, 4 : 0.648, 5 : 0.601, 6 : 0.515}\n\t\tISRweightssyst_Mar17 = { 0: 0.0, 1 : 0.040, 2 : 0.090, 3 : 0.143, 4 : 0.169, 5 : 0.219, 6 : 0.244}\n\t\tISRweightssyst_ICHEP16 = { 0: 0.0, 1 : 0.059, 2 : 0.104, 3 : 0.149, 4 : 0.176, 5 : 0.199, 6 : 0.242}\n\t\t\n\t\tif self.ICHEP16 == True and self.Mar17 == False:\n\t\t\tISRweights = ISRweights_ICHEP16\n\t\t\tISRweightssyst = ISRweightssyst_ICHEP16\n\t\t\t\n\t\telif self.ICHEP16 == False and self.Mar17 == True: \n\t\t\tISRweights = 
ISRweights_Mar17\n\t\t\tISRweightssyst = ISRweightssyst_Mar17\n\t\t\t\n\t\tnISRforWeights = int(event.nIsr)\n\t\tif event.nIsr > 6:\n\t\t\tnISRforWeights = 6\n\t\tC_ISR = 1.090\n\t\tC_ISR_up = 1.043\n\t\tC_ISR_down = 1.141\n\t\tnISRweight = C_ISR * ISRweights[nISRforWeights]\n\t\tnISRweightsyst_up = C_ISR_up * (ISRweights[nISRforWeights] + ISRweightssyst[nISRforWeights])\n\t\tnISRweightsyst_down = C_ISR_down * (ISRweights[nISRforWeights] - ISRweightssyst[nISRforWeights])\n\t\t\n\t\tself.out.fillBranch(\"nISRweight\",nISRweight)\n\t\tself.out.fillBranch(\"nISRttweightsyst_up\",nISRweightsyst_up)\n\t\tself.out.fillBranch(\"nISRttweightsyst_down\",nISRweightsyst_down)\n\n\n # ------ Forwarded Message --------\n # Subject: Re: question for ttbar ISR reweighting\n # Date: Sat, 14 Jan 2017 20:24:14 +0100\n # From: Manuel Franco Sevilla <manuel.franco.sevilla@cern.ch>\n #The [Nom, Up, Down] values we find for the events with Nisr = 0 are:\n #[1.090, 1.043, 1.141]: TTJets_Tune\n #[1.096, 1.046, 1.151]: TTJets_SingleLeptFromT\n #[1.116, 1.055, 1.185]: TTJets_DiLept\n\t\t\n\t\t\n\t\treturn True", "def info(self):\n\n print(\"pupil file =\", self.pupil_file)\n print(\"phase file =\", self.phase_file)\n print(\"wavelengths and weights =\")\n for i in range(len(self.filter[0])):\n print(\" %10.5f %6.4f\" % (self.filter[0][i], self.filter[1][i]))\n print(\"pupil diameter (meters) =\", self.D)\n if self.oversample == 2:\n print(\"oversampling factor = 2 (Nyquist sampling)\")\n else:\n r = float(self.oversample) / 2.\n print(\"oversampling factor = %d (%g * Nyquist sampling)\" % \\\n (self.oversample, r))\n if self.type == SINGLE_PREC:\n print(\"computations will use single precision\")\n else:\n print(\"computations will use double precision\")\n print(\"size of output image =\", self.output_size)\n if self.cdelt is not None:\n print(\"output pixel size (arcsec) =\", self.cdelt / ARCSECtoDEGREES)\n if self.output_written:\n print(\"The computed PSF has been written to the output file.\")\n else:\n print(\"The output file has not been written yet.\")", "def main():\n \n # Particle in SHO - c.f. Mocz & Succi (2015) Fig. 2\n # parameters\n n = 100 # number of particles\n dt = 0.02 # timestep\n nt = 100 # number of timesteps\n nt_setup = 400 # number of timesteps to set up simulation\n n_out = 25 # plot solution every nout steps\n b = 4 # velocity damping for acquiring initial condition\n m = 1/n # mass of SPH particle ( m * n = 1 normalizes |wavefunction|^2 to 1)\n h = 40/n # smoothing length\n t = 0. 
# time\n\n # plot potential\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n fig = plt.plot(xx, 0.5*xx**2, linewidth=5, color=[0.7, 0.7, 0.9])\n \n # initialize\n x = np.linspace(-3.0, 3.0, num=n)\n x = np.reshape(x,(n,1))\n u = np.zeros((n,1))\n \n rho = density( x, m, h )\n P = pressure( x, rho, m, h )\n a = acceleration( x, u, m, rho, P, b, h )\n\n # get v at t=-0.5*dt for the leap frog integrator using Euler's method\n u_mhalf = u - 0.5 * dt * a\n\n # main loop (time evolution)\n for i in np.arange(-nt_setup, nt): # negative time (t<0, i<0) is used to set up initial conditions\n\n # leap frog\n u_phalf = u_mhalf + a*dt\n x = x + u_phalf*dt\n u = 0.5*(u_mhalf+u_phalf)\n u_mhalf = u_phalf\n if (i >= 0):\n t = t + dt\n print(\"%.2f\" % t)\n \n if (i == -1 ): # switch off damping before t=0\n u = np.zeros((n,1)) + 1.0\n u_mhalf = u\n b = 0 # switch off damping at time t=0\n \n # update densities, pressures, accelerations\n rho = density( x, m, h )\n P = pressure( x, rho, m, h )\n a = acceleration( x, u, m, rho, P, b, h)\n \n # plot solution every n_out steps\n if( (i >= 0) and (i % n_out) == 0 ):\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n rr = probeDensity(x, m, h, xx)\n rr_exact = 1./np.sqrt(np.pi) * np.exp(-(xx-np.sin(t))**2/2.)**2\n fig = plt.plot(xx, rr_exact, linewidth=2, color=[.6, .6, .6])\n fig = plt.plot(xx, rr, linewidth=2, color=[1.*i/nt, 0, 1.-1.*i/nt], label='$t='+\"%.2f\" % t +'$')\n # plot the t<0 damping process for fun\n if( i==-nt_setup or i==-nt_setup*3/4 or i==-nt_setup/2 ):\n xx = np.linspace(-4.0, 4.0, num=400)\n xx = np.reshape(xx,(xx.size,1))\n rr = probeDensity(x, m, h, xx)\n fig = plt.plot(xx, rr, linewidth=1, color=[0.9, 0.9, 0.9])\n \n plt.legend()\n plt.xlabel('$x$')\n plt.ylabel('$|\\psi|^2$')\n plt.axis([-2, 4, 0, 0.8])\n plt.savefig('solution.pdf', aspect = 'normal', bbox_inches='tight', pad_inches = 0)\n plt.close()", "def spring_particle(name, num_trajectories, NUM_PARTS, T_max, dt, sub_sample_rate, noise_std, seed):\n num_particles = NUM_PARTS\n collater = {}\n\n def diffeq_hyper(t, q, k, m, nparts):\n num_particles = nparts\n vels = q[2 * num_particles:]\n xs = q[:2 * num_particles]\n xs = xs.reshape(-1, 2)\n forces = np.zeros(xs.shape)\n new_k = np.repeat(k, num_particles) * np.tile(k, num_particles)\n new_k = np.repeat(new_k, 2).reshape(-1, 2)\n dx = np.repeat(xs, num_particles, axis=0) - np.tile(xs, (num_particles, 1))\n resu = -new_k * dx\n forces = np.add.reduceat(resu, np.arange(0, nparts * nparts, nparts)).ravel()\n\n return np.concatenate([vels / np.repeat(m, 2), forces]).ravel()\n\n def hamiltonian(vec, m, k, num_particles):\n num_particles = num_particles\n x = vec[:num_particles * 2]\n p = vec[2 * num_particles:]\n xs = x.reshape(-1, 2)\n ps = p.reshape(-1, 2)\n U1 = 0\n K = 0\n for i in range(num_particles):\n for j in range(i + 1, num_particles):\n U1 += .5 * k[i] * k[j] * ((xs[i] - xs[j]) ** 2).sum()\n K += 0.5 * ((ps[i] ** 2).sum()) / m[i]\n return K, U1\n\n theta = []\n dtheta = []\n energy = []\n mass_arr = []\n ks_arr = []\n lagrangian = []\n np.random.seed(seed)\n\n for traj in range(num_trajectories):\n ks = np.ones(NUM_PARTS)#np.random.uniform(.5, 1, size=(NUM_PARTS))\n positions = np.random.uniform(-1, 1, size=(NUM_PARTS, 2))\n velocities = np.random.uniform(-3, 3, size=(NUM_PARTS, 2))\n masses = np.ones(NUM_PARTS)#np.random.uniform(0.1, 1, size=NUM_PARTS)\n momentum = np.multiply(velocities, np.repeat(masses, 2).reshape(-1, 2))\n q = np.concatenate([positions, 
momentum]).ravel()\n qnrk = rk(lambda t, y: diffeq_hyper(t, y, ks, masses, num_particles), (0, T_max), q,\n t_eval=np.arange(0, T_max, dt),\n rtol=1e-12, atol=1e-12, method='DOP853')\n accum = qnrk.y.T\n ssr = int(sub_sample_rate / dt)\n accum = accum[::ssr]\n daccum = np.array([diffeq_hyper(0, accum[i], ks, masses, num_particles) for i in range(accum.shape[0])])\n energies = []\n lags = []\n for i in range(accum.shape[0]):\n ktmp, utmp = hamiltonian(accum[i], masses, ks, NUM_PARTS)\n energies.append(ktmp + utmp)\n lags.append(ktmp - utmp)\n\n accum += np.random.randn(*accum.shape) * noise_std\n daccum += np.random.randn(*daccum.shape) * noise_std\n\n theta.append(accum)\n dtheta.append(daccum)\n energy.append(energies)\n mass_arr.append(masses)\n ks_arr.append(ks)\n lagrangian.append(lags)\n\n collater['x'] = np.concatenate(theta)\n collater['dx'] = np.concatenate(dtheta)\n collater['energy'] = np.concatenate(energy)\n collater['lagrangian'] = np.concatenate(lagrangian)\n\n collater['mass'] = mass_arr\n collater['ks'] = ks_arr\n\n f = open(name + \".pkl\", \"wb\")\n pickle.dump(collater, f)\n f.close()\n\n return collater", "def test_flma(self):\n self.create_sample_data_set_dir(\"node11p1.dat\", TELEM_DIR, \"node59p1.dat\")\n self.assert_initialize()\n result = self.data_subscribers.get_samples(DataParticleType.METADATA_TELEMETERED,1,30)\n result = self.data_subscribers.get_samples(DataParticleType.SAMPLE_TELEMETERED,5,30)", "def particleCharge(self):\n return self.params['particleCharge']", "def analyze(self, event):\n self.orig_jet_coll = Collection(event, \"Jet\" ) ## we will get Jet_pt_nom / Jet_phi\n self.jet_coll = Collection(event, self.jetColl)\n \n nJet=len(self.jet_coll)##To update pt of cleanjetcollection\n self.corrPTs=[]\n\n\n #### MET_px = -sum( particle_px ) \n ######MET_px_new = -sum(particle_px_new) = -sum( particle_px - particle_px + particle_px_new )\n ######## = MET_px -sum(particle_px_new - particle_px)\n ### => MET_px_new = MET_px - sum( dpx )\n self.JetPxSum_old=0\n self.JetPySum_old=0\n self.GetJetPxPySum_old() ## Set self.JetPxSum_old & self.JetPySum_old\n \n self.JetPxSum_new=0\n self.JetPySum_new=0\n self.GetJetPxPySum_new()\n \n dpx = self.JetPxSum_new - self.JetPxSum_old\n dpy = self.JetPySum_new - self.JetPySum_old\n\n\n for METtype in self.METLIST:\n origMET = Object(event, METtype)\n origMET_pt = origMET.pt\n origMET_phi = origMET.phi\n origMET_px = origMET_pt*math.cos(origMET_phi)\n origMET_py = origMET_pt*math.sin(origMET_phi)\n\n \n\n newMET_px = origMET_px - dpx\n newMET_py = origMET_py - dpy\n newMET_pt = math.sqrt(newMET_px**2 + newMET_py**2)\n newMET_phi = math.atan2(newMET_py,newMET_px)\n\n #if METtype==\"PuppiMET\":\n # print \"origMET_pt = \",origMET_pt\n # print \"newMET_pt = \",newMET_pt\n # print \"origMET_phi = \",origMET_phi\n # print \"newMET_phi = \",newMET_phi\n\n self.out.fillBranch(METtype+'_pt',newMET_pt)\n self.out.fillBranch(METtype+'_phi',newMET_phi)\n\n if nJet != len(self.corrPTs):\n print \"!!!!!!![jhchoi]ERROR, len of cleanjet is not matched bf/after JEC\"\n\n self.out.fillBranch(self.jetColl+'_pt',self.corrPTs)\n \n return True", "def __init__(self, temperatures, daytypes, consumptions, nb_days, nb_particles, sigma2, kappa, u_heat):\n self.temperatures = temperatures\n self.daytypes = daytypes\n self.consumptions = consumptions\n self.nb_days = nb_days\n self.nb_particles = nb_particles\n self.sigma2 = sigma2\n self.kappa = kappa\n self.u_heat = u_heat\n #Var init\n self.s = np.zeros((nb_days, nb_particles)) \n self.g_heat = 
np.zeros((nb_days, nb_particles))\n #sigma_s and sigma_g are fixed\n self.sigma_s_star_2 = np.zeros((1, nb_particles)) \n self.sigma_g_star_2 = np.zeros((1, nb_particles))\n self.x_season = np.zeros((1, nb_particles))\n self.x_heat = np.zeros((1, nb_particles))\n self.x = np.zeros((1, nb_particles))\n self.w = np.zeros((1, nb_particles))", "def metallicity(method, emsystem):\n if method == 'PG16':\n # Requires Hbeta, [OII], [OIII], [NII], [SII]\n R2 = (emsystem.get_emline('[OII] 3726').attrib['flux'] +\n emsystem.get_emline('[OII] 3729').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n R3 = (emsystem.get_emline('[OIII] 4959').attrib['flux'] +\n emsystem.get_emline('[OIII] 5007').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n N2 = (emsystem.get_emline('[NII] 6548').attrib['flux'] +\n emsystem.get_emline('[NII] 6584').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n S2 = (emsystem.get_emline('[SII] 6716').attrib['flux'] +\n emsystem.get_emline('[SII] 6731').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n # Proceed\n if np.log10(N2) < -0.6:\n r_val = 7.932 + 0.944*np.log10(R3/R2) + 0.695*np.log10(N2) + \\\n ((0.97 - 0.291*np.log10(R3/R2)) - 0.019*np.log10(N2))*np.log10(R2)\n\n s_val = 8.072 + 0.789*np.log10(R3/S2) + 0.726*np.log10(N2) + \\\n (1.069 - 0.170*np.log10(R3/S2) +0.022*np.log10(N2))*np.log10(S2)\n else:\n r_val = 8.589 + 0.022*np.log10(R3/R2) + 0.399*np.log10(N2) + \\\n (-0.137 + 0.164*np.log10(R3/R2) + 0.589*np.log10(N2))*np.log10(R2)\n\n s_val = 8.424 + 0.030*np.log10(R3/S2) + 0.751*np.log10(N2) + \\\n (-0.349 + 0.182*np.log10(R3/S2) +0.508*np.log10(N2))*np.log10(S2)\n return r_val.decompose().value, s_val.decompose().value", "def particle(self) -> Particle:\n return Particle()", "def particle(self) -> Particle:\n return Particle()", "def __init__(self,nparticles,size, mass=1, G=1, boundary_periodic = True,early_universe=False, softner=1, position = [], momentum = []):\n self.softner = softner\n self.G = G\n self.boundary_periodic = boundary_periodic\n self.nparticles = nparticles\n self.size = size\n self.mass = np.ones(nparticles)*mass\n #If the boundary condition are not periodic, the grid_size is double but particle kept in the first quadrant so \n #that the particles cannot feel the effect of the particles closed to the opposite boundary when we take the convolution\n if boundary_periodic==True:\n self.grid_size = size\n else:\n self.grid_size = 2*size\n #Initialize the partticle grid\n # if early_universe == True:\n # self.ptclgrid.early_universe_grid(softner)\n # self.mass = self.ptclgrid.mass\n self.ptclgrid = ParticleGrid(nparticles,self.grid_size,self.size, mass=self.mass, soft=softner, early_universe=early_universe)\n #If initial position are givem, place the particle to the right place on the grid\n if len(position) != 0:\n self.ptclgrid.update_position(position, mass)\n\n self.grid = self.ptclgrid.grid\n self.grid_pos = self.ptclgrid.grid_pos\n x0,y0 = self.ptclgrid.position.transpose()\n initial_condition = np.array([x0,y0, self.mass]).transpose()\n #Initialize the Particle list containing the position and momentum of the particles\n self.particles = ParticleList(nparticles, initial_condition)\n #If initial mometa are given, intialize it \n if len(momentum) != 0:\n self.particles.momentum = momentum\n #Computes the green function on the grid\n self.compute_green_function(self.grid_size)\n #Initialize the array with the acceleration of the particles\n self.acc = np.zeros((len(self),2))", "def 
__len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios", "def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios", "def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios", "def __len__(self):\n return self.params['nbins_sfh']+2 # z, mass, met, + logsfr_ratios", "def process(self, maxEv = -1):\n\n # Create the tree branches and associate them to the particle variables\n \n self.tree.Branch(\"Muon_pt\", self.Muon_pt)\n self.tree.Branch(\"Muon_eta\", self.Muon_eta)\n self.tree.Branch(\"Muon_px\", self.Muon_px)\n self.tree.Branch(\"Muon_py\", self.Muon_py)\n self.tree.Branch(\"Muon_pz\", self.Muon_pz)\n self.tree.Branch(\"Muon_energy\", self.Muon_energy)\n self.tree.Branch(\"Muon_isGlobalMuon\", self.Muon_isGlobalMuon)\n self.tree.Branch(\"Muon_isTrackerMuon\", self.Muon_isTrackerMuon)\n self.tree.Branch(\"Muon_isStandAloneMuon\", self.Muon_isStandAloneMuon)\t\n self.tree.Branch(\"Muon_dB\", self.Muon_dB)\n self.tree.Branch(\"Muon_edB\", self.Muon_edB)\n self.tree.Branch(\"Muon_isolation_sumPt\", self.Muon_isolation_sumPt)\n self.tree.Branch(\"Muon_isolation_emEt\", self.Muon_isolation_emEt)\n self.tree.Branch(\"Muon_isolation_hadEt\", self.Muon_isolation_hadEt)\n self.tree.Branch(\"Muon_numberOfValidHits\", self.Muon_numberOfValidHits)\n self.tree.Branch(\"Muon_normChi2\", self.Muon_normChi2)\n self.tree.Branch(\"Muon_charge\", self.Muon_charge)\n self.tree.Branch(\"Muon_distance\",self.Muon_distance)\n\n self.tree.Branch(\"Muon_numOfMatches\", self.Muon_numOfMatches)\n #self.tree.Branch(\"Muon_deltaPt\", self.Muon_delaPt)\n self.tree.Branch(\"Muon_NValidHitsSATk\", self.Muon_NValidHitsSATK)\n #self.tree.Branch(\"Muon_NValidHitsInTk\", self.Muon_NValidHitsInTk)\n #self.tree.Branch(\"Muon_NValidPixelHitsnTk\", self.Muon_NValidPixelHitsnTk)\n\n # Loop the events and populate the variables\n for N, event in enumerate(self.events):\n\n if maxEv >= 0 and (N + 1) >= maxEv:\n break\n\n # Do this for each event:\n muons = self.getMuons(event)\n vertex = self.getVertex(event)\n self.Vertex_Z = vertex.z()\n\n #Do this for each particle in the event\n for i, muon in enumerate(muons): \n\n self.Muon_pt.push_back(muon.pt())\n self.Muon_eta.push_back(muon.eta())\n self.Muon_px.push_back(muon.px())\n self.Muon_py.push_back(muon.py())\n self.Muon_pz.push_back(muon.pz())\n self.Muon_energy.push_back(muon.energy())\n self.Muon_isGlobalMuon.push_back(muon.isGlobalMuon())\n self.Muon_isTrackerMuon.push_back(muon.isTrackerMuon())\n self.Muon_isStandAloneMuon.push_back(muon.isStandAloneMuon())\n self.Muon_dB.push_back(muon.dB(muon.PV3D))\n self.Muon_edB.push_back(muon.edB(muon.PV3D))\n self.Muon_isolation_sumPt.push_back(muon.isolationR03().sumPt)\n self.Muon_isolation_emEt.push_back(muon.isolationR03().emEt)\n self.Muon_isolation_hadEt.push_back(muon.isolationR03().hadEt)\n self.Muon_charge.push_back(muon.charge())\n \n # DISTANCE\n self.Muon_distance.push_back(abs(muon.vertex().z()-self.Vertex_Z))\t\t\n \n self.Muon_numOfMatches.push_back(muon.numberOfMatches())\n \n \n if not muon.globalTrack().isNull():\n\n self.Muon_numberOfValidHits.push_back(muon.numberOfValidHits())\n self.Muon_normChi2.push_back(muon.normChi2())\n\n # Next lines does not work -> The function numberOfValidTrackerHits does not exist for this DATA\n # if not muon.innerTrack().isNull():\n # print muon.innerTrack()\t\n # #self.Muon_NValidHitsInTk.push_back(muon.innerTrack().hitPattern())\n # 
self.Muon_NValidHitsInTk.push_back(muon.innerTrack().hitPattern().numberOfValidTrackerHits())\n # self.Muon_NValidPixelHitsnTk.push_back(muon.innerTrack().hitPattern().numberOfValidPixelHits()) \n\n else:\n self.Muon_numberOfValidHits.push_back(-999)\n self.Muon_normChi2.push_back(-999)\n\n if not muon.standAloneMuon().isNull():\n self.Muon_NValidHitsSATK.push_back(muon.standAloneMuon().hitPattern().numberOfValidMuonHits())\n\n #Fill the tree\n self.tree.Fill()\n\n #Clear the variables\n self.Muon_pt.clear()\n self.Muon_eta.clear()\n self.Muon_px.clear()\n self.Muon_py.clear()\n self.Muon_pz.clear()\n self.Muon_energy.clear()\n self.Muon_isStandAloneMuon.clear()\n self.Muon_isGlobalMuon.clear()\n self.Muon_isTrackerMuon.clear()\n self.Muon_dB.clear()\n self.Muon_edB.clear()\n self.Muon_isolation_sumPt.clear()\n self.Muon_isolation_emEt.clear()\n self.Muon_isolation_hadEt.clear()\n self.Muon_charge.clear()\n\n self.Muon_numOfMatches.clear()\n\n self.Muon_numberOfValidHits.clear()\n self.Muon_normChi2.clear()\n\n self.Muon_distance.clear()\n self.Vertex_Z = 0.\n\n self.Muon_NValidHitsSATK.clear()\n #self.Muon_NValidHitsInTk.clear()\n #self.Muon_NValidPixelHitsnTk.clear()\n\n # Write the tree in the .root file and close it\n print \"Write\"\n self.f.Write()\n self.f.Close()", "def turbulence(self, particles, current_step=0):\n\n for i in range(len(particles)):\n if i % 6 == 0:\n mutated = self.mutator.mutate(particles[i])\n particles[i].vector = copy(mutated.vector)", "def information_content(self):\n ic = 0\n for row in self.pwm:\n ic += 2.0 + np.sum([row[x] * log(row[x])/log(2) for x in range(4) if row[x] > 0])\n return ic", "def simulation(self):\n\n t_max = 3\n if self.meas_selected_series == 1:\n particle_density_number = self.particle_density_number\n else: # series 2:\n factors = 4/np.array([4, 6, 8, 10, 12, 14, 16, 18])\n factor = factors[(self.meas_selected_number-1)]\n particle_density_number = self.particle_density_number * factor\n\n p_i, p_f = toolbox_2.get_pressure_change(self.measurement)\n size, time2 = toolbox_2.simulate_extinction(self.particle_size_number * 1e-9,\n p_i, p_f,\n particle_density_number * 1e10,\n t_max, self.saturation_percentage / 100)\n smallest_growing_particle = toolbox_2.minimum_particle_diameter(p_i, p_f, self.saturation_percentage / 100)\n # short print:\n # print(\"M:\", self.meas_selected_number, \", \", round((p_i - p_f) / 1000, 3), \"kPa\", \", \", self.saturation_percentage, \"%\", \", \", round(smallest_growing_particle * 1e9, 2), \"nm\", \", \", sep=\"\")\n\n if smallest_growing_particle > 0:\n print(\"M:\", self.meas_selected_number, \" S:\", self.meas_selected_series, \" D:\", self.selected_data,\n \", smallest growing particle for pressure change (\", round(p_i / 1000, 2), \"-\",\n round(p_f / 1000, 2), \" = \", round((p_i - p_f) / 1000, 2), \"kPa) in \", self.saturation_percentage,\n \"% humidity is \", round(smallest_growing_particle * 1e9, 2), \"nm\", sep=\"\")\n else:\n print(\"M:\", self.meas_selected_number, \" S:\", self.meas_selected_series, \" D:\", self.selected_data,\n \", no particle will grow in \", \"(\", round(p_i / 1000, 2), \"-\", round(p_f / 1000, 2), \" = \",\n round((p_i - p_f) / 1000, 2), \"kPa)\", \" pressure change and \", self.saturation_percentage,\n \"% humidity \", sep=\"\")\n\n self.curve_simulate.setData(time2+0.05, size)\n self.simulate_bool = False", "def particlesProduced(self, forceCalculate=False, verbose=0):\n # This one is strictly real, so we should make sure that is updated \n self._fourierHarmonics[0] = 
np.real(self.fourierHarmonic(0, forceCalculate, verbose))\n return self._fourierHarmonics[0]", "def __init__(self, *fname):\n # Atom positions, types and form factor table\n self.atom_pos = None # atom position -> N x 3 array, sorted based on atom type id\n # Index array saving indices that split atom_pos to get pos for each atom type\n # More specifically, let m = split_idx[i] and n = split_idx[i+1], then\n # atom_pos[m:n] contains all atoms for the ith atom type.\n self.split_idx = None\n self.num_atom_types = None # number of atom types\n self.ff_table = None # form factor table -> atom_type x qSample\n\n # Scattering\n self.q_sample = None # q vector sin(theta)/lambda\n self.num_q_samples = None # number of q samples\n # Compton scattering\n self.compton_q_sample = None # Compton: q vector sin(theta)/lambda\n self.num_compton_q_samples = 0 # number of Compton q samples\n self.sBound = None # Compton: static structure factor S(q)\n self.nFree = None # Compton: number of free electrons\n if len(fname) != 0:\n # read from pmi file to get info about radiation damage at a certain time slice\n if len(fname) == 1:\n datasetname = 'data/snp_0000001' # default dataset name -> set to be initial time\n self.read_h5file(fname[0], datasetname)\n elif len(fname) == 2:\n # both pmi file and the time slice (dataset) are provided\n self.read_h5file(fname[0], fname[1])\n else:\n raise ValueError('Wrong number of parameters to construct the particle object!')", "def __init__(self,particle):\n self.par = particle", "def test_flmb(self):\n self.create_sample_data_set_dir(\"node10p1.dat\", TELEM_DIR, \"node59p1.dat\")\n self.assert_initialize()\n result = self.data_subscribers.get_samples(DataParticleType.METADATA_TELEMETERED,1,30)\n result = self.data_subscribers.get_samples(DataParticleType.SAMPLE_TELEMETERED,5,30)", "def getIntensity(self, pos):\n #Camera doesnt have position so im just using the position of the followed object (of 1st camera)\n camPos = glad.renderer.cameraList[0].objectFollowed.getPos()\n\n r=(pos-camPos)#separation vector\n if r.isNullVector(): #if the vector is null, sound will be max anyways\n sin = 1\n cos = 1\n else:\n #calculate angles to determine where sound is coming from\n cos = dotProduct(r.getNormalized(),Vector(-1,0))\n sin = dotProduct(r.getNormalized(), Vector(0,1))\n #Calculate intensity for left and right channels\n #when sound is directly to the side have 80 percent come from that side speaker\n #hopefully this will give some directional sounds\n k = 130000 #arbitrary constant to calculate sound intensity\n if r.isNullVector():\n intensity = k #removes division by zero error\n else:\n intensity = k/r.getMagnitude()**2\n #major is the percent of the sound intensity from the side with the greater intensity\n a=0.68 #max percent of the intensity coming from one side\n major = (a*0.5)/((0.5*cos)**2+(a*sin)**2)**0.5 #equation for an ellipse\n if r[0] <= 0:\n right = major\n left = 1-major\n else:\n left = major\n right = 1-major\n right *= intensity\n left *= intensity\n if right > 1: right = 1\n if left > 1: left = 1\n return left,right", "def test_particles(snaptype):\n filename = DIR / snaptype.filename\n snap = plonk.load_snap(filename)\n\n snap.set_molecular_weight(2.381)\n\n _test_particles(snap=snap, ignore=False)\n _test_particles(snap=snap, ignore=True)\n\n snap.close_file()", "def f_per_particle( m):\n alpha = 0.9\n total_features = X.shape[1]\n # Get the subset of the features from the binary mask\n if np.count_nonzero(m) == 0: \n #if the particle subset is only 
zeros, get the original set of attributes\n X_subset = X\n else:\n X_subset = X[:,m==1]\n particleScore = list()\n particleSize = list()\n score = abs(compute_gamma(X_subset, y))\n particleScore.append(score)\n particleSize.append(X_subset.shape[1])\n # Compute for the objective function\n j = (alpha * (1.0 - score)+ (1.0 - alpha) * (1 - (X_subset.shape[1] / total_features)))\n return j", "def getFluxGeometry(self):\r\n\t\treturn self.getTotalFlux() / self.rho_w0;", "def particle_initial_velocity(fignr,N,D,T,m,dim,kb):\n V = np.zeros((3,N))\n V[0:dim,:] = np.random.normal(0, kb*T/m, (dim,N))# / np.sqrt(T/(kb*m))\n plotfunctions.velocity(fignr,N,V)\n # Typical speed for particles\n return V", "def info(self):\r\n print(f\"filename: {self.filename}\")\r\n print(f\"comments: \\n{self.comment_1}{self.comment_2}\")\r\n print(f\"origin: {self.origin[0]}, {self.origin[1]}, {self.origin[2]}\")\r\n print(f\"atoms count: {self.n_atoms}\")\r\n print(f\"voxels count: {self.n_x}, {self.n_y}, {self.n_z}\")\r\n print(f\"voxel x-axis: {self.x[0]}, {self.x[1]}, {self.x[2]}\")\r\n print(f\"voxel y-axis: {self.y[0]}, {self.y[1]}, {self.y[2]}\")\r\n print(f\"voxel z-axis: {self.z[0]}, {self.z[1]}, {self.z[2]}\")", "def __init__(self, init_pos, init_stdev, num_particles, sense_noise):\n self.particles = np.random.multivariate_normal(\n init_pos, [[init_stdev**2, 0], [0, init_stdev**2]], num_particles)\n self.weights = np.array(\n [1. / num_particles for _ in range(num_particles)])\n self.n = num_particles\n self.sense_noise = sense_noise", "def get_flux_density(self):\n if self.no_flux is False:\n return self.snu_at_1GHz\n else:\n return -1", "def get_info(self):\r\n return np.array([self.ypos, self.xpos, self.radius, self.count_sum, self.bg_galaxy, self.no_count])", "def spawn_system(\n max_steps = 25,\n xmax = 10,\n YMAX = 10,\n ZMAX = 10 ,\n NDIM = 3,\n SIDE = (5,5,5),\n VMAX = 0.0,\n dt = 0.05,\n SPACING = 1.0,\n TEMPERATURE = 0.95,\n HLONG = 4.0,\n HSHORT = 2.0,\n RINIT = 'grid',\n ascl = 7.45e+04,\n bscl = 5.84e-01,\n kbscl = 3.29e+04,\n pmass = 1.386e-01,\n ofname = 'data/toybox.nc'\n ):\n\n NP = SIDE[0]*SIDE[1]*SIDE[2]\n cnt = 0\n fps = 0\n\n print \"Initialising\"\n p = particles.SmoothParticleSystem(\n NP,maxn=NP,\n d=3,\n rinit=RINIT,\n vmax=VMAX,\n side=SIDE,\n spacing=SPACING,\n xmax=xmax,\n ymax=YMAX,\n zmax=ZMAX,\n temperature=TEMPERATURE,\n hlong=HLONG,\n hshort=HSHORT,\n thermostat_temp=TEMPERATURE,\n thermostat=True,\n mass=pmass\n )\n nl = neighbour_list.VerletList(p,cutoff=HLONG)\n p.nlists.append(nl)\n p.nl_default = nl\n p.forces.append(\n spam_complete_force.SpamComplete(\n p,nl,adash=ascl,bdash=bscl,kbdash=kbscl))\n #p.forces.append(forces.FortranCollisionForce(p,nl,cutoff=0.5))\n tstart = time()\n nl.build()\n nl.separations()\n spam_properties(p,nl)\n print 'Built list and calc properties',time()-tstart\n cnt = 0\n attribs = {'creator':'Andrew', 'log':'functional test'}\n create_sph_ncfile(ofname,attribs,NP,NDIM)\n print \"STEP INT DERIV = PAIR + SPAM + FORCE \"\n tstartrun = time()\n for i in range(max_steps):\n tstart = time()\n p.update(dt)\n if np.isnan(p.r).any():\n print 'stopping due to nan'\n break\n if i % 10 == 0:\n write_step(ofname,p)\n print 'Step',i,'took',time()-tstart\n g = p.timing.keys()\n g.sort()\n for k in g:\n print k,p.timing[k]\n print 'Completed',i,'steps, in',time()-tstartrun\n return ofname", "def handle_metadata_particle(self, timestamp):\n # change the names in the dictionary from the name in the data file to the parameter name\n header_data_dict = 
{'glider_eng_filename': self._header_dict.get('filename_label'),\n 'glider_mission_name': self._header_dict.get('mission_name'),\n 'glider_eng_fileopen_time': self._header_dict.get('fileopen_time')}\n\n self._metadata_sent = True\n return self._extract_sample(self._metadata_class, None, header_data_dict, internal_timestamp=timestamp)", "def particleMass(self):\n return self.params['particleMass']", "def turbulence(self, particles, current_step=0):\n\n for i in range(len(particles)):\n if i % 3 == 0:\n mutated = self.uniform_mutator.mutate(particles[i])\n elif i % 3 == 1:\n mutated = self.non_uniform_mutator.mutate(particles[i], current_step)\n particles[i].vector = copy(mutated.vector)\n return", "def particle(*args, attribute: Union[AnyStr, bool]=\"\", cache: bool=True, conserve: Union[float,\n bool]=0.0, count: bool=True, deleteCache: bool=True, dynamicAttrList: bool=True,\n floatValue: float=0.0, gridSpacing: Union[float, List[float], bool]=0.0, inherit:\n Union[float, bool]=0.0, jitterBasePoint: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, jitterRadius: Union[float,\n List[float], bool]=0.0, lowerLeft: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, name: Union[AnyStr, bool]=\"\",\n numJitters: Union[int, List[int], bool]=0, order: Union[int, bool]=0, particleId:\n Union[int, bool]=0, perParticleDouble: bool=True, perParticleVector: bool=True,\n position: Union[List[float, float, float], List[List[float, float, float]]]=None,\n shapeName: Union[AnyStr, bool]=\"\", upperRight: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, vectorValue: List[float, float,\n float]=None, q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def define_particle(self,line):\n\n pattern=re.compile(r'''^\\s*\n (?P<pid>-?\\d+)\\s+ #PID\n (?P<status>1)\\s+ #status (1 for output particle)\n (?P<mother>-?\\d+)\\s+ #mother\n (?P<dum3>-?\\d+)\\s+ #mother\n (?P<color1>[+-e.\\d]*)\\s+ #color1\n (?P<color2>[+-e.\\d]*)\\s+ #color2\n (?P<px>[+-e.\\d]*)\\s+ #px\n (?P<py>[+-e.\\d]*)\\s+ #py\n (?P<pz>[+-e.\\d]*)\\s+ #pz\n (?P<E>[+-e.\\d]*)\\s+ #E\n (?P<mass>[+-e.\\d]*)\\s+ #mass\n (?P<dum1>[+-e.\\d]*)\\s+ #dummy1\n (?P<dum2>[+-e.\\d]*)\\s* #dummy2\n $ #end of string\n ''',66) #verbose+ignore case\n if pattern.search(line):\n obj=pattern.search(line)\n E=obj.group('E')\n px=obj.group('px')\n py=obj.group('py')\n pz=obj.group('pz')\n particle=part_quadvec(E,px,py,pz)\n particle.def_mass(obj.group('mass'))\n particle.def_pid(obj.group('pid'))\n return particle\n else:\n return 0", "def eff_param():\n return r.TF1('photon_eff_param', eff_param_string(), 0, 7)", "def setupParticles(self):\n\n for ss in self.pargs['species']:\n\n # Make sure we are setting up particles, not walls (so we check for id existence)\n if 'id' in ss and 'wall' not in ss:\n if not self.rank:\n logging.info('Setting up particles for group{id}'.format(**ss))\n\n randName = np.random.randint(10**5,10**8)\n pddName = 'pdd' + '{}'.format(np.random.randint(10**5,10**8))\n\n if 'vol_lim' not in ss:\n ss['vol_lim'] = 1e-20\n\n id = ss['id'] - 1\n self.lmp.command('group group{} type {}'.format(id, ss['id']))\n\n if 'args'in ss:\n args = ss['args']\n else:\n args = ()\n\n if 'radius' in ss:\n radius = ss['radius']\n\n if not isinstance(radius, tuple):\n radius = ('constant', radius)\n\n self.lmp.command('fix {} '.format(randName) + 'group{}'.format(id) + ' particletemplate/{style} 15485867 volume_limit {vol_lim} atom_type {id} density 
constant {density} radius'.format(**ss) + (' {}' * len(radius)).format(*radius) \\\n + (' {}' * len(args)).format(*args))\n else:\n self.lmp.command('fix {} '.format(randName) + 'group{}'.format(id) + ' particletemplate/{style} 15485867 volume_limit {vol_lim} atom_type {id} density constant {density}'.format(**ss) + (' {}' * len(args)).format(*args))\n \n self.lmp.command('fix {} '.format(pddName) + 'group{}'.format(id) + ' particledistribution/discrete 67867967 1'.format(**ss) + ' {} 1.0'.format(randName))\n\n if ss['style'] is 'multisphere':\n itype = ss['style']\n else:\n itype = 'nve/{style}'.format(**ss)\n\n #Do NOT unfix randName! Will cause a memory corruption error\n self.pddName.append(pddName)", "def run(self):\r\n\r\n self.tick = self.tick + 1\r\n print 'Particle tick=:', self.tick", "def getEnergy(pos: dc.float64[N, 3], vel: dc.float64[N, 3],\n mass: dc.float64[N], G: dc.float64):\n # Kinetic Energy:\n # KE = 0.5 * np.sum(np.sum( mass * vel**2 ))\n # KE = 0.5 * np.sum( mass * vel**2 )\n KE = 0.5 * np.sum(np.reshape(mass, (N, 1)) * vel**2)\n\n # Potential Energy:\n\n # positions r = [x,y,z] for all particles\n x = pos[:, 0:1]\n y = pos[:, 1:2]\n z = pos[:, 2:3]\n\n # matrix that stores all pairwise particle separations: r_j - r_i\n # dx = x.T - x\n # dy = y.T - y\n # dz = z.T - z\n # dx = np.transpose(x) - x\n # dy = np.transpose(y) - y\n # dz = np.transpose(z) - z\n dx = np.add.outer(-x, x)\n dy = np.add.outer(-y, y)\n dz = np.add.outer(-z, z)\n\n # matrix that stores 1/r for all particle pairwise particle separations\n inv_r = np.sqrt(dx**2 + dy**2 + dz**2)\n # inv_r[inv_r>0] = 1.0/inv_r[inv_r>0]\n I = inv_r > 0\n np.divide(1.0, inv_r, out=inv_r, where=I)\n\n # sum over upper triangle, to count each interaction only once\n # PE = G * np.sum(np.sum(np.triu(-(mass*mass.T)*inv_r,1)))\n # PE = G * np.sum(np.triu(-(mass*mass.T)*inv_r,1))\n tmp = -np.multiply.outer(mass, mass) * inv_r\n PE = 0.0\n for j in range(N):\n for k in range(j + 1, N):\n PE += tmp[j, k]\n PE *= G\n\n return KE, PE", "def getBeliefDistribution(self):\n # This essentially gives a point to a location for each particle there, then \n # normalizes the point values so they add up to 1.\n dist = util.Counter()\n for part in self.particles: dist[part] += 1\n dist.normalize()\n return dist", "def __init__(self,nparticles,initial_condition):\n self.nparticles = nparticles\n self.particles = np.array([Particle(mass,x,y) for x,y,mass in initial_condition])\n self.mass = np.array([self.particles[i].mass for i in range(len(self.particles))])\n self.position = np.array([self.particles[i].position for i in range(len(self.particles))])\n self.momentum = np.array([self.particles[i].momentum for i in range(len(self.particles))])", "def particle_images (sim,frame_id) :\n # get positions of all particles: define first the atom selection, then jump to\n # the user-requested trajectory frame, get the box dimensions (currently works\n # only for orthorhombic boxes, then calculate the image indices\n atoms = sim.u.select_atoms ('all')\n ts = sim.u.trajectory[frame_id]\n L = ts.dimensions[:3]\n pos = atoms.positions + L/2.\n return pos//L", "def total_KE(particles):\r\n return sum([particle.kinetic_energy() for particle in particles])", "def nParticle(*args, attribute: Union[AnyStr, bool]=\"\", cache: bool=True, conserve: Union[float,\n bool]=0.0, count: bool=True, deleteCache: bool=True, dynamicAttrList: bool=True,\n floatValue: float=0.0, gridSpacing: Union[float, List[float], bool]=0.0, inherit:\n Union[float, bool]=0.0, 
jitterBasePoint: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, jitterRadius: Union[float,\n List[float], bool]=0.0, lowerLeft: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, name: Union[AnyStr, bool]=\"\",\n numJitters: Union[int, List[int], bool]=0, order: Union[int, bool]=0, particleId:\n Union[int, bool]=0, perParticleDouble: bool=True, perParticleVector: bool=True,\n position: Union[List[float, float, float], List[List[float, float, float]]]=None,\n shapeName: Union[AnyStr, bool]=\"\", upperRight: Union[List[float, float, float],\n List[List[float, float, float]], bool]=None, vectorValue: List[float, float,\n float]=None, q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr,\n Any]:\n pass", "def TotalEnergy(self):\n return (math.sqrt((Particle.RestEnergy(self) ** 2)\n + (np.linalg.norm(Particle.Momentum(self)) * const.speed_of_light) ** 2))", "def intensity(self) -> int:", "def GetSoft(self, particles):\n\n if not self.__openSFI:\n print('ERROR: Must use OpenSFI before GetSoft.')\n sys.exit(-1)\n if not self.__containsParticles:\n print('ERROR: RSoftSF file must contain particles to use this')\n print(' file.')\n sys.exit(-1)\n\n # Obtains structure functions\n SFs = self._ParticlesToSFs(particles)\n\n\n soft_arr = np.zeros(len(particles))\n for idx_particle, particle in enumerate(particles):\n\n f = particle[0]\n p = particle[1]\n type_ = self.__NcIO_dyn.GetDataCol(f,'type')[p]\n idx_type = np.where(type_==self.__types_unique)[0]\n\n # Loads plane (a) and intercept (b)\n if self.__containsRadial:\n a_rad = self.radial_plane[idx_type]\n if self.__containsAngular:\n a_ang = self.angular_plane[idx_type]\n else:\n a_ang = np.array([])\n a = np.hstack((a_rad,a_ang))\n b = self.intercept[idx_type]\n\n # Calculates softness\n soft_arr[idx_particle] = np.dot(a,SFs[idx_particle])+b\n\n return soft_arr", "def calcEnergy(self):\n speed_light = constants.physical_constants[\"speed of light in vacuum\"][0]#m/sec by default\n if self.mass is None:\n raise CoordinateVector(\"The particle mass needs to be specified to calculate the energy.\")\n return speed_light*math.sqrt(self.p*self.p + (self.mass*speed_light)**2)", "def _init_particles(self):\n self.NPART = self.grid.get_npart()\n self.particles = np.empty(self.NPART, dtype=object)\n for i in range(self.NPART):\n tmem = TMEM\n ux = UXM + UPRIME*normal()*LANGFACTOR\n vy = VYM + UPRIME*normal()*LANGFACTOR\n self.particles[i] = Particle(tmem=tmem, ux=ux, vy=vy)\n #\n # PUT THE PARTICLES IN THE CELLS.\n # LOOP OVER CELLS AND DEFINE THEIR PARTICLES.\n # FOR NOW, ONLY POSITION DEPENDS ON SPACE HEIGHT & MEMORY DO NOT.\n # FIRST THE TREE PARTICLES, THEN THE BUILDING PARTICLES.\n #\n NX = self.grid.NX\n NY = self.grid.NY\n icounter = 0\n for i in range(NX - 1):\n for j in range(NY - 1):\n cell = self.grid.CELLS[i, j]\n x = self.grid.XCELL[i, j]\n y = self.grid.YCELL[i, j]\n for k in range(cell.NPARTTR):\n self.particles[k + icounter].update(x=x, y=y, type=1)\n for k in range(cell.NPARTRAD):\n self.particles[k + cell.NPARTTR + icounter].update(x=x, y=y, type=2)\n icounter += cell.NPARTTR + cell.NPARTRAD", "def moments(self):", "def kinetic_energy(self):\r\n position, velocity, escaped_particles,impact, wall_collision,mom = self.box_collision_info()\r\n for j in xrange(1,self.n):\r\n abs_velocity = np.sqrt(velocity[:,0]**2+velocity[:,1]**2\r\n + velocity[:,2]**2)\r\n KE = 0.5*self.m*abs_velocity**2\r\n total_KE = np.sum(KE)\r\n invid_KE = total_KE/self.Npart\r\n\r\n return 
total_KE, invid_KE", "def __InitializeNorms(self, norm_SF, norm_plane):\n # Initializes values \n nc = self.__nc_RSoft_I\n n_SF = self.__n_SF_rad+self.__n_SF_ang\n mean_SF = np.zeros(n_SF)\n std_SF = np.zeros(n_SF)\n cov_SF = np.zeros((n_SF,n_SF))\n n_parts = 0\n\n for idx_type, type_ in enumerate(self.__types_unique):\n n_parts = 0\n idx_type_SF = np.where(self.__types==type_)[0]\n for f in range(self._n_f):\n # Finds particle typtes for each particle. \n particle_types = self.__NcIO_dyn.GetDataCol(f,'type')\n type_ids = np.where(particle_types==type_)[0]\n\n # Obtains radial and angular SFs for f\n if self.__containsRadial:\n rSF = nc.variables['radial_structures'][f][type_ids]\n else:\n rSF = np.zeros((len(type_ids),0))\n if self.__containsAngular:\n aSF = nc.variables['angular_structures'][f][type_ids]\n else:\n aSF = np.zeros((len(type_ids),0))\n SF = np.hstack((rSF,aSF))\n SF = SF[~np.isnan(np.sum(SF,axis=1))] # SHOULD REMOVE NaNs\n\n # Counts number of SFs in frame and sums particles to find\n # mean. We do not use mean function in case number of \n # particles changes between frames\n n_parts += len(SF)\n mean_SF[idx_type_SF] += np.sum(SF[:,idx_type_SF],axis=0)\n cov_SF[idx_type_SF[:,None],idx_type_SF[None,:]] += \\\n np.dot(SF[:,idx_type_SF].T,SF[:,idx_type_SF])\n\n # Calculates mean and covariance\n mean_SF[idx_type_SF] /= float(n_parts)\n cov_SF[idx_type_SF[:,None],idx_type_SF[None,:]] /= \\\n float(n_parts)\n cov_SF[idx_type_SF[:,None],idx_type_SF[None,:]] -= \\\n np.outer(mean_SF[idx_type_SF],mean_SF[idx_type_SF])\n std_SF = np.sqrt(np.diagonal(cov_SF))\n\n # Checks if std_SF == 0 for any structure functions\n if np.any(std_SF==0):\n print('WARNING: stdev of following structure functions is 0')\n idx_0s = np.where(std_SF==0)[0]\n for idx_0 in idx_0s:\n std_SF[idx_0] = 1\n if idx_0 < self.__n_SF_rad:\n mu = self.mus[idx_0]\n L = self.Ls[idx_0]\n X = self.radial_Xs[idx_0]\n Y = self.radial_Ys[idx_0]\n print(' radial structure function: mu = '+str(mu)+\\\n ', L = '+str(L)+', X = '+str(X)+', Y = '+str(Y))\n else:\n idx_0 -= self.__n_SF_rad\n xi = self.xis[idx_0]\n l = self.lambdas[idx_0] \n z = self.zetas[idx_0]\n X = self.angular_Xs[idx_0]\n Y = self.angular_Ys[idx_0]\n Z = self.angular_Za[idx_0]\n print(' angular structure function: xi = '+str(xi)+\\\n ', lambda = '+str(l)+', zeta = '+str(z)+\\\n ', X = '+str(X)+', Y = '+str(Y)+', Z = '+str(Z))\n\n self._mean_SF = mean_SF\n self._cov_SF = cov_SF\n self._std_SF = std_SF", "def Script3():\n # In non-script code, use getLogger(__name__) at module scope instead.\n logger = logging.getLogger(\"Script3\") \n gal_flux = 1.e5 # ADU\n gal_n = 3.5 #\n gal_re = 3.7 # pixels\n g1 = -0.23 #\n g2 = 0.15 #\n atmos_a_sigma=2.1 # pixels\n atmos_a_g1 = -0.13 # (shear for \"a\")\n atmos_a_g2 = -0.09 #\n atmos_fa=0.2 # (fraction of flux in \"a\")\n atmos_b_sigma=0.9 # pixels\n atmos_b_g1 = 0.02 # (shear for \"b\")\n atmos_b_g2 = -0.04 #\n opt_defocus=0.53 # wavelengths\n opt_a1=-0.29 # wavelengths\n opt_a2=0.12 # wavelengths\n opt_c1=0.64 # wavelengths\n opt_c2=-0.33 # wavelengths\n opt_padFactor=6 # multiples of Airy padding required to avoid folding for aberrated PSFs\n lam = 800 # nm NB: don't use lambda - that's a reserved word.\n tel_diam = 4. 
# meters \n pixel_scale = 0.23 # arcsec / pixel\n wcs_g1 = -0.02 #\n wcs_g2 = 0.01 #\n sky_level = 1.e3 # ADU / pixel\n gain = 1.7 # ADU / e-\n read_noise = 0.3 # ADU / pixel\n\n logger.info('Starting script 3 using:')\n logger.info(' - sheared (%.2f,%.2f) Sersic galaxy (flux = %.1e, n = %.1f, re = %.2f),', \n g1, g2, gal_flux, gal_n, gal_re)\n logger.info(' - sheared double-Gaussian atmospheric PSF')\n logger.info(' First component: sigma = %.2f, shear = (%.2f,%.2f), frac = %.2f',\n atmos_a_sigma, atmos_a_g1, atmos_a_g2, atmos_fa)\n logger.info(' Second component: sigma = %.2f, shear = (%.2f,%.2f), frac = %.2f',\n atmos_b_sigma, atmos_b_g1, atmos_b_g2, 1-atmos_fa)\n logger.info(' - optical PSF with defocus = %.2f, astigmatism = (%.2f,%.2f),',\n opt_defocus, opt_a1, opt_a2)\n logger.info(' coma = (%.2f,%.2f), lambda = %.0f nm, D = %.1f m', \n opt_c1, opt_c2, lam, tel_diam)\n logger.info(' - pixel scale = %.2f,',pixel_scale)\n logger.info(' - WCS distortion = (%.2f,%.2f),',wcs_g1,wcs_g2)\n logger.info(' - Poisson noise (sky level = %.1e, gain = %.1f).',sky_level, gain)\n logger.info(' - Gaussian read noise (sigma = %.2f).',read_noise)\n\n \n # Define the galaxy profile.\n gal = galsim.Sersic(gal_n, flux=gal_flux, re=gal_re)\n\n # Shear the galaxy by some value.\n gal.applyShear(g1, g2)\n logger.info('Made galaxy profile')\n\n # Define the atmospheric part of the PSF.\n atmos_a = galsim.Gaussian(flux=atmos_fa, sigma=atmos_a_sigma)\n atmos_a.applyShear(atmos_a_g1 , atmos_a_g2)\n atmos_b = galsim.Gaussian(flux=1-atmos_fa, sigma=atmos_b_sigma)\n atmos_b.applyShear(atmos_b_g1 , atmos_b_g2)\n atmos = galsim.Add([atmos_a, atmos_b])\n logger.info('Made atmospheric PSF profile')\n\n # Define the optical part of the PSF.\n # The first argument of OpticalPSF below is lambda/D,\n # which needs to be in pixel units, so do the calculation:\n lam_over_D = lam * 1.e-9 / tel_diam # radians\n lam_over_D *= 206265 # arcsec\n lam_over_D *= pixel_scale # pixels\n logger.info('Calculated lambda over D = %f pixels', lam_over_D)\n # The rest of the values here should be given in units of the \n # wavelength of the incident light. 
padFactor is used to here to reduce 'folding' for these\n # quite strong aberration values\n optics = galsim.OpticalPSF(lam_over_D, \n defocus=opt_defocus, coma1=opt_c1, coma2=opt_c2, astig1=opt_a1,\n astig2=opt_a2, padFactor=opt_padFactor)\n logger.info('Made optical PSF profile')\n\n # Start with square pixels\n pix = galsim.Pixel(xw=pixel_scale, yw=pixel_scale)\n # Then shear them slightly by the negative of the wcs shear.\n # This way the later distortion of the full image will bring them back to square.\n pix.applyShear(-wcs_g1, -wcs_g2)\n logger.info('Made pixel profile')\n\n # Final profile is the convolution of these.\n final = galsim.Convolve([gal, atmos, optics, pix])\n final_epsf = galsim.Convolve([atmos, optics, pix])\n logger.info('Convolved components into final profile')\n\n # Now apply the wcs shear to the final image.\n final.applyShear(wcs_g1, wcs_g2)\n final_epsf.applyShear(wcs_g1, wcs_g2)\n logger.info('Applied WCS distortion')\n\n # Draw the image with a particular pixel scale.\n image = final.draw(dx=pixel_scale)\n image_epsf = final_epsf.draw(dx=pixel_scale)\n # Draw the optical PSF component at its Nyquist sample rate\n image_opticalpsf = optics.draw(dx=lam_over_D/2.)\n logger.info('Made image of the profile')\n\n # Add a constant sky level to the image.\n sky_image = galsim.ImageF(bounds=image.getBounds(), initValue=sky_level)\n image += sky_image\n\n # Add Poisson noise to the image.\n rng = galsim.UniformDeviate(1314662)\n galsim.noise.addPoisson(image, rng, gain=gain)\n\n # Also add (Gaussian) read noise.\n galsim.noise.addGaussian(image, rng, sigma=read_noise)\n\n # Subtract off the sky.\n image -= sky_image\n logger.info('Added Gaussian and Poisson noise')\n\n # Write the image to a file\n if not os.path.isdir('output'):\n os.mkdir('output')\n file_name = os.path.join('output', 'demo3.fits')\n file_name_opticalpsf = os.path.join('output','demo3_opticalpsf.fits')\n file_name_epsf = os.path.join('output','demo3_epsf.fits')\n \n image.write(file_name, clobber=True)\n image_opticalpsf.write(file_name_opticalpsf, clobber=True)\n image_epsf.write(file_name_epsf, clobber=True)\n logger.info('Wrote image to %r', file_name)\n logger.info('Wrote optics-only PSF image (Nyquist sampled) to %r', file_name_opticalpsf)\n logger.info('Wrote effective PSF image to %r', file_name_epsf)\n\n moments = HSM_Moments(file_name)\n moments_corr = HSM_Regauss(file_name, file_name_epsf, image.array.shape)\n\n logger.info('HSM reports that the image has measured moments:')\n logger.info(' Mxx = %.3f, Myy = %.3f, Mxy = %.3f', moments.mxx, moments.myy, moments.mxy)\n logger.info('When carrying out Regaussianization PSF correction, HSM reports')\n logger.info(' g1,g2 = %f,%f', moments_corr.g1, moments_corr.g2)\n logger.info('Expected values in the limit that noise and non-Gaussianity are negligible:')\n logger.info(' g1,g2 = %f,%f', g1+wcs_g1,g2+wcs_g2)\n print", "def calP(self):\n N = len(self.listOfParticles)\n m = self.listOfParticles[0].m\n vsum = 0\n for particle in self.listOfParticles:\n vsum += particle.V.len()\n A = np.pi*self.R**2\n F = 0.5 * A * (2*self.R) * m * N * vsum**2\n return F", "def analyze(self, event):\n pfcands = Collection(event, \"FatJetPFCands\")\n jets = Collection(event, \"FatJet\")\n svs = Collection(event, \"SV\")\n taus = Collection(event, \"Tau\")\n met = Object(event, \"MET\")\n pupmet = Object(event, \"PuppiMET\")\n\n IN_hadhad_v4p1_old = np.full(1, -1., dtype=np.float32)\n GRU_hadel_v6p1_old = np.full(1, -1., dtype=np.float32)\n GRU_hadmu_v6p1_old = 
np.full(1, -1., dtype=np.float32)\n\n IN_hadhad_v4p1 = np.full(1, -1., dtype=np.float32)\n GRU_hadel_v6p1 = np.full(1, -1., dtype=np.float32)\n GRU_hadmu_v6p1 = np.full(1, -1., dtype=np.float32)\n\n IN_hadhad_v4p1_ohe = np.full(1, -1., dtype=np.float32)\n IN_hadel_v4p1_ohe = np.full(1, -1., dtype=np.float32)\n IN_hadmu_v4p1_ohe = np.full(1, -1., dtype=np.float32)\n\n PostTagger_hadhad_v1p1 = np.full(1, -1., dtype=np.float32)\n PostTagger_hadel_v1p1 = np.full(1, -1., dtype=np.float32)\n PostTagger_hadmu_v1p1 = np.full(1, -1., dtype=np.float32)\n\n Ztagger_Zee = np.full(1, -1., dtype=np.float32)\n Ztagger_Zmm = np.full(1, -1., dtype=np.float32)\n Ztagger_Zhh = np.full(1, -1., dtype=np.float32)\n Ztagger_Zhe = np.full(1, -1., dtype=np.float32)\n Ztagger_Zhm = np.full(1, -1., dtype=np.float32)\n\n MassReg_hadhad = np.full(1, -1., dtype=np.float32)\n MassReg_hadel = np.full(1, -1., dtype=np.float32)\n MassReg_hadmu = np.full(1, -1., dtype=np.float32)\n\n jet_idx = -1\n min_dphi = 999.\n for ij, jet in enumerate(jets):\n if (jet.pt < 200.): continue\n this_dphi = abs(signedDeltaPhi(met.phi, jet.phi))\n if (this_dphi < min_dphi):\n min_dphi = this_dphi\n jet_idx = ij\n pf_idx = 0\n\n for ij, jet in enumerate(jets):\n\n # if jet.pt < 400 or jet.msoftdrop < 30 : continue\n if (ij < jet_idx):\n pf_idx = pf_idx + jet.nPFConstituents\n continue\n elif (ij > jet_idx):\n continue\n if jet.nPFConstituents < 1: continue\n ##Fill basic jet properties\n jpt = jet.pt\n jLSpt = jet.LSpt\n jeta = jet.eta\n jphi = jet.phi\n jmsd = jet.msoftdrop\n jLSmsd = jet.LSmsoftdrop\n jm = jet.mass\n jdRLep = jet.dRLep\n jlsf3 = jet.lsf3\n jn2b1 = jet.n2b1\n jLSn2b1 = jet.LSn2b1\n jdeepTagZqq = jet.deepTagZqq\n jdeepTagWqq = jet.deepTagWqq\n jn3b1 = jet.n3b1\n jLSn3b1 = jet.LSn3b1\n try:\n jtau21 = float(jet.tau2) / float(jet.tau1)\n except:\n jtau21 = 0.\n try:\n jtau32 = float(jet.tau3) / float(jet.tau2)\n except:\n jtau32 = 0.\n try:\n jtau43 = float(jet.tau4) / float(jet.tau3)\n except:\n jtau43 = 0.\n try:\n jLStau21 = float(jet.LStau2) / float(jet.LStau1)\n except:\n jLStau21 = 0.\n try:\n jLStau32 = float(jet.LStau3) / float(jet.LStau2)\n except:\n jLStau32 = 0.\n try:\n jLStau43 = float(jet.LStau4) / float(jet.LStau3)\n except:\n jLStau43 = 0.\n\n jetv = ROOT.TLorentzVector()\n jetv.SetPtEtaPhiM(jet.pt, jet.eta, jet.phi, jet.mass)\n\n ##Fill SV\n svpt = np.zeros(self.Nsvs, dtype=np.float16)\n svdlen = np.zeros(self.Nsvs, dtype=np.float16)\n svdlenSig = np.zeros(self.Nsvs, dtype=np.float16)\n svdxy = np.zeros(self.Nsvs, dtype=np.float16)\n svdxySig = np.zeros(self.Nsvs, dtype=np.float16)\n svchi2 = np.zeros(self.Nsvs, dtype=np.float16)\n svpAngle = np.zeros(self.Nsvs, dtype=np.float16)\n svx = np.zeros(self.Nsvs, dtype=np.float16)\n svy = np.zeros(self.Nsvs, dtype=np.float16)\n svz = np.zeros(self.Nsvs, dtype=np.float16)\n svmass = np.zeros(self.Nsvs, dtype=np.float16)\n svphi = np.zeros(self.Nsvs, dtype=np.float16)\n sveta = np.zeros(self.Nsvs, dtype=np.float16)\n svv = ROOT.TLorentzVector()\n arrIdx = 0\n for isv, sv in enumerate(svs):\n if arrIdx == self.Nsvs: break\n svv.SetPtEtaPhiM(sv.pt, sv.eta, sv.phi, sv.mass)\n if jetv.DeltaR(svv) < 0.8:\n svpt[arrIdx] = sv.pt / jpt\n svdlen[arrIdx] = sv.dlen\n svdlenSig[arrIdx] = sv.dlenSig\n svdxy[arrIdx] = sv.dxy\n svdxySig[arrIdx] = sv.dxySig\n svchi2[arrIdx] = sv.chi2\n svpAngle[arrIdx] = sv.pAngle\n svx[arrIdx] = sv.x\n svy[arrIdx] = sv.y\n svz[arrIdx] = sv.z\n sveta[arrIdx] = sv.eta - jeta\n svphi[arrIdx] = signedDeltaPhi(sv.phi, jphi)\n svmass[arrIdx] = sv.mass\n 
arrIdx += 1\n\n # Fill Taus\n tau_charge = np.zeros(self.Ntaus, dtype=np.float16)\n tau_chargedIso = np.zeros(self.Ntaus, dtype=np.float16)\n tau_dxy = np.zeros(self.Ntaus, dtype=np.float16)\n tau_dz = np.zeros(self.Ntaus, dtype=np.float16)\n tau_eta = np.zeros(self.Ntaus, dtype=np.float16)\n tau_leadTkDeltaEta = np.zeros(self.Ntaus, dtype=np.float16)\n tau_leadTkDeltaPhi = np.zeros(self.Ntaus, dtype=np.float16)\n tau_leadTkPtOverTauPt = np.zeros(self.Ntaus, dtype=np.float16)\n tau_mass = np.zeros(self.Ntaus, dtype=np.float16)\n tau_neutralIso = np.zeros(self.Ntaus, dtype=np.float16)\n tau_phi = np.zeros(self.Ntaus, dtype=np.float16)\n tau_photonsOutsideSignalCone = np.zeros(self.Ntaus, dtype=np.float16)\n tau_pt = np.zeros(self.Ntaus, dtype=np.float16)\n tau_rawAntiEle = np.zeros(self.Ntaus, dtype=np.float16)\n tau_rawIso = np.zeros(self.Ntaus, dtype=np.float16)\n tau_rawIsodR03 = np.zeros(self.Ntaus, dtype=np.float16)\n tau_rawMVAoldDM2017v2 = np.zeros(self.Ntaus, dtype=np.float16)\n tau_rawMVAoldDMdR032017v2 = np.zeros(self.Ntaus, dtype=np.float16)\n tauv = ROOT.TLorentzVector()\n tauIdx = 0\n for tau in taus:\n if tauIdx == self.Ntaus:\n break\n tauv.SetPtEtaPhiM(tau.pt, tau.eta, tau.phi, tau.mass)\n if jetv.DeltaR(tauv) < 0.8:\n tau_charge[tauIdx] = tau.charge\n tau_chargedIso[tauIdx] = tau.chargedIso / tau.pt\n tau_dxy[tauIdx] = tau.dxy\n tau_dz[tauIdx] = tau.dz\n tau_eta[tauIdx] = tau.eta - jeta\n tau_leadTkDeltaEta[tauIdx] = tau.leadTkDeltaEta\n tau_leadTkDeltaPhi[tauIdx] = tau.leadTkDeltaPhi\n tau_leadTkPtOverTauPt[tauIdx] = tau.leadTkPtOverTauPt\n tau_mass[tauIdx] = tau.mass\n tau_neutralIso[tauIdx] = tau.neutralIso / tau.pt\n tau_phi[tauIdx] = signedDeltaPhi(tau.phi, jphi)\n tau_photonsOutsideSignalCone[tauIdx] = tau.photonsOutsideSignalCone\n tau_pt[tauIdx] = tau.pt / jpt\n tau_rawAntiEle[tauIdx] = tau.rawAntiEle\n tau_rawIso[tauIdx] = tau.rawIso / tau.pt\n tau_rawIsodR03[tauIdx] = tau.rawIsodR03\n tau_rawMVAoldDM2017v2[tauIdx] = tau.rawMVAoldDM2017v2\n tau_rawMVAoldDMdR032017v2[tauIdx] = tau.rawMVAoldDMdR032017v2\n tauIdx += 1\n\n ##find candidates associated to jet\n candrange = range(pf_idx, pf_idx + jet.nPFConstituents)\n\n ##Fill PF candidates\n pfpt = np.zeros(self.Nparts, dtype=np.float16)\n pfeta = np.zeros(self.Nparts, dtype=np.float16)\n pfphi = np.zeros(self.Nparts, dtype=np.float16)\n pftrk = np.zeros(self.Nparts, dtype=np.float16)\n pfpup = np.zeros(self.Nparts, dtype=np.float16)\n pfpupnolep = np.zeros(self.Nparts, dtype=np.float16)\n pfq = np.zeros(self.Nparts, dtype=np.float16)\n pfid = np.zeros(self.Nparts, dtype=np.float16)\n pfdz = np.zeros(self.Nparts, dtype=np.float16)\n pfdxy = np.zeros(self.Nparts, dtype=np.float16)\n pfdxyerr = np.zeros(self.Nparts, dtype=np.float16)\n arrIdx = 0\n for ip, part in enumerate(pfcands):\n if ip not in candrange: continue\n if arrIdx == self.Nparts: break\n pfpt[arrIdx] = part.pt / jpt\n pfeta[arrIdx] = part.eta - jeta\n pfphi[arrIdx] = signedDeltaPhi(part.phi, jphi)\n pfpup[arrIdx] = part.puppiWeight\n pfpupnolep[arrIdx] = part.puppiWeightNoLep\n pfq[arrIdx] = part.charge\n pfid[arrIdx] = part.pdgId\n pfdz[arrIdx] = part.dz\n pfdxy[arrIdx] = part.d0\n pfdxyerr[arrIdx] = part.d0Err\n pftrk[arrIdx] = part.trkChi2\n arrIdx += 1\n\n # print(pfpt,pfeta,pfphi,pfdz,pfd0)\n ##define and reshape features\n pfData = np.vstack([pfpt, pfeta, pfphi, pfq, pfdz, pfdxy, pfdxyerr, pfpup, pfpupnolep, pfid])\n pfData = np.transpose(pfData)\n pfData = np.expand_dims(pfData,axis=0)\n svData = np.vstack([svdlen,svdlenSig, svdxy, svdxySig, 
svchi2, svpAngle, svx, svy, svz, svpt, svmass, sveta, svphi])\n svData = np.transpose(svData)\n svData = np.expand_dims(svData, axis=0)\n tauData = np.vstack([tau_charge, tau_chargedIso, tau_dxy, tau_dz, tau_eta, tau_leadTkDeltaEta, tau_leadTkDeltaPhi, tau_leadTkPtOverTauPt, tau_mass, tau_neutralIso, tau_phi, tau_photonsOutsideSignalCone, tau_pt, tau_rawAntiEle, tau_rawIso, tau_rawIsodR03, tau_rawMVAoldDM2017v2, tau_rawMVAoldDMdR032017v2])\n tauData = np.transpose(tauData)\n tauData = np.expand_dims(tauData, axis=0)\n #[\"MET_covXX\",\"MET_covXY\",\"MET_covYY\",\"MET_phi\",\"MET_pt\",\"MET_significance\",\"PuppiMET_pt\",\"PuppiMET_phi\",\"fj_eta\",\"fj_phi\",\"fj_msd\",\"fj_pt\"]\n #evtData = np.array([met.covXX,met.covXY,met.covYY,met.phi,met.pt,met.significance,pupmet.pt,pupmet.phi,jeta,jphi,jmsd,jpt])\n evtData = np.array([met.covXX,met.covXY,met.covYY,signedDeltaPhi(met.phi,jphi),met.pt,met.significance,pupmet.pt,signedDeltaPhi(pupmet.phi,jphi),jeta,jphi,jmsd,jpt])\n evtData = np.expand_dims(evtData,axis=0)\n\n IN_hadhad_v4p1_old[0] = float(self.model4p1_hadhad_old.predict([pfData, svData]))\n GRU_hadel_v6p1_old[0] = float(self.model6p1_hadel_old.predict([pfData, svData]))\n GRU_hadmu_v6p1_old[0] = float(self.model6p1_hadmu_old.predict([pfData, svData]))\n\n idconv = {211.:1, 13.:2, 22.:3, 11.:4, 130.:5, 1.:6, 2.:7, 3.:8, 4.:9,\n 5.:10, -211.:1, -13.:2,\n -11.:4, -1.:-6, -2.:7, -3.:8, -4.:9, -5.:10, 0.:0}\n pfData[:,:,-1] = np.vectorize(idconv.__getitem__)(pfData[:,:,-1])\n\n IN_hadhad_v4p1[0] = float(self.model4p1_hadhad.predict([pfData, svData]))\n GRU_hadel_v6p1[0] = float(self.model6p1_hadel.predict([pfData, svData]))\n GRU_hadmu_v6p1[0] = float(self.model6p1_hadmu.predict([pfData, svData]))\n\n # Need to add in one hot encoding for particle data here! 
The order is just 0 through 10 consecutively\n\n IN_hadhad_v4p1_ohe[0] = float(self.model4p1_hadhad_ohe.predict([pfData, svData]))\n IN_hadel_v4p1_ohe[0] = float(self.model4p1_hadel_ohe.predict([pfData, svData]))\n IN_hadmu_v4p1_ohe[0] = float(self.model4p1_hadmu_ohe.predict([pfData, svData]))\n\n PostTagger_hadhad_v1p1[0] = float(self.postTagger1p1_hadhad.predict([tauData, IN_hadhad_v4p1_ohe[0]]))\n PostTagger_hadel_v1p1[0] = float(self.postTagger1p1_hadel.predict([tauData, IN_hadel_v4p1_ohe[0]]))\n PostTagger_hadmu_v1p1[0] = float(self.postTagger1p1_hadmu.predict([tauData, IN_hadmu_v4p1_ohe[0]]))\n\n Ztagger_pred = self.Ztagger.predict([pfData, svData])\n Ztagger_Zee[0] = float(Ztagger_pred[0][0])\n Ztagger_Zmm[0] = float(Ztagger_pred[0][1])\n Ztagger_Zhh[0] = float(Ztagger_pred[0][2])\n Ztagger_Zhe[0] = float(Ztagger_pred[0][3])\n Ztagger_Zhm[0] = float(Ztagger_pred[0][4])\n\n MassReg_hadhad[0] = float(self.massreg_hadhad.predict([pfData, svData, evtData]))\n MassReg_hadel[0] = float(self.massreg_hadel.predict([pfData, svData, evtData]))\n MassReg_hadmu[0] = float(self.massreg_hadmu.predict([pfData, svData, evtData]))\n\n #self.log_pf.append(pfData)\n #self.log_sv.append(svData)\n #self.log_evt.append(evtData)\n #self.log_mreg.append(np.array([MassReg_hadhad[0], MassReg_hadel[0], MassReg_hadmu[0]]))\n\n #with open('test.npy', 'wb') as f:\n # np.save(f, np.vstack(self.log_pf))\n # np.save(f, np.vstack(self.log_sv))\n # np.save(f, np.vstack(self.log_evt))\n # np.save(f, np.vstack(self.log_mreg))\n #np.save(f, pfData)\n #np.save(f, svData)\n #np.save(f, evtData)\n #np.save(f, np.array([MassReg_hadhad[0], MassReg_hadel[0], MassReg_hadmu[0]]))\n #np.save(f, self.massreg_hadhad.get_weights())\n #np.save(f, self.massreg_hadel.get_weights())\n #np.save(f, self.massreg_hadmu.get_weights())\n\n # assert abs( 1 - float(self.model.predict(X)[0,1]) - float(self.model.predict(X)[0,0])) < 0.02\n # print(X,IN_hadhad_v4p1[0], GRU_hadel_v6p1[0])\n self.out.fillBranch(\"IN_hadhad_v4p1_old\", IN_hadhad_v4p1_old)\n self.out.fillBranch(\"GRU_hadel_v6p1_old\", GRU_hadel_v6p1_old)\n self.out.fillBranch(\"GRU_hadmu_v6p1_old\", GRU_hadmu_v6p1_old)\n\n self.out.fillBranch(\"IN_hadhad_v4p1\", IN_hadhad_v4p1)\n self.out.fillBranch(\"GRU_hadel_v6p1\", GRU_hadel_v6p1)\n self.out.fillBranch(\"GRU_hadmu_v6p1\", GRU_hadmu_v6p1)\n\n self.out.fillBranch(\"IN_hadhad_v4p1_ohe\", IN_hadhad_v4p1_ohe)\n self.out.fillBranch(\"IN_hadel_v4p1_ohe\", IN_hadel_v4p1_ohe)\n self.out.fillBranch(\"IN_hadmy_v4p1_ohe\", IN_hadmu_v4p1_ohe)\n\n self.out.fillBranch(\"PostTagger_hadhad_v1p1\", PostTagger_hadhad_v1p1)\n self.out.fillBranch(\"PostTagger_hadel_v1p1\", PostTagger_hadel_v1p1)\n self.out.fillBranch(\"PostTagger_hadmu_v1p1\", PostTagger_hadmu_v1p1)\n\n self.out.fillBranch(\"Ztagger_Zee\", Ztagger_Zee)\n self.out.fillBranch(\"Ztagger_Zmm\", Ztagger_Zmm)\n self.out.fillBranch(\"Ztagger_Zhh\", Ztagger_Zhh)\n self.out.fillBranch(\"Ztagger_Zhe\", Ztagger_Zhe)\n self.out.fillBranch(\"Ztagger_Zhm\", Ztagger_Zhm)\n\n self.out.fillBranch(\"MassReg_hadhad\", MassReg_hadhad)\n self.out.fillBranch(\"MassReg_hadel\", MassReg_hadel)\n self.out.fillBranch(\"MassReg_hadmu\", MassReg_hadmu)\n return True", "def save_particles_image(self):\n base_filename = self.config['info']['filename_microscope']\n self.save_image_microscope_camera(base_filename)", "def function_donnee_pep():\r\n\r\n particles = data_function_particle()\r\n weather = data_function_weather()\r\n wind = data_function_wind()\r\n temperature = data_function_temperature()\r\n season = 
data_function_season()\r\n deaparture = data_function_departure()\r\n day = data_function_day()\r\n rank = data_function_ranking()\r\n pressure = data_function_pressure()\r\n demonstration = data_function_demonstration()\r\n\r\n return particles, weather, wind, temperature, season, deaparture,\\\r\n day, rank, pressure, demonstration", "def flux():\n delta = 0.01 # film thickness, [dm]\n c = pre * 10 ** 2 / (R * tem) # total concentration calculated by ideal gas equation, in [mol/L]\n D12 = 0.001626528 / pre # HCl diffusion in Air, [dm2/s] @296K\n D13 = 3e-7 # HCl gas diffusion in water, [dm2/s] @296K\n D23 = 1.5e-7 # CH4 gas diffusion in water, [dm2/s] @296K\n N1 = ((x1_bar * x2d * D23) / (x2_bar * delta * D13) - x1_bar / delta) / \\\n (x2_bar / (D12 * c) + x3_bar / (D13 * c) + D23 * x1_bar / (D12 * D13 * c))\n # print 'Flux of HCl into water', abs(N1), [mol/(dm2*sec)]\n return N1", "def getP0(self):\n\t\tmyhmag.initializehelmholtz()\n\t\tabar = 13.714285714285715\n\t\tzbar = abar/2.0\n\t\tself.data[\"P0\"] = np.zeros(len(self.data[\"rho\"]))\n\t\tfor i in range(len(self.data[\"rho\"])):\n\t\t\tself.data[\"P0\"][i],energ,sound,gammaout,entropy,dummyfail = myhmag.gethelmholtzeos(1000.,self.data[\"rho\"][i],abar,zbar,True)", "def values(self):\n return _osgAnimation.mapVertexInfluence_values(self)", "def info(self):\n\n\t\tprint(\"Pixels on a side: {0}\".format(self.data.shape[0]))\n\t\tprint(\"Pixel size: {0}\".format(self.resolution))\n\t\tprint(\"Total angular size: {0}\".format(self.side_angle))\n\t\tprint(\"lmin={0:.1e} ; lmax={1:.1e}\".format(self.lmin,self.lmax))", "def test_particle_obj():\n # Set up the base parameters describing a particle object\n T = 273.15 + 15.\n P = 150e5\n Sa = 35.\n Ta = 273.15 + 4.\n composition = ['methane', 'ethane', 'propane', 'oxygen']\n yk = np.array([0.85, 0.07, 0.08, 0.0])\n de = 0.005\n K = 1.\n Kt = 1.\n fdis = 1e-6\n\n # Compute a few derived quantities\n bub = dbm.FluidParticle(composition)\n m0 = bub.masses_by_diameter(de, T, P, yk)\n\n # Create a `SingleParticle` object\n bub_obj = dispersed_phases.SingleParticle(bub, m0, T, K, fdis=fdis,\n K_T=Kt)\n\n # Check if the initial attributes are correct\n for i in range(len(composition)):\n assert bub_obj.composition[i] == composition[i]\n assert_array_almost_equal(bub_obj.m0, m0, decimal=6)\n assert bub_obj.T0 == T\n assert bub_obj.cp == seawater.cp() * 0.5\n assert bub_obj.K == K\n assert bub_obj.K_T == Kt\n assert bub_obj.fdis == fdis\n for i in range(len(composition)-1):\n assert bub_obj.diss_indices[i] == True\n assert bub_obj.diss_indices[-1] == False\n\n # Check if the values returned by the `properties` method match the input\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = bub_obj.properties(m0, T, P,\n Sa, Ta, 0.)\n us_ans = bub.slip_velocity(m0, T, P, Sa, Ta)\n rho_p_ans = bub.density(m0, T, P)\n A_ans = bub.surface_area(m0, T, P, Sa, Ta)\n Cs_ans = bub.solubility(m0, T, P, Sa)\n beta_ans = bub.mass_transfer(m0, T, P, Sa, Ta)\n beta_T_ans = bub.heat_transfer(m0, T, P, Sa, Ta)\n assert us == us_ans\n assert rho_p == rho_p_ans\n assert A == A_ans\n assert_array_almost_equal(Cs, Cs_ans, decimal=6)\n assert_array_almost_equal(beta, beta_ans, decimal=6)\n assert beta_T == beta_T_ans\n assert T == T_ans\n\n # Check that dissolution shuts down correctly\n m_dis = np.array([m0[0]*1e-10, m0[1]*1e-8, m0[2]*1e-3, 1.5e-5])\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = bub_obj.properties(m_dis, T, P,\n Sa, Ta, 0)\n assert beta[0] == 0.\n assert beta[1] == 0.\n assert beta[2] > 0.\n assert beta[3] > 0.\n 
m_dis = np.array([m0[0]*1e-10, m0[1]*1e-8, m0[2]*1e-7, 1.5e-16])\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = bub_obj.properties(m_dis, T, P,\n Sa, Ta, 0.)\n assert np.sum(beta[0:-1]) == 0.\n assert us == 0.\n assert rho_p == seawater.density(Ta, Sa, P)\n\n # Check that heat transfer shuts down correctly\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = bub_obj.properties(m_dis, Ta, P,\n Sa, Ta, 0)\n assert beta_T == 0.\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = bub_obj.properties(m_dis, T, P,\n Sa, Ta, 0)\n assert beta_T == 0.\n\n # Check the value returned by the `diameter` method\n de_p = bub_obj.diameter(m0, T, P, Sa, Ta)\n assert_approx_equal(de_p, de, significant=6)\n\n # Check functionality of insoluble particle\n drop = dbm.InsolubleParticle(isfluid=True, iscompressible=True)\n m0 = drop.mass_by_diameter(de, T, P, Sa, Ta)\n\n # Create a `Particle` object\n drop_obj = dispersed_phases.SingleParticle(drop, m0, T, K, fdis=fdis,\n K_T=Kt)\n\n # Check if the values returned by the `properties` method match the input\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = drop_obj.properties(\n np.array([m0]), T, P, Sa, Ta, 0)\n us_ans = drop.slip_velocity(m0, T, P, Sa, Ta)\n rho_p_ans = drop.density(T, P, Sa, Ta)\n A_ans = drop.surface_area(m0, T, P, Sa, Ta)\n beta_T_ans = drop.heat_transfer(m0, T, P, Sa, Ta)\n assert us == us_ans\n assert rho_p == rho_p_ans\n assert A == A_ans\n assert beta_T == beta_T_ans\n\n # Check that heat transfer shuts down correctly\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = drop_obj.properties(m_dis, Ta, P,\n Sa, Ta, 0)\n assert beta_T == 0.\n (us, rho_p, A, Cs, beta, beta_T, T_ans) = drop_obj.properties(m_dis, T, P,\n Sa, Ta, 0)\n assert beta_T == 0.\n\n # Check the value returned by the `diameter` method\n de_p = drop_obj.diameter(m0, T, P, Sa, Ta)\n assert_approx_equal(de_p, de, significant=6)", "def metropolis_step(self, positions):\n \"\"\"with brute-force sampling of new positions.\"\"\"\n\n # r = random.random()*random.choice((-1, 1))\n # r is a random number drawn from the uniform prob. dist. 
in [0,1]\n r = np.zeros(self.num_d)\n for i in range(self.num_d):\n r[i] = np.random.uniform(-1, 1)\n # Pick a random particle\n random_index = np.random.randint(0, high=len(positions))\n new_positions = np.array(positions)\n new_random_position = new_positions[random_index, :]\n # Suggest a new move\n new_positions[random_index, :] = new_random_position + r*self.delta_R\n # Old system and wavefunction\n wavefunction = self.w.wavefunction(positions)\n old_wavefunction_squared = wavefunction**2\n\n # Test the new position with a new system and wavefunction\n # sys_test = System(self.num_p, self.num_d)\n # sys_test.positions_distances(new_positions)\n # alpha = self.w.alpha\n # beta = self.w.beta\n # a = self.w.a\n # wave_test = Wavefunction(self.num_p, self.num_d, alpha, beta, a, sys_test)\n # test_wavefunction = wave_test.wavefunction(new_positions)\n test_wavefunction = self.w.wavefunction(new_positions)\n\n new_wavefunction_squared = test_wavefunction**2\n # print ('Old = ', positions)\n\n if new_wavefunction_squared <= 1e-14:\n pass\n else:\n # acceptance_ratio = self.w.wavefunction_ratio(positions,\n # new_positions)\n acceptance_ratio = new_wavefunction_squared/old_wavefunction_squared\n epsilon = np.random.sample()\n\n if acceptance_ratio > epsilon:\n positions = new_positions\n # print ('New = ', positions)\n # self.s.distances_update(positions, random_index)\n # self.s.positions_distances(new_positions)\n self.c += 1.0\n\n else:\n pass\n\n return positions", "def _perturbation(self):\n if self.P > 1:\n scales = []\n for term_i in range(self.n_randEffs):\n _scales = sp.randn(self.diag[term_i].shape[0])\n if self.jitter[term_i] > 0:\n _scales = sp.concatenate((_scales, sp.zeros(1)))\n scales.append(_scales)\n scales = sp.concatenate(scales)\n else:\n scales = sp.randn(self.vd.getNumberScales())\n return scales", "def info(self):\n ss = \"\\nSummary PSF3D info\\n\"\n ss += \"---------------------\\n\"\n ss += array_stats_str(self.energy_lo, \"energy_lo\")\n ss += array_stats_str(self.energy_hi, \"energy_hi\")\n ss += array_stats_str(self.offset, \"offset\")\n ss += array_stats_str(self.rad_lo, \"rad_lo\")\n ss += array_stats_str(self.rad_hi, \"rad_hi\")\n ss += array_stats_str(self.psf_value, \"psf_value\")\n\n # TODO: should quote containment values also\n\n return ss", "def read_core_temp(self) -> float:", "def atmospheric_ion_neutral_collision_frequency(self):\n nu = 3.8e-11*self.msis[\"nn\"]\n return nu", "def bndy_plasma(self):\n self.ne[0], self.ne[-1] = 1e11, 1e11\n self.ni[0], self.ni[-1] = 1e11, 1e11\n self.nn[0], self.nn[-1] = 1e11, 1e11\n self.Te[0], self.Te[-1] = 0.1, 0.1\n self.Ti[0], self.Ti[-1] = 0.01, 0.01\n # self.coll_em[0], self.coll_em[-1] = 1e5, 1e5\n # self.coll_im[0], self.coll_im[-1] = 1e5, 1e5", "def analyse ( self ) :\n \n ## get all B0 particles\n bs1 = self.gselect ( 'bs1' , \"[ Beauty => ( D_s+ ==> K- K+ pi+ ) K-]CC \")\n bs2 = self.gselect ( 'bs2' , \"[ Beauty -> ( D_s+ --> K- K+ pi+ ) K-]CC \")\n \n cnt = self.counter(\"#1 + photos \")\n cnt += bs1.size()\n \n cnt = self.counter(\"#2 - photos \")\n cnt += bs2.size()\n\n if len(bs1) != len(bs2) :\n self.Warning(\" FOUND!!!!\" , SUCCESS )\n for b in bs1:\n print ' With PHOTOS: ', b.decay() , b.barcode()\n for b in bs2:\n print ' Without PHOTOS: ', b.decay() , b.barcode()\n \n \n return SUCCESS # RETURN ", "def get_velocities(self):\n\n return np.array([p.velocity for p in self.particles])", "def f_per_particle(self, m, alpha):\n fit_obj = ff.FitenessFunction()\n\n total_features = 14\n # Get the subset of 
the features from the binary mask\n if np.count_nonzero(m) == 0:\n X_subset = self.X\n else:\n feature_idx = np.where(np.asarray(m) == 1)[0]\n X_subset = self.X.iloc[:, feature_idx]\n\n # print(\"particle : \", m)\n P = fit_obj.calculate_fitness(self.classifier, X_subset, self.Y)\n\n # Perform classification and store performance in P\n # classifier.fit(X_subset, self.Y)\n # P = (classifier.predict(X_subset) == self.Y).mean()\n # Compute for the objective function\n j = (alpha * (1.0 - P)\n + (1.0 - alpha) * (1 - (X_subset.shape[1] / total_features)))\n\n # alpha = random.random()\n # beta = 1 - alpha\n\n # j = alpha * P + beta * (X_subset.shape[1] / total_features)\n\n return j", "def __init__(self, fluorescenceSeries, conditionName, conditionSalt, conditionPh, conditiondpHdT, conditionIsControl):\n #name, temperatures, and curve from data\n self.name = fluorescenceSeries.name\n self.temperatures = fluorescenceSeries.index\n self.fluorescence = [x for x in fluorescenceSeries]\n\n stepSize = self.temperatures[1]-self.temperatures[0]\n \n #from the non normalised curve we get the max for each individual curve\n #the overall max on the plate will decide what the monotenicity threshold for the experiment will be\n self.maxNonNormalised = 0\n for x in self.fluorescence:\n if x > self.maxNonNormalised:\n self.maxNonNormalised = x\n \n #================= normalisation happens here ================#\n #the curve is then normalised to have an area below the curve of 1\n count = 0\n for height in self.fluorescence:\n count += height*stepSize\n self.fluorescence = [x / count for x in self.fluorescence]\n #used to calculate the monotenicity threshold\n self.normalisationFactor = count\n \n #from the now normalised curve we get the max and min for each individual curve\n #this is used in complex detection and plotting\n self.maxNormalised = self.maxNonNormalised / count\n self.minNormalised = 1\n for x in self.fluorescence:\n if x < self.minNormalised:\n self.minNormalised = x\n \n #other attributes of the curve are set to false/none until later analysis of the curve\n self.complex = False\n self.mono = False\n #tm and tm error are calulated upon calling the computeTm() method\n self.Tm = None \n self.TmError = None\n \n #the contents of the well is contained in an object of Contents inside well\n self.contents = Contents(conditionName, conditionSalt, conditionPh, conditiondpHdT, conditionIsControl)\n return" ]
[ "0.62281686", "0.6164579", "0.6101659", "0.6095347", "0.6041156", "0.6022373", "0.59413975", "0.5927904", "0.5923357", "0.59131575", "0.5900847", "0.5889833", "0.5871284", "0.58277595", "0.5819351", "0.5806105", "0.57995564", "0.5764965", "0.5761083", "0.5722828", "0.56739026", "0.56683755", "0.5648044", "0.56471425", "0.56396645", "0.5620207", "0.5619646", "0.56156915", "0.55956954", "0.5571542", "0.55594414", "0.5557327", "0.5551672", "0.55309784", "0.5522045", "0.5522045", "0.5521144", "0.552088", "0.552088", "0.552088", "0.552088", "0.552073", "0.55057734", "0.5504851", "0.5485141", "0.5479389", "0.5467497", "0.54646575", "0.54590076", "0.5446706", "0.54390025", "0.543794", "0.5426042", "0.5424529", "0.54225814", "0.54137635", "0.5407695", "0.540714", "0.5391921", "0.5383886", "0.53812903", "0.5380942", "0.5380513", "0.53776634", "0.5373979", "0.5371868", "0.537081", "0.53696275", "0.5360592", "0.5357471", "0.535579", "0.5350721", "0.53453785", "0.5321179", "0.53171253", "0.531618", "0.5316123", "0.5312326", "0.5310823", "0.53086764", "0.5304677", "0.5296979", "0.5296687", "0.5293692", "0.52902836", "0.52888024", "0.5285481", "0.5274295", "0.5272531", "0.5265829", "0.52515745", "0.5247017", "0.52439845", "0.52439827", "0.52339184", "0.52324194", "0.5225383", "0.52231914", "0.52185345", "0.5213152", "0.5211966" ]
0.0
-1
describes the microscope setup
def __init__(self, diameter, height, height_of_reservoir=None, material=None):
        g = 9.81  # m/s**2
        self.material = material
        self.diameter = diameter
        self.height = height
        self.volume = np.pi*self.diameter**2/4
        self.height_of_reservoir = height_of_reservoir
        if material and height_of_reservoir:
            self.hydrostatic_pressure = material.density*g*self.height_of_reservoir
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n ...", "def setup(self):\n pass", "def setup( self ):", "def setup(self):\n pass # pragma: no cover", "def setup(self):\r\n pass", "def setup(self):\n\t\tpass", "def setup(self) -> None:", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):", "def setup(self):\n pass", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup(self):\n raise NotImplementedError", "def setup():\n pass", "def setup(self, rc):\n pass", "def setup(self) -> None:\n pass", "def setup(self) -> None:\n pass", "def setup(self) -> None:\n pass", "def setup(self,**kwargs):\n pass", "def _setup(self):", "def _setup(self):", "def analysis_setup(self):\n pass", "def setup(self):\n pass", "def setup(self):\n pass", "def setup(self, *args, **kwargs):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def setup_dev():\n setup_general()", "def setup():\n setFormat()\n setFilename()\n setScreenMode()", "def setup(self): \n pass", "def setup(bot):\n bot.add_cog(Info(bot))", "def _setup(self) -> None:\n\t\treturn", "def setup(bot):\n bot.add_cog(Help(bot))", "def setup(bot: Bot) -> None:\n bot.add_cog(Armory(bot))", "def setup(self, args={}):\n\n return Status.RUN", "def setup_method(self):\n self.hass = get_test_home_assistant()\n\n self.config = {\n ip.DOMAIN: {\n \"platform\": \"microsoft_face_identify\",\n \"source\": {\"entity_id\": \"camera.demo_camera\", \"name\": \"test local\"},\n \"group\": \"Test Group1\",\n },\n \"camera\": {\"platform\": \"demo\"},\n mf.DOMAIN: {\"api_key\": \"12345678abcdef6\"},\n }\n\n self.endpoint_url = f\"https://westus.{mf.FACE_API_URL}\"", "def main():\n setup(**setup_params)", "def setup(bot):\n bot.add_cog(TruthOrDareCmd(bot))", "def setup(bot: util.CustomBot):\r\n bot.add_cog(Info(bot))", "def _setup(self):\n raise NotImplementedError()", "def gmcp_setup_data(self):\n yield \"Core.Supports.Debug\", 20\n yield \"Core.Supports.Set\", [ \"MG.char 1\", \"MG.room 1\", \"comm.channel 1\" ]", "def setup(self) -> None:\n mlflow.set_tracking_uri('file://' + hutils.get_original_cwd() + '/mlruns')\n if self.log_mlflow:\n mlflow.set_experiment(self.config.runner.exp_name)\n \n if self.log_mlflow:\n self.log_parameters(self.config)\n mlflow.log_param('node', os.uname()[1])", "def setup(bot: Bot) -> None:\n bot.add_cog(Help(bot))", "def SetupEnvironment(self):\n pass", "def setup(self):\n\n if self.user is 'Daisy':\n import socket\n host = socket.gethostname()\n\n simName = self.name_prefix[:self.name_prefix.find('_')]\n\n if 'ursa' in host:\n self.raw_sim_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback\n self.caesar_dir = '/disk01/rad/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/home/rad/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/home/dleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n elif 'flatironinstitute.org' or 'worker' in host:\n self.raw_sim_dir = '/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback # dummy\n self.caesar_dir = 
'/mnt/ceph/users/daisyleung/simba/sim/' + simName + '/' + self.feedback + 'Groups/'\n self.redshiftFile = '/mnt/ceph/users/daisyleung/simba/gizmo-extra/outputs_boxspace50.info'\n self.d_data = '/mnt/home/daisyleung/Downloads/SIGAME_dev/sigame/temp/z' + str(int(self.zCloudy)) + '_data_files/'\n else:\n raise NotImplementedError", "def setUp(self):\n lang = self._sim_lang\n self._simulator = self._find_resource(\n f\"drake/examples/hardware_sim/hardware_sim_{lang}\")\n self._example_scenarios = self._find_resource(\n \"drake/examples/hardware_sim/example_scenarios.yaml\")\n self._test_scenarios = self._find_resource(\n \"drake/examples/hardware_sim/test/test_scenarios.yaml\")\n self._default_extra = {\n # For our smoke test, exit fairly quickly.\n \"simulation_duration\": 0.0625,\n }", "def setup(self, *args, **kwargs):\n return True", "def Setup(self):\n return True", "def setup(self):\n self.machine = Machine(['a', 'b', 'c', '_'])", "def setup(bot: Bot) -> None:\n bot.add_cog(VoiceGate(bot))", "def _setup(app_obj):", "def Setup(self):\n raise NotImplementedError(\n 'No runtime setup defined for %s' % self.__class__.__name__)", "def setup(self):\n self.log.debug('upm - in upm setup()')\n # Add resource setup code here", "def setup(self):\n super(__class__, self).setup()\n # construct command line call\n setup_script = '%s/tfMRI.py' % \\\n os.environ['ABCDTASKPREPDIR']\n arg1 = self.kwargs['path']\n arg2 = self.kwargs['sourcedata_root']\n arg3 = self.kwargs['subject']\n arg4 = self.kwargs['session']\n anat_metadata = self.config.get_bids('t1w_metadata')\n # get make/software information\n make = anat_metadata['Manufacturer']\n if make == 'GE':\n reg = re.compile(r'.*(DV2[56]).*')\n software_version = reg.match(anat_metadata[\n 'SoftwareVersions']).group(1)\n else:\n software_version = 'NA'\n cmd = ' '.join((setup_script, arg1, arg2, arg3, arg4, make,\n software_version))\n print(cmd)\n\n log_dir = self._get_log_dir()\n out_log = os.path.join(log_dir, self.__class__.__name__ + '_setup.out')\n err_log = os.path.join(log_dir, self.__class__.__name__ + '_setup.err')\n result = self.call(cmd, out_log, err_log)", "def setup(self):\n\n self.testInst = pysat.Instrument('pysat', 'testing2D_xarray',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'profiles'\n self.test_val_length = 15\n\n return", "def setup(self, registers):\n \"\"\" tasks before any generation functions are called \"\"\"\n pass", "def setup(self, run, run_id):\n\n raise NotImplementedError", "def setup(bot):\n bot.add_cog(Miniscape(bot))", "def setup(self):\n ### Set Names\n # Name of the pipeline reduction step\n self.name='sortobs'\n # Shortcut for pipeline reduction step and identifier for\n # saved file names.\n self.procname = 'RAW'\n # Set Logger for this pipe step\n self.log = logging.getLogger('pipe.step.%s' % self.name)\n ### Set Parameter list\n # Clear Parameter list\n self.paramlist = []\n # Append Parameters\n self.paramlist.append(['pattern', '(^.+_([gri]-band|oiii|sii|clear|h-alpha))',\n 'Regex pattern used to get name by matching name_filter'])\n # Confirm end of setup\n self.log.debug('Setup: done')", "def setup(self):\n raise NotImplementedError(\"Need to be implemented in subclasses\")", "def config (self):\n import wikicode\n class Config (wikicode.extension):\n def run (self):\n self.send_page (\"Generic DC Setup\")\n wikicode.run_extension (Config)", "def init():\n\n return \"Welcome to SIX SIGMA, this api is only available to SIX SIGMA 
developers\"", "def setups():\n setups = []\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F2 = dict()\n kotani2017_F2['name'] = 'kotani2017_F2'\n kotani2017_F2['piltemplate'] = kotani2017_F2_pil\n kotani2017_F2['pilparams'] = [None]\n kotani2017_F2['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F2['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=1'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.5'),\n ('pilsimulator', '--nxy', '--atol', '1e-13', '--rtol', '1e-13', '--mxstep', '10000', '--t8', '36000', '--p0', 'S1=10', 'S2=10', 'R=20', 'C1=0.05')]\n kotani2017_F2['reporter'] = 'D'\n kotani2017_F2['exp_results'] = [(7733, 7.42), (11333, 6.18), (25533, 1.40)]\n setups.append(kotani2017_F2)\n\n\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F3 = dict()\n kotani2017_F3['name'] = 'kotani2017_F3'\n kotani2017_F3['piltemplate'] = kotani2017_F3_pil\n kotani2017_F3['pilparams'] = [None]\n kotani2017_F3['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F3['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S1=10', 'S2=10', 'S3=10', 'S4=10', 'R=20', 'C1=0.001')]\n kotani2017_F3['reporter'] = 'D'\n kotani2017_F3['exp_results'] = [(21220, 7.72), (64203, 3.12), (86996, 0.69)]\n setups.append(kotani2017_F3)\n\n # If you run this in detailed mode, you need to set --t8 to 1e8\n kotani2017_F4 = dict()\n kotani2017_F4['name'] = 'kotani2017_F4'\n kotani2017_F4['piltemplate'] = kotani2017_F4_pil\n kotani2017_F4['pilparams'] = [None]\n kotani2017_F4['pepperargs'] = {'condensed': True, 'conc': 'nM', 'release_cutoff': 10}\n kotani2017_F4['simulation'] = [\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.1'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.01'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0.001'),\n ('pilsimulator', '--nxy', '--atol', '1e-10', '--rtol', '1e-10', '--mxstep', '10000', '--t8', '360000', '--p0', 'S5au=10', 'S6au=10', 'R=20', 'C1x=0')]\n kotani2017_F4['reporter'] = 'D'\n kotani2017_F4['exp_results'] = [(6815, 6.06), (9004, 4.78), (10278, 4.03), (10795, 3.73)]\n setups.append(kotani2017_F4)\n\n return setups", "def __init__(self):\n self.label = \"Get SDM parameters\"\n self.description = \"This tool is used to view the Environment and SDM modeling parameters that have been set by the user. All of the values reported by this tool must be set to values specific to the model to be made. Using the ESRI default values will cause SDM to fail. 
If the Environment is not completely set, then an error message stating \\\"Improper SDM setup\\\" will occur. The successful running of this tool does not assure that the setup is correct; only that the default values have been changed. See the Environment Settings section of the Help file for Calculate Weights for the details.\"\n\n self.canRunInBackground = False\n self.category = \"Utilities\"", "def setup(self):\n self.testInst = pysat.Instrument('pysat', 'testing2D_xarray',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'variable_profiles'\n self.test_val_length = 15\n\n return", "def configure(self):", "def configure(self):", "def configure(self):", "def configure(self):", "def setup(bot: Red):\n bot.add_cog(Welcome(bot))", "def setup(self):\n raise NotImplemented", "def setup(self):\n \n # Define ui file to be used as a graphical interface\n # This file can be edited graphically with Qt Creator\n # sibling_path function allows python to find a file in the same folder\n # as this python module\n self.ui_filename = sibling_path(__file__, \"ant_watch_plot.ui\")\n \n #Load ui file and convert it to a live QWidget of the user interface\n self.ui = load_qt_ui_file(self.ui_filename)\n\n # Measurement Specific Settings\n # This setting allows the option to save data to an h5 data file during a run\n # All settings are automatically added to the Microscope user interface\n self.settings.New('save_video', dtype = bool, initial = False)\n self.settings.New('track_ant',dtype = bool, initial = False)\n self.settings.New('pixel_size', dtype = float, initial = 0.05547850208, ro = True)\n self.settings.New('binning', dtype = int, initial = 16, ro = True)\n self.settings.New('threshold', dtype = int, initial = 85, ro = False)\n self.settings.New('proportional', dtype = float, initial = 0.12, ro = False)\n self.settings.New('integral', dtype = float, initial = 0, ro = False)\n self.settings.New('derivative', dtype = float, initial = 0.05, ro = False)\n \n # x and y is for transmitting signal\n self.settings.New('x',dtype = float, initial = 32, ro = True, vmin = 0, vmax = 63.5)\n self.settings.New('y',dtype = float, initial = 32, ro = True, vmin = 0, vmax = 63.5)\n \n # Define how often to update display during a run\n self.display_update_period = 0.01\n \n \n # Convenient reference to the hardware used in the measurement\n self.track_cam = self.app.hardware['track_cam']\n self.wide_cam = self.app.hardware['wide_cam']\n self.recorder = self.app.hardware['flirrec']\n self.daqmotor = self.app.hardware['daqmotor']\n \n #setup experiment condition\n self.track_cam.settings.frame_rate.update_value(50)\n self.track_cam.read_from_hardware()", "def setUp(self):\n\n self.niceArgV = (\"--long Alpha -n Beta \"\n \"--shortless Gamma -f --myflag \"\n \"--myparam Tofu\").split()\n\n self.nice = WellBehaved()", "def setup(self):\n env_name = rospy.get_param('ros_gym/environment_name')\n max_episode_steps = rospy.get_param('ros_gym/max_episode_steps')\n self.task_env = self.register_env(env_name, max_episode_steps)\n\n self.agent = \\\n AgentBase.get_agent(rospy.get_param('~agent'), env=self.task_env)\n rospy.loginfo('Using agent of type: {}'.format(self.agent.name))\n\n # Set the logging system\n rospack = rospkg.RosPack()\n pkg_path = rospack.get_path('ros_gym')\n outdir = pkg_path + '/training_results'\n self.task_env = wrappers.Monitor(self.task_env, outdir, force=True)", "def setUp(self):\n self.sampler = {\n \"name\": \"samplername\",\n 
\"backend_name\": \"\",\n \"backend_header\": \"\",\n \"backend_prefix\": \"\",\n \"backend_suffix\": \"\",\n \"backend_footer\": \"\",\n \"ncores\": 2,\n \"threads_per_core\": 1,\n \"omp_enabled\": True,\n \"papi_enabled\": True,\n \"papi_counters_max\": 2,\n \"papi_counters_avail\": (\"C1\", \"C2\", \"C3\"),\n \"kernels\": {\"dgemm\": (\n 'dgemm', 'char*', 'char*', 'int*', 'int*', 'int*', 'double*',\n 'double*', 'int*', 'double*', 'int*', 'double*', 'float*',\n 'int*'\n )},\n \"nt_max\": random.randint(1, 10),\n \"exe\": \"x\"\n }\n self.i = Symbol(\"i\")\n self.j = Symbol(\"j\")\n self.k = Symbol(\"k\")\n self.ns = [random.randint(1, 100) for _ in range(5)]", "def setup(self):\n self.ae = None", "async def setup(self, ctx):\n pass", "def setup(self):\n\n self.insts = []\n self.testInst = pysat.Instrument('pysat', 'testing2D',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 1, 3))\n self.insts.append(self.testInst)\n self.insts.append(self.testInst)\n\n self.dname = 'series_profiles'\n self.test_vals = np.arange(50) * 1.2\n\n self.testC = pysat.Constellation(instruments=self.insts)\n\n return", "def setup(self, app_args):\n raise NotImplementedError", "def setup(bot):\n bot.add_cog(Session(bot))", "def setup(self):\n self.testInst = pysat.Instrument('pysat', 'testing2D',\n clean_level='clean')\n self.testInst.bounds = (dt.datetime(2008, 1, 1),\n dt.datetime(2008, 2, 1))\n self.dname = 'series_profiles'\n self.test_vals = np.arange(50) * 1.2\n\n return", "def setup(self):\n # define misfit function and adjoint source generator\n self.misfit = getattr(misfit, PAR.MISFIT)\n self.adjoint = getattr(adjoint, PAR.MISFIT)\n\n # define seismic data reader and writer\n self.reader = getattr(readers, PAR.READER)\n self.writer = getattr(writers, PAR.WRITER)\n\n # prepare channels list\n self.channels = []\n for char in PAR.CHANNELS:\n self.channels += [char]", "def setup(self):\n self.log.debug('RFSwitch - in RFSwitch setup()')\n # Add resource setup code here\n print(\"Calling RFSwitch:setup\")" ]
[ "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6317719", "0.6309029", "0.6301397", "0.6280084", "0.62563354", "0.6255936", "0.6254392", "0.62412417", "0.6226122", "0.6226122", "0.6226122", "0.6226122", "0.6211043", "0.6092844", "0.6092844", "0.6092844", "0.6092844", "0.60887855", "0.6072536", "0.60481685", "0.60481685", "0.60481685", "0.6027901", "0.59710795", "0.59710795", "0.594678", "0.59327334", "0.59327334", "0.590857", "0.5900024", "0.5900024", "0.5900024", "0.5900024", "0.5900024", "0.5888943", "0.5883497", "0.58645135", "0.5853421", "0.58309644", "0.5743328", "0.57386583", "0.5695076", "0.56922555", "0.568683", "0.56477123", "0.56318593", "0.5595929", "0.55615574", "0.5559562", "0.55465704", "0.5542546", "0.5536642", "0.5526248", "0.5521963", "0.55142236", "0.55005157", "0.55004907", "0.5480602", "0.54739803", "0.5466162", "0.5460451", "0.5457194", "0.54565465", "0.5455007", "0.5451243", "0.54417694", "0.54380345", "0.54363555", "0.5420823", "0.54123545", "0.5389246", "0.53889674", "0.53877455", "0.53877455", "0.53877455", "0.53877455", "0.53865206", "0.53865176", "0.53808963", "0.5380804", "0.5373419", "0.5368176", "0.5363429", "0.5348206", "0.5341226", "0.53347284", "0.5334573", "0.53303355", "0.53116655", "0.53114647" ]
0.0
-1
describes the fluid handling system
def __init__(self, fluid_reservoir=None, all_tubing=None, onchip_reservoir=None):
        self.fluid_reservoir = fluid_reservoir
        self.all_tubing = all_tubing
        self.onchip_reservoir = onchip_reservoir
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def system_fleet_dimensioning(self):", "def define_forms(self):\n\n # Define UFL objects corresponding to the local acceleration\n # if problem is unsteady.\n self.define_ufl_local_inertia()\n self.define_ufl_local_inertia_diff()\n\n # Define UFL objects corresponding to the convective acceleration\n # if problem is formulated with respect to Eulerian coordinates\n self.define_ufl_convec_accel()\n self.define_ufl_convec_accel_diff()\n\n # Define UFL objects corresponding to the stress tensor term.\n # This should always be non-zero for deformable bodies.\n self.define_ufl_stress_work()\n self.define_ufl_stress_work_diff()\n\n # Define UFL object corresponding to the body force term. Assume\n # it is zero if key was not provided.\n self.define_ufl_body_force()\n\n # Define UFL object corresponding to the traction force terms. Assume\n # it is zero if key was not provided.\n self.define_ufl_neumann_bcs()\n self.define_ufl_neumann_bcs_diff()\n\n return None", "def engine_and_general_info(self):\r\n pos,vel,esc_part, impact, wall_collision,mom = self.box_collision_info()\r\n tot_kin, kin_er = self.kinetic_energy()\r\n esc_mom, force = self.escaped_momentum()\r\n pres = self.pressure()\r\n tot_force = self.engine_boost()\r\n #force, acceleration, fuel = self.engine_boost()\r\n\r\n print\" Engine started and launched \"\r\n\r\n print \"###############################################\"\r\n print \" Engine status (Numerical values) \"\r\n print \"-----------------------------------------------\"\r\n print \"The amount of particle escaped %g\" %(esc_part)\r\n print \"Amount of particles collided with one wall %i\" %wall_collision\r\n print \"Momentum escaped %g kgm/s\" %(esc_mom)\r\n print \"Kinetic energy per particle %gj\" %(kin_er)\r\n print \"Total kinetic energy %gj\" %(tot_kin)\r\n print \"Pressure inside the engine is %f\" %(pres)\r\n print \"momentum on the wall %g\" %(mom)\r\n print \"total force %g\"%(tot_force)\r\n print \"###############################################\"\r\n print \" Launch info \"\r\n print \"-----------------------------------------------\"\r\n #print \"acceleration per engine %g m/s^2\" %(acceleration)\r\n #print \"force per engine %g N \" %(force)\r\n print \"################################################\"", "def main_multimodal_fusion(im_vis, im_ir, kernel, levels, window_size):\n\n im_vis = convert_image_to_floats(im_vis)\n im_ir = convert_image_to_floats(im_ir)\n\n im_vis_hsv = rgb2hsv(im_vis)\n value_channel = im_vis_hsv[:, :, 2]\n\n plt.subplot(1, 2, 1)\n plt.imshow(value_channel, cmap='gray')\n plt.subplot(1, 2, 2)\n plt.imshow(im_ir, cmap='gray')\n plt.show()\n\n # kernels to compute visibility\n kernel1 = classical_gaussian_kernel(5, 2)\n kernel2 = classical_gaussian_kernel(5, 2)\n\n # Computation of local entropy, local contrast and visibility for value channel\n local_entropy_value = normalized_local_entropy(value_channel, window_size)\n local_contrast_value = local_contrast(value_channel, window_size)\n visibility_value = visibility(value_channel, kernel1, kernel2)\n # Combination of local entropy, local contrast and visibility for value channel\n weight_value = weight_combination(local_entropy_value, local_contrast_value, visibility_value, 1, 1, 1)\n\n # Computation of local entropy, local contrast and visibility for IR image\n local_entropy_ir = normalized_local_entropy(im_ir, window_size)\n local_contrast_ir = local_contrast(im_ir, window_size)\n visibility_ir = visibility(im_ir, kernel1, kernel2)\n # Combination of local entropy, local contrast 
and visibility for IR image\n weight_ir = weight_combination(local_entropy_ir, local_contrast_ir, visibility_ir, 1, 1, 1)\n\n plt.subplot(2, 3, 1)\n plt.imshow(local_entropy_value, cmap='gray')\n plt.subplot(2, 3, 2)\n plt.imshow(local_contrast_value, cmap='gray')\n plt.subplot(2, 3, 3)\n plt.imshow(visibility_value, cmap='gray')\n plt.subplot(2, 3, 4)\n plt.imshow(local_entropy_ir, cmap='gray')\n plt.subplot(2, 3, 5)\n plt.imshow(local_contrast_ir, cmap='gray')\n plt.subplot(2, 3, 6)\n plt.imshow(visibility_ir, cmap='gray')\n plt.show()\n\n # Normalising weights of value channel and IR image\n weightN_value, weightN_ir = weight_normalization(weight_value, weight_ir)\n\n plt.subplot(1, 2, 1)\n plt.imshow(weightN_value, cmap='gray')\n plt.subplot(1, 2, 2)\n plt.imshow(weightN_ir, cmap='gray')\n plt.show()\n\n # Creating Gaussian pyramids of the weights maps of respectively the value channel and IR image\n gauss_pyr_value_weights = gaussian_pyramid(weightN_value, kernel, levels)\n gauss_pyr_ir_weights = gaussian_pyramid(weightN_ir, kernel, levels)\n\n # Creating Laplacian pyramids of respectively the value channel and IR image\n lap_pyr_value = laplacian_pyramid(value_channel, kernel, levels)\n lap_pyr_ir = laplacian_pyramid(im_ir, kernel, levels)\n\n # Creating the fused Laplacian of the two modalities\n lap_pyr_fusion = fused_laplacian_pyramid(gauss_pyr_value_weights, gauss_pyr_ir_weights, lap_pyr_value, lap_pyr_ir)\n\n i = 1\n for l in lap_pyr_fusion:\n plt.subplot(1, len(lap_pyr_fusion), i)\n plt.imshow(l, cmap='gray')\n i += 1\n plt.show()\n\n # Creating the Gaussian pyramid of value channel in order to collapse the fused Laplacian pyramid\n gauss_pyr_value = gaussian_pyramid(value_channel, kernel, levels)\n collapsed_image = collapse_pyramid(lap_pyr_fusion, gauss_pyr_value)\n\n # Replacing the value channel in HSV visible image by the collapsed image\n im_vis_hsv_fusion = im_vis_hsv.copy()\n im_vis_hsv_fusion[:, :, 2] = collapsed_image\n im_vis_rgb_fusion = hsv2rgb(im_vis_hsv_fusion)\n\n plt.subplot(1, 2, 1)\n plt.imshow(im_vis)\n plt.subplot(1, 2, 2)\n plt.imshow(im_vis_rgb_fusion)\n plt.show()", "def process(self, system_label):\n self.system_labels.append(system_label)\n # Sn_out = SiestaReadOut(\"Sn\")\n\n with open(f\"{system_label}.PDOS\", 'r') as file:\n for line in file:\n if \"fermi_energy\" in line:\n self.fermi_levels.append(float(line.split(\"<\")[-2].split(\">\")[-1]))", "def define_ufl_equations(self):\n\n self.define_ufl_velocity_equation()\n self.define_ufl_momentum_equation()\n self.define_ufl_incompressibility_equation()\n\n return None", "def title( self ):\n\t\treturn \"Fix Zero Handles\"", "def displayFiducial(self):\n #obsolete?\n profbox()\n modelNodes = slicer.util.getNodes('vtkMRMLModelNode*')\n for modelNode in modelNodes.values():\n displayNode = modelNode.GetDisplayNode()\n if modelNode.GetAttribute(\"segmented\") == \"1\" and modelNode.GetAttribute(\"nth\")!=None:\n if 1:\n i = int(modelNode.GetAttribute(\"nth\"))\n if self.fiducialnode[i] == 0: \n polyData = modelNode.GetPolyData()\n nb = int(polyData.GetNumberOfPoints()-1)\n coord = [0,0,0]\n if nb>10:\n self.fiducialnode[i] = slicer.vtkMRMLAnnotationFiducialNode()\n polyData.GetPoint(nb,coord) \n self.fiducialnode[i].SetName(self.option[i])\n self.fiducialnode[i].SetFiducialCoordinates(coord) \n self.fiducialnode[i].Initialize(slicer.mrmlScene)\n self.fiducialnode[i].SetLocked(1)\n self.fiducialnode[i].SetSelectable(0)\n fidDN = self.fiducialnode[i].GetDisplayNode()\n 
fidDN.SetColor(modelNode.GetDisplayNode().GetColor())\n fidDN.SetGlyphScale(0)\n fidTN = self.fiducialnode[i].GetAnnotationTextDisplayNode()\n fidTN.SetTextScale(3)\n fidTN.SetColor(modelNode.GetDisplayNode().GetColor())\n \n self.fiducialnode[i].SetDisplayVisibility(modelNode.GetDisplayNode().GetVisibility())\n else: \n if modelNode.GetDisplayNode().GetVisibility():\n self.fiducialnode[i].SetDisplayVisibility(abs(self.fiducialnode[i].GetDisplayVisibility()-1))\n if self.fiducialnode[i].GetDisplayVisibility()==1:\n self.displayFiducialButton.text = \"Hide Labels on Needles\"\n else:\n self.displayFiducialButton.text = \"Display Labels on Needles\"", "def displayFiducial(self):\r\n # obsolete?\r\n profbox()\r\n modelNodes = slicer.util.getNodes('vtkMRMLModelNode*')\r\n for modelNode in modelNodes.values():\r\n displayNode = modelNode.GetDisplayNode()\r\n if modelNode.GetAttribute(\"segmented\") == \"1\" and modelNode.GetAttribute(\"nth\") != None:\r\n if 1:\r\n i = int(modelNode.GetAttribute(\"nth\"))\r\n if self.fiducialnode[i] == 0:\r\n polyData = modelNode.GetPolyData()\r\n nb = int(polyData.GetNumberOfPoints() - 1)\r\n coord = [0, 0, 0]\r\n if nb > 10:\r\n self.fiducialnode[i] = slicer.vtkMRMLAnnotationFiducialNode()\r\n polyData.GetPoint(nb, coord)\r\n self.fiducialnode[i].SetName(self.option[i])\r\n self.fiducialnode[i].SetFiducialCoordinates(coord)\r\n self.fiducialnode[i].Initialize(slicer.mrmlScene)\r\n self.fiducialnode[i].SetLocked(1)\r\n self.fiducialnode[i].SetSelectable(0)\r\n fidDN = self.fiducialnode[i].GetDisplayNode()\r\n fidDN.SetColor(modelNode.GetDisplayNode().GetColor())\r\n fidDN.SetGlyphScale(0)\r\n fidTN = self.fiducialnode[i].GetAnnotationTextDisplayNode()\r\n fidTN.SetTextScale(3)\r\n fidTN.SetColor(modelNode.GetDisplayNode().GetColor())\r\n\r\n self.fiducialnode[i].SetDisplayVisibility(modelNode.GetDisplayNode().GetVisibility())\r\n else:\r\n if modelNode.GetDisplayNode().GetVisibility():\r\n self.fiducialnode[i].SetDisplayVisibility(abs(self.fiducialnode[i].GetDisplayVisibility() - 1))\r\n if self.fiducialnode[i].GetDisplayVisibility() == 1:\r\n self.displayFiducialButton.text = \"Hide Labels on Needles\"\r\n else:\r\n self.displayFiducialButton.text = \"Display Labels on Needles\"", "def _define_biophysics(self):\n\t\tfor node in self.node:\n\t\t\tnode.nseg=1\n\t\t\tnode.diam=self._nodeD\n\t\t\tnode.L=self._nodeLength\n\t\t\tnode.Ra=self._rhoa/10000\n\t\t\tnode.cm=2\n\t\t\tnode.insert('axnode')\n\t\t\tnode.insert('extracellular')\n\t\t\tnode.xraxial[0]=self._Rpn0\n\t\t\tnode.xg[0]=1e10\n\t\t\tnode.xc[0]=0\n\n\t\tfor mysa in self.mysa:\n\t\t\tmysa.nseg=1\n\t\t\tmysa.diam=self._fiberD\n\t\t\tmysa.L=self._paraLength1\n\t\t\tmysa.Ra=self._rhoa*(1/(self._paraD1/self._fiberD)**2)/10000\n\t\t\tmysa.cm=2*self._paraD1/self._fiberD\n\t\t\tmysa.insert('pas')\n\t\t\tmysa.g_pas=0.001*self._paraD1/self._fiberD\t\t\n\t\t\tmysa.e_pas=-80\n\t\t\tmysa.insert('extracellular')\n\t\t\tmysa.xraxial[0]=self._Rpn1\n\t\t\tmysa.xg[0]=self._mygm/(self._nl*2)\n\t\t\tmysa.xc[0]=self._mycm/(self._nl*2)\n\n\t\tfor flut in self.flut:\n\t\t\tflut.nseg=1\n\t\t\tflut.diam=self._fiberD\n\t\t\tflut.L=self._paraLength2\n\t\t\tflut.Ra=self._rhoa*(1/(self._paraD2/self._fiberD)**2)/10000\n\t\t\tflut.cm=2*self._paraD2/self._fiberD\n\t\t\tflut.insert('pas')\n\t\t\tflut.g_pas=0.0001*self._paraD2/self._fiberD\t\t\n\t\t\tflut.e_pas=-80\n\t\t\tflut.insert('extracellular')\n\t\t\tflut.xraxial[0]=self._Rpn2\n\t\t\tflut.xg[0]=self._mygm/(self._nl*2)\n\t\t\tflut.xc[0]=self._mycm/(self._nl*2)\n\t\t\n\t\tfor stin 
in self.stin:\n\t\t\tstin.nseg=1\n\t\t\tstin.diam=self._fiberD\n\t\t\tstin.L=self._interLength\n\t\t\tstin.Ra=self._rhoa*(1/(self._axonD/self._fiberD)**2)/10000\n\t\t\tstin.cm=2*self._axonD/self._fiberD\n\t\t\tstin.insert('pas')\n\t\t\tstin.g_pas=0.0001*self._axonD/self._fiberD\n\t\t\tstin.e_pas=-80\n\t\t\tstin.insert('extracellular')\n\t\t\tstin.xraxial[0]=self._Rpx\n\t\t\tstin.xg[0]=self._mygm/(self._nl*2)\n\t\t\tstin.xc[0]=self._mycm/(self._nl*2)", "def getHFtableData(self, ep=None):\n HFdict = {}\n if self.hfMode == 'limiter':\n HFdict['Heat Flux Mode'] = 'Limiter'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Near Mode\"] = 'Eich Regression #15'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Near Mode\"] = 'User Defined'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqCN\n if self.lqCFmode == 'horacek':\n HFdict[\"\\u03BB Far Mode\"] = 'Horacek Figure 6a'\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n else:\n HFdict[\"\\u03BB Far Mode\"] = 'User Defined'\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n\n HFdict[\"Common Region Near Power Fraction\"] = self.fracCN\n HFdict[\"Common Region Far Power Fraction\"] = self.fracCF\n\n elif self.hfMode == 'multiExp':\n HFdict['Heat Flux Mode'] = 'Multiple (4) Exponentials'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Near Mode\"] = 'Eich Regression #15'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Near Mode\"] = 'User Defined'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqCN\n\n if self.lqCFmode == 'horacek':\n HFdict[\"\\u03BB Far Mode\"] = 'Horacek Figure 6a'\n else:\n HFdict[\"\\u03BB Far Mode\"] = 'User Defined'\n\n\n\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n HFdict[\"Private Region Near Heat Flux Width (\\u03BBq PN) [mm]\"] = self.lqPN\n HFdict[\"Private Region Far Heat Flux Width (\\u03BBq PF) [mm]\"] = self.lqPF\n HFdict[\"Common Region Near Power Fraction\"] = self.fracCN\n HFdict[\"Common Region Far Power Fraction\"] = self.fracCF\n HFdict[\"Private Region Near Power Fraction\"] = self.fracPN\n HFdict[\"Private Region Far Power Fraction\"] = self.fracPF\n\n elif self.hfMode == 'qFile':\n HFdict[\"Heat Flux Mode\"] = 'Read HF from qFile'\n HFdict['qFilePath'] = self.qFilePath\n HFdict['qFileTag'] = self.qFileTag\n\n elif self.hfMode == 'eich':\n HFdict['Heat Flux Mode'] = 'Gaussian Spreading'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Mode\"] = 'Eich Regression #15'\n HFdict[\"Heat Flux Width (\\u03BBq) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Mode\"] = 'User Defined'\n HFdict[\"Heat Flux Width (\\u03BBq) [mm]\"] = self.lqCN\n\n if self.SMode == 'makowski':\n HFdict['Greenwald Density Fraction'] = self.fG\n HFdict['Spreading (S) Mode'] = 'Makowski Figure 6'\n else:\n HFdict['Spreading (S) Mode'] = 'User Defined'\n HFdict['Greenwald Density Fraction'] = 'Only used for Makowski S Mode'\n HFdict['S [mm]'] = self.S\n HFdict['Background Heat Flux'] = self.qBG\n\n if self.hfMode != 'qFile':\n HFdict[\"Power Injected (Pinj) [MW]\"] = self.Pinj\n HFdict[\"Radiated Fraction of Injected Power\"] = self.coreRadFrac\n HFdict[\"Power Crossing Separatrix (Psol) [MW]\"] = self.Psol\n HFdict[\"Upper Inner Divertor Power Fraction\"] = self.fracUI\n HFdict[\"Upper Outer Divertor Power Fraction\"] = self.fracUO\n HFdict[\"Lower Inner Divertor Power 
Fraction\"] = self.fracLI\n HFdict[\"Lower Outer Divertor Power Fraction\"] = self.fracLO\n\n return HFdict", "def describe_operating_systems():\n pass", "def main():\n\t#print(scipy.__version__)\n\t#image()\n\t#heat_capacity2()\n\t#hist()\n\t#single_plot()\n\n\t#heat_capacity2()\n\t#single_plot()\n\t#plt.show()\n\t#u0_tc()\n\t#multi_heat_capacity(\"HL_DM_flux5\",True)\n\t#multi_heat_capacity2()\n\t#plot_spin()\n\t#plt.show()\n\theat_capacity2(1,2)\n\t#hist()\n\tplt.show()\n\t#potential()\n\t#plt.show()\n\t#heat_capacity(3,4)\n\t#heat_capacity(5,6)\n\t#heat_capacity(7,8)\n\t#final_spins()\n\t#plot_spin()\n\t#plot_from_csv()\n\t#difference_plot()", "def __init__(\n self,\n model,\n ipakcb=None,\n intercellt=0,\n laycon=3,\n trpy=1.0,\n hdry=-1e30,\n iwdflg=0,\n wetfct=0.1,\n iwetit=1,\n ihdwet=0,\n ikvflag=0,\n ikcflag=0,\n tran=1.0,\n hy=1.0,\n vcont=1.0,\n kv=1.0,\n anglex=0.0,\n ksat=1.0,\n sf1=1e-5,\n sf2=0.15,\n wetdry=-0.01,\n extension=\"bcf\",\n unitnumber=None,\n filenames=None,\n add_package=True,\n ):\n msg = (\n \"Model object must be of type flopy.mfusg.MfUsg\\n\"\n f\"but received type: {type(model)}.\"\n )\n assert isinstance(model, MfUsg), msg\n\n super().__init__(\n model,\n ipakcb=ipakcb,\n intercellt=intercellt,\n laycon=laycon,\n trpy=trpy,\n hdry=hdry,\n iwdflg=iwdflg,\n wetfct=wetfct,\n iwetit=iwetit,\n ihdwet=ihdwet,\n tran=tran,\n hy=hy,\n vcont=vcont,\n sf1=sf1,\n sf2=sf2,\n wetdry=wetdry,\n extension=extension,\n unitnumber=unitnumber,\n filenames=filenames,\n add_package=False,\n )\n\n dis = model.get_package(\"DIS\")\n if dis is None:\n dis = model.get_package(\"DISU\")\n structured = self.parent.structured\n\n nrow, ncol, nlay, _ = self.parent.nrow_ncol_nlay_nper\n\n self.ikvflag = ikvflag\n self.ikcflag = ikcflag\n self.kv = kv\n self.anglex = anglex\n self.ksat = ksat\n\n if not structured:\n njag = dis.njag\n self.anglex = Util2d(\n model,\n (njag,),\n np.float32,\n anglex,\n \"anglex\",\n locat=self.unit_number[0],\n )\n\n # item 1\n self.kv = Util3d(\n model,\n (nlay, nrow, ncol),\n np.float32,\n kv,\n \"Vertical Hydraulic Conductivity\",\n locat=self.unit_number[0],\n )\n if not structured:\n self.ksat = Util3d(\n model,\n (njag,),\n np.float32,\n ksat,\n \"ksat\",\n locat=self.unit_number[0],\n )\n\n if add_package:\n self.parent.add_package(self)", "def describe(self):\n\n print(\"Correlation length: {0}\".format(self.cl))\n print(\"icoordchange: {0}\".format(self.icoordchange))\n print(\"ispec: {0}\".format(self.ispec))\n print(\"ireg: {0}\".format(self.ireg))\n print(\"Domain: x-axis: from {0} to {1} with {2} steps of {3}\".format(self.xori, self.xend,\n self.nx, self.dx))\n print(\"Domain: y-axis: from {0} to {1} with {2} steps of {3}\".format(self.yori, self.yend,\n self.ny, self.dy))\n print(\"Exclusion value: {0}\".format(self.valex))\n print(\"Signal-to-noise ratio: {0}\".format(self.snr))\n print(\"Variance of the background field: {0}\".format(self.varbak))", "def falcon():", "def fluid_properties(fluid_str):\n fluid_lib = {'water':(1000., 1.0e-6), \n 'glycol':(965.3,6.216e-4),\n 'glycerin':(1260,1.18e-3)}\n if fluid_str in fluid_lib.keys():\n return fluid_lib[fluid_str]\n else:\n print 'valid fluids are:'\n for keys in fluid_lib:\n print \" '%s' \" % keys\n raise KeyError('invalid fluid specified')", "def fluid_properties(fluid_str):\n fluid_lib = {'water':(1000., 1.0e-6), \n 'glycol':(965.3,6.216e-4),\n 'glycerin':(1260,1.18e-3)}\n if fluid_str in fluid_lib.keys():\n return fluid_lib[fluid_str]\n else:\n print 'valid fluids are:'\n for keys in 
fluid_lib:\n print \" '%s' \" % keys\n raise KeyError('invalid fluid specified')", "def setup_fermi(self):\n eventclass=5 # 2 (Source) or 5 (UltracleanVeto)\n eventtype=0 # 0 (all), 3 (bestpsf) or 5 (top3 quartiles)\n mask_type='top300'\n force_mask_at_bin_number=10\n\n self.f1 = fp.fermi_plugin(maps_dir,fermi_data_dir=fermi_data_dir,work_dir=work_dir,CTB_en_min=0,CTB_en_max=40,nside=self.nside,eventclass=eventclass,eventtype=eventtype,newstyle=1,data_July16=True)\n\n if mask_type != 'False':\n self.f1.make_ps_mask(mask_type = mask_type,force_energy = True,energy_bin = force_mask_at_bin_number)\n self.f1.add_diffuse_newstyle(comp = 'p7', eventclass = eventclass, eventtype = eventtype)\n self.f1.add_bubbles(comp='bubs') #bubbles\n self.f1.add_iso(comp='iso') #iso\n self.f1.add_ps_model(comp='ps_model')\n\n # Exposure correct J_map_arr\n self.J_map_arr *= self.f1.CTB_exposure_maps\n\n # Add J-factor map with mean 1 in each energy bin\n self.f1.add_template_by_hand('J_map',np.array([self.J_map_arr[i]/np.mean(self.J_map_arr[i]) for i in range(40)]))", "def d_responsive_test(self):", "def setup_ssem(self):\n \n groundBox = box_polygon_shape(-20,-500,1,1) # A tiny box that just acts as the ground body for everything else\n groundBody = self.world.CreateStaticBody(shapes=groundBox)\n self.memory_sender_y = -500\n self.groundBody = groundBody\n # Initial charge for main injector\n self.ball_bearing_block(0,190,cols=16)\n self.add_static_polygon([ (0,20), (100,0), (100,5), (0,25)], -132, 220)\n self.add_static_polygon([ (0,0), (3,0), (3,20), (0,20)], -132, 240)\n self.injector_cranks = []\n self.parts.main_injector_raiser = self.injector(-32,150, groundBody, injector_crank_array=self.injector_cranks)\n\n (self.parts.memory_selector_holdoff, self.parts.memory_follower_holdoff) = self.memory_module(0,0, groundBody)\n self.upper_regenerators = []\n self.parts.accumulator_diverter_lever = self.diverter_set(0,130, groundBody, slope_x=-240, slope_y=180, return_weight=10) # Diverter 1. Splits to subtractor reader.\n\n self.parts.discard_lever_2 = self.diverter_set(-5,-30, groundBody, discard=470) # Diverter 2a. Discard reader-pulse data.\n self.parts.upper_regen_control = self.regenerator(-5,-65, groundBody, self.upper_regenerators) # Regenerator 1. For regenning anything read from memory.\n self.parts.ip_diverter_lever = self.diverter_set(-5,-95, groundBody, slope_x=320, slope_y=170, start_at=3, return_weight=5) # Diverter 1. 
Splits to instruction counter.\n self.parts.diverter_3 = self.diverter_set(-10,-158, groundBody, slope_x=200, slope_y=310) # Diverter 3; splits to instruction reg/PC\n \n # PC injector\n pc_injector_x = 230\n pc_injector_y = -290\n self.pc_injector_cranks = [] \n self.parts.pc_injector_raiser = self.injector(pc_injector_x,pc_injector_y, groundBody, injector_crank_array=self.pc_injector_cranks, columns=5)\n # Initial charge for PC injector\n self.ball_bearing_block(250,-240,cols=8)\n\n\n sub_pos_x = -15\n sub_pos_y = -220\n self.parts.accumulator_reset_lever = self.subtractor(sub_pos_x,sub_pos_y, groundBody, discard_bands=True, toggle_joint_array=self.accumulator_toggles, comparison_output=True)\n self.dropper = self.slow_drop_unit(groundBody, sub_pos_x-18, sub_pos_y+40)\n\n # The 'skip lever' - the large balance arm which injects one\n # into the program counter when comparison (CMP) succeeds\n skip_lever_x = sub_pos_x - 200\n skip_lever_y = sub_pos_y - 200\n a = fixtureDef(shape=polygonShape(vertices=[(-50,-32), (0,-30), (0,-25), (-50,-30)]), filter=filters[0], density=1.0)\n b = fixtureDef(shape=box_polygon_shape(0,-30,5,30), filter=filters[0], density=1.0)\n c = fixtureDef(shape=box_polygon_shape(-30,-30,5,30), filter=filters[0], density=1.0)\n d = fixtureDef(shape=box_polygon_shape(0,0,300,5), filter=filters[2], density=1.0)\n e = fixtureDef(shape=box_polygon_shape(285,-15,15,15), filter=filters[2], density=2.10)\n f = fixtureDef(shape=box_polygon_shape(150,-50,5,50), filter=filters[0], density=1.0)\n skip_lever=self.add_multifixture([a,b,d,e,f], skip_lever_x, skip_lever_y)\n skip_lever.attachment_point = (150,-50)\n skip_lever.origin = (skip_lever_x,skip_lever_y)\n\n self.revolving_joint(groundBody, skip_lever, (skip_lever_x+150, skip_lever_y+2.5), friction=0)\n self.add_static_polygon(polygonShape(vertices=box_vertices(0, 0, 10,10)), skip_lever_x+270, skip_lever_y-15, filter=filters[2])\n self.parts.cmp_injector = self.horizontal_injector(skip_lever_x-48,skip_lever_y+257, groundBody)\n self.ball_bearing_block(skip_lever_x-30,skip_lever_y+280,cols=1)\n self.add_static_polygon(polygonShape(vertices=[(0,0), (20,0), (0,20)]), skip_lever_x-30,skip_lever_y+230)\n \n self.lower_regenerators = []\n self.parts.lower_regen_control = self.regenerator(-203,-400, groundBody, self.lower_regenerators)\n #Program counter\n self.parts.pc_reset_lever = self.subtractor(400,-320, groundBody, lines=5, toggle_joint_array=self.ip_toggles, is_actually_adder=True)\n # Thing that adds one ball bearing to the PC\n pc_incrementer_x = 457\n pc_incrementer = self.horizontal_injector(pc_incrementer_x,-240, groundBody)\n # Overspill loose ball bearings from the PC incrementer to the PC reader\n overspill_width = pc_incrementer_x - pc_injector_x - 125\n self.add_static_polygon([ (0, 0), (overspill_width,15), (overspill_width,20), (0,5) ], pc_injector_x+125,pc_injector_y+65)\n\n # Block to stop right-side overspill on incrementer\n self.add_static_polygon([ (0, 0), (5,0), (5,20), (0,20) ], pc_incrementer_x+27,-240+40)\n\n\n self.ball_bearing_block(457+20,-250+30,cols=1)\n self.distance_joint(skip_lever, pc_incrementer)\n\n self.connect_regenerators()\n\n # Large collection plates at the bottom\n self.add_static_polygon([ (-300,-600),(700,-550), (700,-610), (-300,-610)])\n self.add_static_polygon([ (600,-610),(700,-610), (850,-400), (800,-400)])\n self.add_static_polygon([ (-500,-400),(-450,-400), (-310,-610), (-400,-610)])\n\n # Instruction decoder ROM\n self.rom_followers = []\n self.rom_selectors = []\n 
(self.parts.instruction_selector_holdoff, self.parts.instruction_follower_holdoff) = self.add_row_decoder(200, 0, groundBody, self.rom_followers, self.rom_selectors)\n self.labels.append((\"Instruction decoder\", 400,0,0))\n\n self.add_instruction_cranks(groundBody, 550, 140)\n self.parts.sender_eject = self.memory_sender(198,self.memory_sender_y, groundBody)\n self.connect_memory()\n\n # A guard which stops waste data from the subtractor falling into the instruction register\n self.add_static_polygon([ (0,0), (120,120), (120,123), (0,3)], 120, self.memory_sender_y-10)\n\n # Add one final transfer band to move everything back into band 0\n self.transfer_bands.append((-550+10, -550, [ (-300,800)], 1))", "def name(self):\n return f\"PoolSense {BINARY_SENSORS[self.info_type]['name']}\"", "def info(self):\n\n print(\"pupil file =\", self.pupil_file)\n print(\"phase file =\", self.phase_file)\n print(\"wavelengths and weights =\")\n for i in range(len(self.filter[0])):\n print(\" %10.5f %6.4f\" % (self.filter[0][i], self.filter[1][i]))\n print(\"pupil diameter (meters) =\", self.D)\n if self.oversample == 2:\n print(\"oversampling factor = 2 (Nyquist sampling)\")\n else:\n r = float(self.oversample) / 2.\n print(\"oversampling factor = %d (%g * Nyquist sampling)\" % \\\n (self.oversample, r))\n if self.type == SINGLE_PREC:\n print(\"computations will use single precision\")\n else:\n print(\"computations will use double precision\")\n print(\"size of output image =\", self.output_size)\n if self.cdelt is not None:\n print(\"output pixel size (arcsec) =\", self.cdelt / ARCSECtoDEGREES)\n if self.output_written:\n print(\"The computed PSF has been written to the output file.\")\n else:\n print(\"The output file has not been written yet.\")", "def bus_interruptions_num(self) -> float:\n return self.dss_obj.BUSF(7, 0)", "def read_uef_details(chunks):\n\n\tpos, chunk = find_next_chunk(chunks, 0, [0x0])\n\n\tif pos == None:\n\n\t\toriginator = 'Unknown'\n\n\telif chunk[1] == '':\n\n\t\toriginator = 'Unknown'\n\telse:\n\t\toriginator = chunk[1]\n\n\tpos, chunk = find_next_chunk(chunks, 0, [0x5])\n\n\tif pos == None:\n\n\t\tmachine, keyboard = 'Unknown', 'Unknown'\n\n\telse:\n\n\t\tmachines = ('BBC Model A', 'Electron', 'BBC Model B', 'BBC Master')\n\t\tkeyboards = ('Any layout', 'Physical layout', 'Remapped')\n\n\t\tmachine = ord(chunk[1][0]) & 0x0f\n\t\tkeyboard = (ord(chunk[1][0]) & 0xf0) >> 4\n\n\t\tif machine < len(machines):\n\t\t\tmachine = machines[machine]\n\t\telse:\n\t\t\tmachine = 'Unknown'\n\n\t\tif keyboard < len(keyboards):\n\t\t\tkeyboard = keyboards[keyboard]\n\t\telse:\n\t\t\tkeyboard = 'Unknown'\n\n\tpos, chunk = find_next_chunk(chunks, 0, [0xff00])\n\n\tif pos == None:\n\n\t\temulator = 'Unknown'\n\n\telif chunk[1] == '':\n\n\t\temulator = 'Unknown'\n\telse:\n\t\temulator = chunk[1]\n\n\n\t# Remove trailing null bytes\n\twhile originator[-1] == '\\000':\n\n\t\toriginator = originator[:-1]\n\n\twhile emulator[-1] == '\\000':\n\n\t\temulator = emulator[:-1]\n\n\tfeatures = ''\n\tif find_next_chunk(chunks, 0, [0x1])[0] != None:\n\t\tfeatures = features + '\\n' + 'Instructions'\n\tif find_next_chunk(chunks, 0, [0x2])[0] != None:\n\t\tfeatures = features + '\\n' + 'Credits'\n\tif find_next_chunk(chunks, 0, [0x3])[0] != None:\n\t\tfeatures = features + '\\n' + 'Inlay'\n\n\treturn originator, machine, keyboard, emulator, features", "def design_body(self):\n pass", "def main():\n parser = argparse.ArgumentParser(description=\"Generate standard form system FMUs through commandline\")\n 
parser.add_argument(\"--name\", default=\"qmodel\", type=str, help=\"Target FMU identifier\")\n parser.add_argument(\"--dir\", default=os.getcwd(), type=str, help=\"Target FMU path\")\n parser.add_argument(\"-v\", \"--verbose\", help=\"Verbose output\", action=\"store_true\")\n parser.add_argument(\"-n\", \"--dry-run\", help=\"Only print system information, use with -v.\", action=\"store_true\")\n\n subparsers = parser.add_subparsers(title=\"System form\", dest=\"subcmd\")\n ss = subparsers.add_parser(\"ss\", help=\"State space model: A, B, C, D\",\n description=\"Define ABCD matrices using string. The string is interpreted as a matrix with commas or spaces separating columns, and semicolons separating rows. e.g. '1,2;3,4' -> 2x2 matrix\")\n ss.add_argument(\"-A\", required=False, type=str, help=\"A matrix\")\n ss.add_argument(\"-B\", required=False, type=str, help=\"B matrix\")\n ss.add_argument(\"-C\", required=False, type=str, help=\"C matrix\")\n ss.add_argument(\"-D\", required=False, type=str, help=\"D matrix\")\n ss.add_argument(\"-x0\", required=False, type=str, help=\"Init state values, zero vector if empty\")\n ss.add_argument(\"-u0\", required=False, type=str, help=\"Init input values, zero vector if empty\")\n\n # tf = subparsers.add_parser(\"tf\", help=\"Transfer function (WIP)\")\n # tf.add_argument(\"-n\", default=\"1,0\", type=str, help=\"Numerator\")\n # tf.add_argument(\"-d\", default=\"1\", type=str, help=\"Denominator\")\n\n try:\n args = parser.parse_args()\n if args.subcmd == \"ss\":\n from qfmu.utils import str_to_1d_array, str_to_2d_array\n A = None if args.A is None or args.A==\"\" else str_to_2d_array(args.A)\n B = None if args.B is None or args.B==\"\" else str_to_2d_array(args.B)\n C = None if args.C is None or args.C==\"\" else str_to_2d_array(args.C)\n D = None if args.D is None or args.D==\"\" else str_to_2d_array(args.D)\n x0 = None if args.x0 is None or args.x0==\"\" else str_to_1d_array(args.x0)\n u0 = None if args.u0 is None or args.u0==\"\" else str_to_1d_array(args.u0)\n ss = StateSpace(A, B, C, D, x0, u0)\n m = Lti(ss, identifier=args.name)\n if args.verbose:\n logging.basicConfig(level=logging.INFO)\n if args.dry_run:\n print(f\"Target FMU:\\n{os.path.join(os.path.abspath(args.dir), args.name)}.fmu\")\n print(f\"System info:\\n{ss}\")\n else:\n m.buildFMU(args.dir)\n else:\n raise Exception(\"Unknown system form\")\n except Exception as ex:\n logging.error(ex)\n return -1\n\n return 0", "def CheckBasicFluidProperty(self, site, addSiteError):\n if not (site.IsFluid == Site.SOLID or\n site.IsFluid == Site.FLUID):\n addSiteError('Site doesn\\'t appear to have an appropriate value for \\'IsFluid\\': %d'.format(site.IsFluid))\n return", "def singularity_handling(self):\n return self._sing_handling", "def description(self):\n return '%s and a disk' % self.component.description()", "def __init__(\n self,\n system,\n class_name,\n header_path_prefix,\n header_extension,\n period_variant=False,\n ):\n self.system = system\n self.class_name = class_name\n self.header_path_prefix = header_path_prefix\n self.header_extension = header_extension\n template = (\n \"<\"\n + str(system.sysd.A.shape[0])\n + \", \"\n + str(system.sysd.B.shape[1])\n + \", \"\n + str(system.sysd.C.shape[0])\n + \">\"\n )\n\n self.period_variant = period_variant\n if period_variant:\n self.class_type = \"PeriodVariant\"\n self.plant_coeffs_header = \"PeriodVariantPlantCoeffs\"\n self.obsv_coeffs_header = \"PeriodVariantKalmanFilterCoeffs\"\n self.loop_header = \"PeriodVariantLoop\"\n 
else:\n self.class_type = \"StateSpace\"\n self.plant_coeffs_header = \"StateSpacePlantCoeffs\"\n self.obsv_coeffs_header = \"StateSpaceObserverCoeffs\"\n self.loop_header = \"StateSpaceLoop\"\n\n self.ctrl_coeffs_header = \"StateSpaceControllerCoeffs\"\n self.ctrl_coeffs_type = \"frc::\" + self.ctrl_coeffs_header + template\n self.plant_coeffs_type = \"frc::\" + self.plant_coeffs_header + template\n self.obsv_coeffs_type = \"frc::\" + self.obsv_coeffs_header + template\n self.loop_type = \"frc::\" + self.loop_header + template", "def main():\n import run\n width = 1024\n height = 768\n myEmpire = {'imageFile':'testempire', 'name': 'Kurita', 'ip': '', 'AL': 0, 'EC': 0, 'color1': 'red', 'simulationsLeft': 0, 'color1': 'black', 'emailAddress': '', 'key': '', 'designsLeft': 0, 'IA': 0, 'CR': 0, 'cities': 10}\n dIndustry = {'1':'Factory-1', '3':'Research Center-1', '2':'Marine Academy-2'}\n dShips = {'1':'5 Scouts', '2':'10 Destroyers', '3':'3 Super Dreadnoughts'}\n dMarines = {'1':'10 Flamers', '2':'10 Strikers', '3':'5 Gunners'}\n myIntelReport = {'round':99, 'industryReport':dIndustry, 'shipReport':dShips, 'marineReport':dMarines}\n mySystem = {'intelReport':myIntelReport, 'AL':12032, 'EC':455, 'IA':54, 'name': 'Hannah', 'imageFile': 'sys_8_yellow_black', 'id': '1001', 'connectedSystems': [], 'y': 300, 'x': 200, 'cities': 10, 'citiesUsed':4, 'myEmpireID': '2'}\n systemID = '1000'\n pyui.init(width, height, 'p3d', 0, 'Testing System Info Panel')\n app = run.TestApplication(width, height)\n frame = SystemInfoFrame(None, app)\n frame.panel.populate(myEmpire, mySystem)\n app.addGui(frame)\n app.run()\n pyui.quit()", "def display_network_architecture(self):\n self.log(\"\\n-------- Network architecture --------\")\n self.log(\"y_res: {}\".format(self.y_res))\n self.log(\"x_res: {}\".format(self.x_res))\n self.log(\"n_input_channels: {}\".format(self.n_input_channels))\n self.log(\"n_output_classes: {}\".format(self.n_output_classes))\n self.log(\"fc1_n_chan: {}\".format(self.fc1_n_chan))\n self.log(\"fc1_dropout: {}\".format(self.fc1_dropout))\n self.log(\"alpha: {}\".format(self.alpha))\n self.log(\"n_samples_trained: {}\".format(self.n_samples_trained))\n for c in range(self.n_output_classes):\n self.log( \" * Class {}, m = {}\".format( \\\n c, self.n_class_samples_trained[c] ) )", "def setDetails(self):\n pwd = utils.get_cwd()\n production_idx = pwd.find('production')+13\n self.prodDir = pwd[0:production_idx]\n self.nd280Version = os.path.basename(os.getenv('ND280ROOT'))\n self.production = self.prodDir.split('production')[-1].strip('0')\n self.respin = utils.split_path(pwd[production_idx+1:])[0]\n self.nuMCType = utils.split_path(pwd[production_idx+1:])[2]\n self.fillFromCard('runInfo.card')\n\n self.usingNUCP = False\n if 'beam/' in pwd:\n self.beam = 'beam'\n if 'nue/' in pwd:\n self.beam = 'nue'\n self.nuType = 'nue'\n if 'run1/' in pwd:\n self.beam = 'run1'\n self.ecalPeriods = '1-2'\n if 'run2/' in pwd:\n self.beam = 'run2'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '1-2'\n if 'run3/' in pwd:\n self.beam = 'run3'\n self.tpcPeriods = 'runs2-3'\n self.ecalPeriods = '3-4'\n if 'run4/' in pwd:\n self.beam = 'run4'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'run5/' in pwd:\n self.beam = 'run5'\n self.tpcPeriods = 'runs2-3-4'\n self.ecalPeriods = '3-4'\n if 'ccpiplus/' in pwd:\n self.beam = 'ccpiplus'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 0\n self.nPiPlus = 1\n self.usingNUCP = True\n if 'ccpizero/' in pwd:\n self.beam = 
'ccpizero'\n self.nMesons = 0\n self.nLeptons = 1\n self.nMuMinus = 1\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'ncpiplus/' in pwd:\n self.beam = 'ncpiplus'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 0\n self.nPiPlus = 1\n self.usingNUCP = True\n if 'ncpizero/' in pwd:\n self.beam = 'ncpizero'\n self.nMesons = 0\n self.nLeptons = 0\n self.nMuMinus = 0\n self.nPiZero = 1\n self.nPiPlus = 0\n self.usingNUCP = True\n if 'tpcgas/' in pwd:\n self.beam = 'tpcgas'\n if 'verify/' in pwd:\n self.verify = True\n if self.nuMCType == 'anti-genie':\n self.runprefix -= 10000000\n if 'genie' in pwd:\n self.mc = 'Genie'\n self.runprefix += 1000000\n self.respin = pwd[pwd.find(self.prodDir)+len(self.prodDir)+1:][0]\n if self.respin not in string.uppercase:\n print 'Respin', self.respin, 'doesn\\'t appear to be an UPPER CASE LETTER'\n if '2010-11' in pwd:\n self.baseline = '2010-11'\n\n if 'magnet/' in pwd:\n self.runN = int(pwd[pwd.find('/run')+4])\n self.runprefix += (self.runN-1)*100000\n\n if 'water' in pwd:\n self.fill = 'water'\n self.p0dwater = 1\n self.runprefix += 10000\n if 'basket/' in pwd:\n self.fluxVolume = 'basket'\n self.fluxMasterVolume = 'Basket'\n self.fluxName = 'basket'\n self.runN = 2\n self.runprefix += 101000\n if 'nue/' in pwd:\n self.fluxName = 'Nue'\n self.runprefix += 1000\n elif 'ncpizero/' in pwd:\n self.fluxName = 'NC1pi0'\n self.runprefix += 2000\n elif 'ccpizero/' in pwd:\n self.fluxName = 'CC1pi0'\n self.runprefix += 3000\n elif 'ncpiplus/' in pwd:\n self.fluxName = 'NC1pi+'\n self.runprefix += 4000\n elif 'ccpiplus/' in pwd:\n self.fluxName = 'CC1pi+'\n self.runprefix += 5000\n elif 'ncpizerofgd/' in pwd:\n self.fluxName = 'NCpi0FGD'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 6000\n elif 'ccpicoh/' in pwd:\n self.fluxName = 'CCpicoh'\n self.fluxMasterVolume = 'FGD1'\n self.runprefix += 7000\n elif 'tpcgas/' in pwd:\n self.fluxName = 'TPCGas'\n # set this to mask ND280 geometry\n # the self.standalone option can be set to a single ND280 detector\n # and overrides the baseline setting. 
However, turns out that\n # setting master_volume to Half produces events only on argon so\n # we are using that instead.\n # self.standalone = 'TPC'\n self.fluxMasterVolume = 'Half'\n self.forceVolume = 'true'\n self.runprefix += 6000\n\n self.setBasePath()\n self.setNumcDir()\n self.setPassThruDir()\n self.setFluxDir()\n self.setFluxInfo()", "def construction(self):\n\t\n\t\tif self.simscale == 'aamd':\n\t\t\tif not os.path.isfile(self.rootdir+'vacuum-minimized.gro'): self.vacuum()\n\t\t\telse: print 'skipping vacuum construction because vacuum-minimized.gro exists'\n\t\telif self.simscale == 'cgmd':\t\n\t\t\tif not os.path.isfile(self.rootdir+'vacuum-minimized.gro'): self.vacuum_cgmd()\n\t\t\telse: print 'skipping vacuum construction because vacuum-minimized.gro exists'\n\t\telse: raise Exception('except: unclear simulation scale')\n\t\tif not os.path.isfile(self.rootdir+'solvate-minimized.gro'): self.solvate()\n\t\telse: print 'skipping solvate construction because solvate-minimized.gro exists'\n\t\tif not os.path.isfile(self.rootdir+'counterions-minimized.gro'): self.counterions()\n\t\telse: print 'skipping counterions construction because counterions-minimized.gro exists'\n\t\tif not os.path.isfile(self.rootdir+'system.gro'): self.groups()\n\t\telse: print 'skipping the grouping step because system.gro exists'", "def Handle(self) -> _n_0_t_8:", "def info(self):\n\n\t\tprint(\"Pixels on a side: {0}\".format(self.data.shape[0]))\n\t\tprint(\"Pixel size: {0}\".format(self.resolution))\n\t\tprint(\"Total angular size: {0}\".format(self.side_angle))\n\t\tprint(\"lmin={0:.1e} ; lmax={1:.1e}\".format(self.lmin,self.lmax))", "def define_description(self):\n self._description = 'NODDI-based processing of DWI datasets.'", "def view_system():\n\n pass", "def test_fc(self):\n self.assertEqual(self.nhf.metadata[\"ndim\"], 3)\n self.assertEqual(self.nhf.metadata[\"ngroup\"], 4)\n self.assertEqual(self.nhf.metadata[\"ninti\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintj\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintk\"], 6)\n self.assertEqual(self.nhf.metadata[\"nSurf\"], 6)\n self.assertEqual(self.nhf.metadata[\"nMom\"], 5)\n self.assertEqual(self.nhf.metadata[\"nintxy\"], 19)\n self.assertEqual(self.nhf.metadata[\"npcxy\"], 144)\n self.assertEqual(self.nhf.metadata[\"iaprx\"], 4)\n self.assertEqual(self.nhf.metadata[\"iaprxz\"], 3)\n\n variantControlInfo = nhflux.FILE_SPEC_1D_KEYS_VARIANT11\n for info in variantControlInfo:\n self.assertTrue(info not in self.nhf.metadata)", "def device_class(self):\n return \"heat\"", "def logic_program_form(self):\r\n return '% ASP{f} Translation of System Description ' + self.name + '\\n\\n'", "def __init__(self):\n ProcessingUnit.__init__(self)\n print(\" [ START ] init - Metodo Simulator Reader\")\n\n self.isConfig = False\n self.basicHeaderObj = BasicHeader(LOCALTIME)\n self.systemHeaderObj = SystemHeader()\n self.radarControllerHeaderObj = RadarControllerHeader()\n self.processingHeaderObj = ProcessingHeader()\n self.profileIndex = 2**32-1\n self.dataOut = Voltage()\n #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])\n code0 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1])\n #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])\n code1 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1])\n #self.Dyn_snCode = numpy.array([code0,code1])\n self.Dyn_snCode = None", "def classIIsys(Nen, W_eng, 
V_fuel, Iy_SI, S_cstot, L_fus, OEW, MTOW, Vmax, b):\n Wen = W_eng / 0.45359237 # lb\n Vt = V_fuel * 264.172052 # gallons\n Vi = V_fuel * 2 # lbs\n Vp = 0. # this is for self-sealing fuel tanks\n Nt = 6. # number of tanks\n Iy = Iy_SI / (0.45359237 * 0.3048 ** 2) # lbs ft^2\n Scs = S_cstot / 0.3048 ** 2 # ft^2 - total control surface area\n Nf = 6. # number of functions performed by the controls (range of 4-7)\n Nm = 1. # number of mechanical functions performed by controls (range 0-2)\n Lf = L_fus / 0.3048 # ft\n Bw = b / 0.3048 # ft\n Rkva = 2000. # system electical rating [kVA] --> coming form Iris\n Ngen = 10.\n La = 0.15 * b / 0.3048\n Vp = 0. # this is for self-sealing fuel tanks\n Nt = 6. # number of tanks\n\n '---------System weights------------------'\n W_starter = 49.19 * (Nen * Wen / 1000) ** 0.541 * 0.45359237 # kg\n # print 'W_starter',W_starter\n # cg_starter = -(l_pylon-0.5*l_eng)\n W_fuelsys = 2.405 * Vt ** 0.606 * (1 + Vi / Vt) ** -1 * (1 + Vp / Vt) * Nt ** 0.5 * 0.45359237 # kg\n # print 'W_fuelsys', W_fuelsys\n # cg_fuelsys = 0.5*c_root #ADJUST!\n W_fc = 145.9 * Nf ** 0.554 * (1 + Nm / Nf) ** -1 * Scs ** 0.2 * (Iy * 10 ** -6) ** 0.07 * 0.45359237 # kg\n # print 'W_fc', W_fc\n # cg_fc = 0.85*c_root\n # W_hyd = 0.2673* Nf* (Lf + Bw)**0.937* 0.45359237 #kg\n W_hyd_tor = 0.015 * OEW + 272 # kg\n # print 'W_hyd_tor = ', W_hyd_tor, 'kg'\n W_hyd = W_hyd_tor\n # print 'W_hyd', W_hyd\n # cg_hyd = 0.5*c_root\n W_el = 7.291 * Rkva ** 0.782 * La ** 0.346 * Ngen ** 0.1 * 0.45359237 # kg\n # print 'W_el',W_el\n # cg_el = 0.2*c_root\n W_ecs = 0.018 * OEW # from Torenbeek Try to find relation from ROSKAM 1990 or later!!\n # print 'W_ecs',W_ecs\n # cg_ecs = 0.3*c_root\n W_comms = 200 # kg - Assumption. After looking into the system, make a revision of this\n # print 'W_comms', W_comms\n # cg_comms = -0.85*l_nose\n W_apu = 0.004 * MTOW\n # print 'W_apu', W_apu\n '''---------------Avionics------------------'''\n W_autopilot = 11 * 5 * 0.45359237 * 1.5 * 1 * 0.4 # kg\n W_ins = 2 * 5 * 0.45359237 * 1.5 * 2 * 0.1 * 3 # kg\n W_gps = 0.5 * 5 * 1.5 * 2 * 1 # kg - unit weight*redundancy factor*adjustment_weights\n W_processors = 500 # kg - assumption\n W_cameras = 4 * 2 * 0.5 * (\n 4 * 0.5 + 4 * 1) * 0.45359237 # kg --> firrst three values are weights based on rugedness, performance, minituarisation\n W_recorders = 100 * 0.45359237 # kg - assumption (this is for 200GB of recorder)\n W_atc = (3 * (2.1 + 2 * 0.4) + 2 * 2.06) * 0.45359237 # kg for transponders and radios\n W_ads = 1.2 * (Vmax / 0.5144444) * 0.05 * 1.2 * 0.6 * 1.2 * 0.45359237 # kg Air Data System\n W_avionics = 1.5 * (W_autopilot + W_ins + W_gps + W_processors + W_cameras + W_recorders + W_atc + W_ads)\n # cg_avionics = -0.5*l_nose\n # print 'W_avionics',W_avionics\n\n W_sys = W_starter + W_fc + W_fuelsys + W_hyd + W_el + W_ecs + W_comms + W_avionics + W_apu\n # cg_sys = (cg_starter*W_starter + cg_fuelsys*W_fuelsys + cg_hyd*W_hyd + cg_el*W_el + cg_ecs*W_ecs + cg_comms*W_comms + cg_avionics*W_avionics)/W_sys\n # print 'W_sys', W_sys\n # Pack outputs\n output = Data()\n output.w_starter = W_starter\n output.w_fuelsys = W_fuelsys\n output.w_flightcontrol = W_fc\n output.w_hydraullics = W_hyd\n output.w_electrical = W_el\n output.w_env = W_ecs\n output.w_comms = W_comms\n output.w_avionics = W_avionics\n output.w_apu = W_apu\n output.w_system = W_sys\n return output", "def initializeDomainCondition(self):\n print('Initialize the condition.')\n\n self.fluidPDF = np.zeros([self.typesFluids, self.ny, self.nx, 9])\n self.fluidsDensity = 
np.zeros([self.typesFluids, self.ny, self.nx])\n self.physicalVX = np.zeros([self.ny, self.nx])\n self.physicalVY = np.zeros([self.ny, self.nx])\n self.forceX = np.zeros([self.typesFluids, self.ny, self.nx])\n self.forceY = np.zeros([self.typesFluids, self.ny, self.nx])\n if (self.PictureExistance == \"'no'\"):\n for i in sp.arange(self.ny):\n for j in sp.arange(self.nx):\n# for k in sp.arange(self.typesFluids):\n tmpCenterX = int(self.nx / 2); tmpCenterY = int(self.ny / 2)\n if (self.isDomain[i, j] == True):\n# if (sp.sqrt((i - tmpCenterY) * (i - tmpCenterY) + (j - \\\n# tmpCenterX) * (j - tmpCenterX)) <= 15.):\n# if (i < 15 and np.abs(j - tmpCenterX) < 15):\n# if ((i >0 and i < 28) and (j >=102 and j < 154)):\n if (i < self.ny - 10):\n# if (i < 128 and i > 70):\n self.fluidsDensity[0, i, j] = self.initialDensities[0]\n self.fluidPDF[0, i, j, :] = self.weightsCoeff * self.initialDensities[0]\n self.fluidsDensity[1, i, j] = self.backgroundDensities[1]\n self.fluidPDF[1, i, j, :] = self.weightsCoeff * self.backgroundDensities[1]\n else:\n self.fluidsDensity[1, i, j] = self.initialDensities[1]\n self.fluidPDF[1, i, j, :] = self.weightsCoeff * self.initialDensities[1]\n self.fluidsDensity[0, i, j] = self.backgroundDensities[0]\n self.fluidPDF[0, i, j, :] = self.weightsCoeff * self.backgroundDensities[0] \n \n if (self.isCycles == \"'no'\" and self.PictureExistance == \"'yes'\"):\n for i in sp.arange(self.ny):\n for j in sp.arange(self.nx):\n if (i < self.ny - 20):\n # if ( np.abs(i - 60) < 20):\n for k in sp.arange(self.typesFluids):\n if (k == 0 and self.isDomain[i, j] == 1):\n self.fluidPDF[k, i, j, :] = self.initialDensities[k] * self.weightsCoeff\n self.fluidsDensity[k, i, j] = self.initialDensities[k]\n if (k == 1 and self.isDomain[i, j] == 1):\n self.fluidPDF[k, i, j, :] = self.backgroundDensities[k] * self.weightsCoeff\n self.fluidsDensity[k, i, j] = self.backgroundDensities[k]\n else:\n for k in sp.arange(self.typesFluids):\n if (k == 0 and self.isDomain[i, j] == 1):\n self.fluidPDF[k, i, j, :] = self.backgroundDensities[k] * self.weightsCoeff\n self.fluidsDensity[k, i, j] = self.backgroundDensities[k]\n if (k == 1 and self.isDomain[i, j] == 1):\n self.fluidPDF[k, i, j, :] = self.initialDensities[k] * self.weightsCoeff\n self.fluidsDensity[k, i, j] = self.initialDensities[k]\n elif (self.isCycles == \"'yes'\" and self.PictureExistance == \"'yes'\"):\n username = getpass.getuser()\n pathIniFile = '/home/' + username + '/LBMInitial/'\n if (os.path.exists(pathIniFile) == True): \n #for the old fluid distribution\n #the domain of the network\n iniFile = tb.open_file(pathIniFile + 'SimulationResults.h5', 'r')\n for i in sp.arange(self.typesFluids-1):\n self.fluidsDensity[i, :-30, :] = eval('iniFile.root.FluidMacro.FluidDensityType%gin%d[:-30, :]' % (i, self.lastStep))\n self.fluidsDensity[i, -30:, :] = self.backgroundDensities[i]\n for j in sp.arange(self.ny):\n for k in sp.arange(self.nx):\n self.fluidPDF[i, j, k, :] = self.weightsCoeff * \\\n self.fluidsDensity[i, j, k]\n iniFile.close()\n# for the new fluid in the domain\n for i in sp.arange(self.ny):\n for j in sp.arange(self.nx):\n if (i < self.ny - 30 and self.isDomain[i, j] == 1):\n self.fluidsDensity[-1, i, j] = self.backgroundDensities[-1]\n self.fluidPDF[-1, i, j, :] = self.backgroundDensities[-1] * \\\n self.weightsCoeff\n# continue\n elif (i >= self.ny - 30 and self.isDomain[i, j] == 1):\n self.fluidsDensity[-1, i, j] = self.initialDensities[-1]\n self.fluidPDF[-1, i, j, :] = self.initialDensities[-1] * \\\n self.weightsCoeff\n 
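            # Restart case handled above: the previously simulated phases are reloaded from
            # SimulationResults.h5 and the newly injected phase is seeded only in the last 30 rows.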
else:\n print(\"There is no file for initializing the domain.\")\n sys.exit()", "def get_system_description(\n submitter: str,\n division: str,\n status: str,\n system_name: Optional[str] = None,\n host_processors_per_node: Optional[int] = None,\n) -> Dict[str, str]:\n is_cuda = torch.cuda.is_available()\n cpu_info = cpuinfo.get_cpu_info()\n\n system_desc = {\n 'submitter': submitter,\n 'division': division,\n 'status': status,\n 'number_of_nodes': dist.get_world_size() / dist.get_local_world_size(),\n 'host_processors_per_node': str(host_processors_per_node) if host_processors_per_node else '',\n 'host_processor_model_name': str(cpu_info.get('brand_raw', 'CPU')),\n 'host_processor_core_count': str(psutil.cpu_count(logical=False)),\n 'host_processor_vcpu_count': '',\n 'host_processor_frequency': '',\n 'host_processor_caches': '',\n 'host_processor_interconnect': '',\n 'host_memory_capacity': '',\n 'host_storage_type': '',\n 'host_storage_capacity': '',\n 'host_networking': '',\n 'host_networking_topology': '',\n 'host_memory_configuration': '',\n 'accelerators_per_node': str(dist.get_local_world_size()) if is_cuda else '0',\n 'accelerator_model_name': str(torch.cuda.get_device_name(None)) if is_cuda else '',\n 'accelerator_host_interconnect': '',\n 'accelerator_frequency': '',\n 'accelerator_on-chip_memories': '',\n 'accelerator_memory_configuration': '',\n 'accelerator_memory_capacity': '',\n 'accelerator_interconnect': '',\n 'accelerator_interconnect_topology': '',\n 'cooling': '',\n 'hw_notes': '',\n 'framework':\n f'PyTorch v{torch.__version__} and MosaicML composer v{composer.__version__}', # type: ignore (third-party missing stub)\n 'other_software_stack': {\n 'cuda_version': torch.version.cuda if is_cuda else '', # type: ignore (third-party missing stub)\n 'composer_version': composer.__version__,\n 'python_version': sys.version,\n },\n 'operating_system': f'{platform.system()} {platform.release()}',\n 'sw_notes': '',\n }\n\n if system_desc['number_of_nodes'] != 1:\n warnings.warn('Number of nodes > 1 not tested, proceed with caution.')\n\n if system_name is None:\n world_size = dist.get_world_size()\n if is_cuda:\n device_name = system_desc['accelerator_model_name']\n else:\n device_name = system_desc['host_processor_model_name']\n\n device_name = device_name.replace(' ', '_')\n system_name = f'{world_size}x{device_name}_composer'\n\n # default to system name as \"[world_size]x[device_name]\"\n # e.g. 
8xNVIDIA_A100_80GB\n system_desc['system_name'] = system_name\n return system_desc", "def build(self):\n\n # Create a custom grid, fe_set \n nfe = 6\n fe_a = 1/4.0\n fe_b = 0.2\n fe_set = [0, 0.004]\n for i in range(1,nfe+1):\n if i < nfe*fe_a:\n fe_set.append(i*fe_b/(nfe*fe_a))\n elif i == nfe: \n fe_set.append(1)\n else:\n fe_set.append(fe_b + (i-nfe*fe_a)*(1-fe_b)/(nfe*(1-fe_a)))\n\n \"\"\"\n Args:\n dae_method = method to use for calcuating derivatives (default = OCLR)\n - BFD1 - 1st order backwards finite difference\n - OCLR - Orthogonal collocation, Lagrange-Radau\n - OCLL - Orthogonal collocation, Lagrange-Legendre\n press_drop = Pressure drop correlation for superficial velocity calc.\n - SimplifiedP - simplified pressure correlations \n - Ergun - Ergun equation\n fe_set = set of normalised finite element locations\n nfe = number of finite elements for bed discretization (default = 15)\n (not used if fe_set specified)\n ncp = number of collocation points (OCLR or OCLL only, default = 3)\n \"\"\" \n\n # Create unit model for fuel reactor\n self.MB_fuel = MB_CLC_fuel.MB(\n parent=self,\n dae_method = 'OCLR',\n press_drop = 'Ergun',\n fe_set = fe_set,\n ncp = 3)", "def analyseSNRfluctuations(self, fsu='FSUA', snr='PD', plot=True,\n xlims=None, title='', normalized=False):\n t = self.raw['IMAGING_DATA_'+fsu].data.field('TIME')\n\n if (fsu=='FSUB' and self.insmode=='NORMAL') or \\\n (fsu=='FSUA' and self.insmode=='SWAPPED'):\n wno = np.where((np.interp(t, self.raw['OPDC'].data.field('TIME'),\n self.raw['OPDC'].data.field('STATE'))<3))\n else:\n wno = np.where(np.interp(t, self.raw['DOPDC'].data.field('TIME'),\n self.raw['DOPDC'].data.field('STATE'))<3)\n if (fsu=='FSUB' and self.insmode=='NORMAL') or \\\n (fsu=='FSUA' and self.insmode=='SWAPPED'):\n wftk = np.where((np.interp(t, self.raw['OPDC'].data.field('TIME'),\n self.raw['OPDC'].data.field('STATE'))>=7))\n else:\n wftk = np.where(np.interp(t, self.raw['DOPDC'].data.field('TIME'),\n self.raw['DOPDC'].data.field('STATE'))>=7)\n\n snrNo = self.raw['IMAGING_DATA_'+fsu].data.field(snr+'SNR')[wno[0]]\n snrFtk = self.raw['IMAGING_DATA_'+fsu].data.field(snr+'SNR')[wftk[0]]\n\n if plot:\n fig = plt.figure(1, figsize=(8,4))\n plt.subplots_adjust(left=0.08, right=.98)\n fig.clf()\n if normalized:\n norma = np.median(snrNo)\n plt.xlabel('normalized SNR to out-of-fringes')\n else:\n norma = 1\n plt.xlabel('SNR')\n\n plt.hist(snrNo/norma, bins=50, normed=True, alpha=0.5,\n color='r', label='NOT FTK')\n hno = np.histogram(snrNo, bins=50, normed=True)\n plt.hist(snrFtk/norma, bins=50, normed=True, alpha=0.5,\n color='g', label='FTK')\n hftk = np.histogram(snrFtk, bins=50, normed=True)\n\n if not xlims is None:\n plt.xlim(xlims[0], xlims[1])\n plt.title(title)\n poissonDist = lambda x,p:\\\n poisson(p['m']*p['p']).pmf(np.int_(np.floor(x*p['p'])))*p['p'] +\\\n (x*p['p']-np.floor(x*p['p']))/\\\n (np.ceil(x*p['p'])-np.floor(x*p['p']))*\\\n (poisson(p['m']*p['p']).pmf(np.int_(np.ceil(x*p['p'])))*p['p'] -\n poisson(p['m']*p['p']).pmf(np.int_(np.floor(x*p['p'])))*p['p'])\n\n guess = {'m':np.median(snrNo), 'p':1}\n X = 0.5*(hno[1][:-1]+hno[1][1:])\n fit = dpfit.leastsqFit(poissonDist, X, guess, hno[0])\n guessNo = fit['best']\n uncer = fit['uncer']\n chi2 = fit['chi2']\n model = fit['model']\n print guessNo\n print 'NOFTK; POISSON: LAMBDA', guessNo['p']*guessNo['m']\n print 'NOFTK; POISSON: STD/MEAN', 1/np.sqrt(guessNo['p']*guessNo['m'])\n plt.plot(X/norma, poissonDist(X, guessNo)*norma, '-r',\n linewidth=3, alpha=0.8, linestyle='dashed')\n\n guess = 
{'m':np.median(snrNo), 'p':1/10.}\n X = 0.5*(hftk[1][:-1]+hftk[1][1:]) \n fit = dpfit.leastsqFit(poissonDist, X, guess, hftk[0])\n guess = fit['best']\n uncer = fit['uncer']\n chi2 = fit['chi2']\n model = fit['model']\n print guess\n print ' FTK; POISSON: LAMBDA', guess['p']*guess['m']\n print ' FTK; POISSON: STD/MEAN', 1/np.sqrt(guess['p']*guess['m'])\n plt.plot(X/norma, poissonDist(X, guess)*norma, '-g',\n linewidth=3, alpha=0.8, linestyle='dashed')\n plt.legend( loc='upper left')\n #plt.xscale('log')\n\n print 'DIFFERENCIATION',\\\n np.abs(guess['m']-guessNo['m'])/\\\n (np.sqrt(guessNo['m']/guessNo['p']) +\n np.sqrt(guess['m']/guess['p']))\n return", "def handler_description(self):\n return None", "def width_h_invis(self):\n if m_higgs > 2.0 * self.mx:\n coupling = self.gsxx * self.stheta / np.sqrt(1 - self.stheta**2)\n\n val = (\n (coupling**2 * (m_higgs**2 - 4 * self.mx**2) ** 1.5)\n / (8.0 * m_higgs**2 * np.pi)\n ).real\n\n assert val >= 0\n\n return val\n else:\n return 0.0", "def startFluidinfo():\n sudo('start fluidinfo-api')\n sudo('/etc/init.d/haproxy start')\n sudo('/etc/init.d/nginx start')", "def runse(self):\n\n # check for se catalog\n\n \n\n t = self.image.split('.fits')\n froot = t[0]\n # check for se catalog\n secat = froot+'.cat'\n\n os.system('ln -s ' +self.astrodir + '/default.* .') \n if self.instrument == 'h':\n defaultcat = 'default.sex.HDI'\n elif self.instrument == 'i':\n defaultcat = 'default.sex.INT'\n self.keepsection=[1000,5000,0,4000]\n elif self.instrument == 'm':\n defaultcat = 'default.sex.HDI'\n elif self.instrument == 'b':\n print(\"hey Rose - \")\n print(\"using default.sex.BOK!!!\")\n print()\n defaultcat = 'default.sex.BOK.getzp'\n header = fits.getheader(self.image)\n try:\n expt = header['EXPTIME']\n except KeyError:\n expt = 1.\n ADUlimit = 40000.\n if self.instrument == 'i':\n if (self.filter == 'r'):\n ADUlimit = 400000./60#/float(expt)\n elif self.filter == 'ha':\n ADUlimit = 40000./180.\n #print('saturation limit in ADU/s {:.1f}'.format(ADUlimit))\n if self.fwhm is None:\n t = 'sex ' + self.image + ' -c '+defaultcat+' -CATALOG_NAME ' + froot + '.cat -MAG_ZEROPOINT 0 -SATUR_LEVEL '+str(ADUlimit)\n #t = 'sex ' + self.image + ' -c '+defaultcat+' -CATALOG_NAME ' + froot + '.cat -MAG_ZEROPOINT 0 -SATUR_LEVEL '\n if self.verbose:\n print('running SE first time to get estimate of FWHM')\n print(t)\n os.system(t)\n\n # clean up SE files\n # skipping for now in case the following command accidentally deletes user files\n # os.system('rm default.* .')\n\n\n ###################################\n # Read in Source Extractor catalog\n ###################################\n if self.verbose:\n print('reading in SE catalog from first pass')\n secat_filename = froot+'.cat'\n self.secat = fits.getdata(secat_filename,2)\n self.secat0 = self.secat\n # get median fwhm of image\n # for some images, this comes back as zero, and I don't know why\n fwhm = np.median(self.secat['FWHM_IMAGE'])*self.pixelscale\n \n \n t = 'sex ' + self.image + ' -c '+defaultcat+' -CATALOG_NAME ' + froot + '.cat -MAG_ZEROPOINT 0 -SATUR_LEVEL '+str(ADUlimit)+' -SEEING_FWHM '+str(fwhm)\n if float(fwhm) == 0:\n print('WARNING: measured FWHM is zero!')\n if self.verbose:\n print('running SE again with new FWHM to get better estimate of CLASS_STAR')\n else:\n t = 'sex ' + self.image + ' -c '+defaultcat+' -CATALOG_NAME ' + froot + '.cat -MAG_ZEROPOINT 0 -SATUR_LEVEL '+str(ADUlimit)+' -SEEING_FWHM '+str(self.fwhm)\n if self.verbose:\n print(t)\n print('running SE w/user input for FWHM to get better 
estimate of CLASS_STAR') \n #############################################################\n # rerun Source Extractor catalog with updated SEEING_FWHM\n #############################################################\n\n #print(t)\n os.system(t)\n self.read_se_cat()", "def print_me(self):\n\n print(\"----- Model:\",self.name,\" -----\")\n print(\"Mass (in M_sun): %.5f\" % (self.glb[imass]/constants.solar_mass))\n print(\"Radius (in R_sun): %.5f\" % (self.glb[iradius]/constants.solar_radius))\n print(\"Reference frequency (in uHz): %.3f\" % self.glb[ifreq_ref])\n print(\"Temperature (in K): %.1f\" % self.glb[itemperature])\n print(\"Luminosity (in L_sun): %.3g\" % (self.glb[iluminosity]/constants.solar_luminosity))\n print(\"Age (in Myrs): %.2f\" % self.glb[iage])\n print(\"Z: %.4f\" % self.glb[iz0])\n print(\"X: %.4f\" % self.glb[ix0])\n for (name, latex_name) in config.user_params:\n print(\"{0:29} {1:.5e}\".format(name,self.glb[user_params_index[name]]))\n print(\"Modes (in muHz):\")\n size = self.modes.shape[0]\n for i in range(size):\n print(\" (n,l,freq,IK) = (%d, %d, %.15f, %.5e)\" % \\\n (self.modes['n'][i], self.modes['l'][i], \\\n self.modes['freq'][i]*self.glb[ifreq_ref],\\\n self.modes['inertia'][i]))", "def display_network_architecture(self):\n self.log(\"\\n-------- Network architecture --------\")\n self.log(\"y_res: {}\".format(self.y_res))\n self.log(\"x_res: {}\".format(self.x_res))\n self.log(\"n_input_channels: {}\".format(self.n_input_channels))\n self.log(\"n_output_classes: {}\".format(self.n_output_classes))\n self.log(\"input_dropout: {}\".format(self.fc1_dropout))\n self.log(\"alpha: {}\".format(self.alpha))\n self.log(\"n_samples_trained: {}\".format(self.n_samples_trained))\n for c in range(self.n_output_classes):\n self.log( \" * Class {}, m = {}\".format( \\\n c, self.n_class_samples_trained[c] ) )", "def print_info(self):\r\n self.system.print_to_log(\r\n f\"{self.__class__.__name__} model: Infection probability: {self.p}, Infectious period: {self.i}, Recovery period: {self.r}.\")", "def show_device_information_enclosures(self):\n\n if self._report_header:\n print(\"\\n\")\n print(\" Linux SCSI Firmware Drive Block Curr Enc SCSI Enc Enc Slot\")\n print(\" Device Device Vendor Product Revision Capacity Length Temp Serial Number SAS Address Device Slot Description\")\n print(\"---------- ---------- -------- ---------------- -------- ------------ ------ ---- -------------- ------------------ ---------- ---- -----------------\")\n\n for device in self._devices:\n if not device['Device Type'].startswith(\"disk\"):\n continue\n print('{dsf:<10} {sdsf:<10} {vid:<8} {pid:<16} {fw:<8} {capacity:>12} {blocklen:>4} {temp:<4} {serial:<14} {sas:<18} {edsf:<10} {slot:<3} {text:<16}'\n .format(dsf=device['Linux Device Name'],\n sdsf=device['SCSI Device Name'],\n vid=device['Vendor Identification'],\n pid=device['Product Identification'],\n fw=device['Firmware Version'],\n capacity=device['Drive Capacity'],\n blocklen=device['Block Length'],\n temp=device['Current Temperature'],\n serial=device['Serial Number'],\n sas=device['SAS Address'],\n edsf=device['Enclosure Device'],\n slot=device['Enclosure Slot'],\n text=device['Slot Description']))\n\n if self._report_header:\n print(\"\\n\")", "def main():\n print\n print 'Module: be_process_mntstation_level0000'\n print 'Version: ' + __version__\n print 'Author: ' + __author__\n print 'License: ' + __license__\n print \n \n config_file = \"ki_config.cnf\"\n toplevel_processing_plots_path, project_id = configure(config_file)\n path = 
toplevel_processing_plots_path + project_id\n station_dataset=locate(\"*.*\", \"*\", path)\n for dataset in station_dataset:\n print \" \"\n print \"Checking dataset \", dataset\n act_filepath = os.path.dirname(dataset)\n act_filename = os.path.basename(dataset)\n if \"ki_\" in act_filename:\n act_filename = act_filename[act_filename.index(\"ki_\"):]\n os.rename(dataset, act_filepath + os.sep + act_filename)\n\n print \" \"\n print \" \"\n print \" \"\n station_dataset=locate(\"*.dat\", \"*\", path)\n for dataset in station_dataset:\n act_filepath = os.path.dirname(dataset)\n act_filename = os.path.basename(dataset)\n if act_filename.startswith(\"ki_\") == False:\n print dataset", "def display_network_architecture(self):\n self.log(\"\\n-------- Network architecture --------\")\n self.log(\"y_res: {}\".format(self.y_res))\n self.log(\"x_res: {}\".format(self.x_res))\n self.log(\"n_input_channels: {}\".format(self.n_input_channels))\n self.log(\"n_output_classes: {}\".format(self.n_output_classes))\n self.log(\"conv1_size: {}\".format(self.conv1_size))\n self.log(\"conv1_n_chan: {}\".format(self.conv1_n_chan))\n self.log(\"conv1_n_pool: {}\".format(self.conv1_n_pool))\n self.log(\"conv2_size: {}\".format(self.conv2_size))\n self.log(\"conv2_n_chan: {}\".format(self.conv2_n_chan))\n self.log(\"conv2_n_pool: {}\".format(self.conv2_n_pool))\n self.log(\"fc1_n_chan: {}\".format(self.fc1_n_chan))\n self.log(\"fc1_dropout: {}\".format(self.fc1_dropout))\n self.log(\"alpha: {}\".format(self.alpha))\n self.log(\"n_samples_trained: {}\".format(self.n_samples_trained))\n for c in range(self.n_output_classes):\n self.log( \" * Class {}, m = {}\".format( \\\n c, self.n_class_samples_trained[c] ) )", "def extract_specs(self):\n vDeflection_unit = \"lcd-info.{}.conversion-set.conversion.force.scaling.unit.unit\".format(\n self.channel_numbers[\"vDeflection\"])\n self.units[\"vDeflection\"] = self.general[vDeflection_unit]\n\n height_unit = \"lcd-info.{}.conversion-set.conversion.nominal.scaling.unit.unit\".format(\n self.channel_numbers[\"height\"])\n self.units[\"height\"] = self.general[height_unit]", "def IC_FC_visualization(self):\n legend = ['1st CWT','2nd CWT','IC','FC']\n title = 'Optimized ICs and FCs detection'\n IC_values = [self.IC,normalize(self.cwt1)[self.IC]]\n FC_values = [self.FC,normalize(self.cwt2)[self.FC]]\n visualize_signal(legend, title, normalize(self.cwt1), normalize(self.cwt2), IC = IC_values, FC = FC_values)", "def test_issue_1216(self):\n from openff.toolkit.typing.engines.smirnoff.parameters import VirtualSiteHandler\n\n force_field = ForceField()\n force_field.get_parameter_handler(\"Electrostatics\")\n\n vsite_handler: VirtualSiteHandler = force_field.get_parameter_handler(\n \"VirtualSites\"\n )\n vsite_handler.add_parameter(\n {\n \"smirks\": \"[#6:1][#9:2]\",\n \"name\": \"EP\",\n \"type\": \"BondCharge\",\n \"distance\": 1.0 * unit.angstrom,\n \"match\": \"all_permutations\",\n \"charge_increment1\": 0.2 * unit.elementary_charge,\n \"charge_increment2\": 0.1 * unit.elementary_charge,\n \"sigma\": 1.0 * unit.angstrom,\n \"epsilon\": 0.0 * unit.kilocalorie_per_mole,\n }\n )\n\n library_handler: LibraryChargeHandler = force_field.get_parameter_handler(\n \"LibraryCharges\"\n )\n library_handler.add_parameter(\n {\n \"smirks\": \"[F:2][C:1]([H:3])([H:4])([H:5])\",\n \"charge\": [\n 0.3 * unit.elementary_charge,\n -0.15 * unit.elementary_charge,\n -0.05 * unit.elementary_charge,\n -0.05 * unit.elementary_charge,\n -0.05 * unit.elementary_charge,\n ],\n }\n )\n 
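        # Labelling a fluoromethane (CF) topology exercises both the BondCharge virtual site
        # and the five-atom library charge registered above.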
force_field.label_molecules(Molecule.from_smiles(\"CF\").to_topology())", "def describe(self):\n if not self.name:\n raise ValueError(\"Sorry! id_type must be 'name'\")\n r = requests.get(f\"https://api.fda.gov/drug/ndc.json?search=brand_name:{self.drug_id}\")\n response = r.json()\n data = response['results'][0]\n self.brand_name = data['brand_name']\n self.generic_name = data['generic_name']\n self.active_ingredients = [i['name'] for i in data['active_ingredients']]\n self.pharm_class = get_pharm_class(self.drug_id, as_df=False)\n self.route = data['route']\n self.ndc = data['product_ndc']\n self.product_type = data['product_type']\n\n print(f\"Generic name: {self.generic_name}\")\n print(f\"Brand name: {self.brand_name}\")\n print(f\"Active ingredients: {self.active_ingredients}\")\n print(f\"Routes of administration: {self.route}\")\n print(f\"Pharmacologic Classes: {self.pharm_class}\")\n print(f\"NDC: {self.ndc}\")\n print(f\"Product type: {self.product_type}\")", "def calculations():\r\n\t\r\n\tpayload, avionics, booster = weight_input()\r\n\r\n\tdrogue_size, drogue_force = drogue_calc()\r\n\tmain_size, main_force = main_calc(avionics, booster, drogue_force) #total mass, payload detaches\r\n\r\n\tprint(\"Drogue is diameter is \" + str(drogue_size) + \" inches\")\r\n\tprint(\"Main is diameter is \" + str(main_size) + \" inches\")", "def prepare_sushi_input(self):\n \n for higgsname, higgstype in {'H': 12, 'A': 21}.iteritems():\n \n # Parse LHA file\n lha = LHA(self.lhafile)\n \n # Add SusHi-specific blocks\n sushi = Block('SUSHI', comment='SusHi specific')\n sushi.add(Entry([1, 2], comment='Select 2HDM'))\n sushi.add(Entry([2, higgstype], comment='h / H / A'))\n sushi.add(Entry([3, 0], comment='p-p collisions'))\n sushi.add(Entry([4, 13000], comment='E_cm'))\n sushi.add(Entry([5, 2], comment='ggH at NNLO'))\n sushi.add(Entry([6, 2], comment='bbH at NNLO'))\n sushi.add(Entry([7, 2], comment='SM EW content'))\n sushi.add(Entry([19, 1], comment='Verbosity'))\n sushi.add(Entry([20, 0], comment='All processes'))\n lha.add_block(sushi)\n \n # 2HDM block\n thdm = Block('2HDM', '2HDM parameters')\n thdm.add(Entry([1], comment='Type I'))\n lha.add_block(thdm)\n \n # Kinematic distribution parameters\n distrib = Block('DISTRIB', comment='Kinematic requirements')\n distrib.add(Entry([1, 0], comment='Sigma total'))\n distrib.add(Entry([2, 0], comment='Disable pT cut'))\n #distrib.add(Entry([21, GENER_SETTINGS['higgs_pt_min']], comment='Min higgs pT'))\n distrib.add(Entry([3, 0], comment='Disable eta cut'))\n #distrib.add(Entry([32, GENER_SETTINGS['higgs_eta_max']], comment='Max eta'))\n distrib.add(Entry([4, 1], comment='Use eta, not y'))\n lha.add_block(distrib)\n \n # PDF selection\n pdfspec = Block('PDFSPEC')\n pdfspec.add(Entry([1, 'MMHT2014lo68cl.LHgrid'], comment='Name of pdf (lo)'))\n pdfspec.add(Entry([2, 'MMHT2014nlo68cl.LHgrid'], comment='Name of pdf (nlo)'))\n pdfspec.add(Entry([3, 'MMHT2014nnlo68cl.LHgrid'], comment='Name of pdf (nnlo)'))\n pdfspec.add(Entry([4, 'MMHT2014nnlo68cl.LHgrid'], comment='Name of pdf (n3lo)'))\n pdfspec.add(Entry([10, 0], comment='Set number'))\n lha.add_block(pdfspec)\n \n # Add charm mass\n lha.get_block('SMINPUTS').add(Entry([8, 1.275], comment='m_c'))\n \n # Write output\n suffix = '_%s_sushi.in' % higgsname\n outname = self.lhafile.replace('.lha', suffix)\n self.sushi_input[higgsname] = outname\n \n lha.write(outname)\n \n return 0", "def hardware(*args, brdType: bool=True, cpuType: bool=True, graphicsType: bool=True, megaHertz:\n bool=True, 
numProcessors: bool=True, **kwargs)->AnyStr:\n pass", "def wypisz_info(self):\n print(f\"Samochód: {self.producent} {self.model}\")", "def metallicity(method, emsystem):\n if method == 'PG16':\n # Requires Hbeta, [OII], [OIII], [NII], [SII]\n R2 = (emsystem.get_emline('[OII] 3726').attrib['flux'] +\n emsystem.get_emline('[OII] 3729').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n R3 = (emsystem.get_emline('[OIII] 4959').attrib['flux'] +\n emsystem.get_emline('[OIII] 5007').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n N2 = (emsystem.get_emline('[NII] 6548').attrib['flux'] +\n emsystem.get_emline('[NII] 6584').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n S2 = (emsystem.get_emline('[SII] 6716').attrib['flux'] +\n emsystem.get_emline('[SII] 6731').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n # Proceed\n if np.log10(N2) < -0.6:\n r_val = 7.932 + 0.944*np.log10(R3/R2) + 0.695*np.log10(N2) + \\\n ((0.97 - 0.291*np.log10(R3/R2)) - 0.019*np.log10(N2))*np.log10(R2)\n\n s_val = 8.072 + 0.789*np.log10(R3/S2) + 0.726*np.log10(N2) + \\\n (1.069 - 0.170*np.log10(R3/S2) +0.022*np.log10(N2))*np.log10(S2)\n else:\n r_val = 8.589 + 0.022*np.log10(R3/R2) + 0.399*np.log10(N2) + \\\n (-0.137 + 0.164*np.log10(R3/R2) + 0.589*np.log10(N2))*np.log10(R2)\n\n s_val = 8.424 + 0.030*np.log10(R3/S2) + 0.751*np.log10(N2) + \\\n (-0.349 + 0.182*np.log10(R3/S2) +0.508*np.log10(N2))*np.log10(S2)\n return r_val.decompose().value, s_val.decompose().value", "def device_class(self):\n return \"problem\"", "def usefulFunction():\n print(platform.uname()) #displayed this computer's specifications", "def __init__(self):\n self.ram = [0]*256\n self.reg = [0]*8 # general-purpose registers\n # R5 reserved for interrupt mask (IM)\n # R6 reserved for interrupt status(IS)\n # R7 reserved for stack pointer(SP)\n self.reg[7] = int(\"F4\", 16)\n self.sp = self.reg[7] # Stack Pointer\n self.pc = 0 # program counter\n self.mar = None # Memory address register where reading/writing\n self.mdr = None # Memory Data Register holds value to write/read\n self.fl = {} # holds current flags\n self.fl[\"E\"] = 0\n self.fl[\"L\"] = 0\n self.fl[\"G\"] = 0\n \"\"\"Interput Vector Table\"\"\"\n self.ram[int(\"FF\", 16)] = \"I7\"\n self.ram[int(\"FE\", 16)] = \"I6\"\n self.ram[int(\"FD\", 16)] = \"I5\"\n self.ram[int(\"FC\", 16)] = \"I4\"\n self.ram[int(\"FB\", 16)] = \"I3\"\n self.ram[int(\"FA\", 16)] = \"I2\"\n self.ram[int(\"F9\", 16)] = \"I1\"\n self.ram[int(\"F8\", 16)] = \"I0\"\n \"\"\"Interput Vector Table\"\"\"\n self.dispatch_table = {}\n self.dispatch_table[LDI] = self.handle_LDI\n self.dispatch_table[PRN] = self.handle_PRN\n self.dispatch_table[HLT] = self.handle_HLT\n self.dispatch_table[ADD] = self.alu\n self.dispatch_table[SUB] = self.alu\n self.dispatch_table[MUL] = self.alu\n self.dispatch_table[DIV] = self.alu\n self.dispatch_table[CMP] = self.alu\n self.dispatch_table[AND] = self.alu\n self.dispatch_table[OR] = self.alu\n self.dispatch_table[XOR] = self.alu\n self.dispatch_table[NOT] = self.alu\n self.dispatch_table[SHL] = self.alu\n self.dispatch_table[SHR] = self.alu\n self.dispatch_table[MOD] = self.alu\n self.dispatch_table[PUSH] = self.handle_PUSH\n self.dispatch_table[POP] = self.handle_POP\n self.dispatch_table[CALL] = self.handle_CALL\n self.dispatch_table[RET] = self.handle_RET\n self.dispatch_table[JMP] = self.handle_JMP\n self.dispatch_table[JEQ] = self.handle_JEQ\n self.dispatch_table[JNE] = self.handle_JNE", "def InternalStructure(self):\n s = 
self.internalStructure\n assert s in range(1,5), \"Internal structure score out of bounds.\"\n if s == 1: return 'Soft Tissue'\n elif s == 2: return 'Fluid'\n elif s == 3: return 'Fat'\n elif s == 4: return 'Air'", "def engine():\r\n document.add_heading('Engine details', 1)\r\n\r\n engine_metrics = ['customProperties','listenerPorts','autosaveInterval', 'tableFilesDirectory', 'genericUndoBufferMaxSize', 'documentTimeout','documentDirectory',\r\n 'allowDataLineage', 'qrsHttpNotificationPort', 'standardReload',\r\n 'workingSetSizeLoPct', 'workingSetSizeHiPct', 'workingSetSizeMode','cpuThrottlePercentage', 'maxCoreMaskPersisted', 'maxCoreMask',\r\n 'maxCoreMaskHiPersisted', 'maxCoreMaskHi','objectTimeLimitSec', 'exportTimeLimitSec', 'reloadTimeLimitSec',\r\n 'hyperCubeMemoryLimit', 'exportMemoryLimit', 'reloadMemoryLimit', 'createSearchIndexOnReloadEnabled', 'hostname',\r\n 'globalLogMinuteInterval','auditActivityLogVerbosity','auditSecurityLogVerbosity','serviceLogVerbosity','systemLogVerbosity','performanceLogVerbosity',\r\n 'qixPerformanceLogVerbosity','auditLogVerbosity','sessionLogVerbosity','trafficLogVerbosity']\r\n\r\n enginenodes = get_qlik_sense.get_engine()\r\n num_of_engines = len(enginenodes)\r\n num_of_engine_metrics = len(engine_metrics)\r\n table = document.add_table(rows=num_of_engine_metrics+1, cols=num_of_engines+1)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n row.cells[0].text = 'Metric'\r\n for item in range(0, num_of_engines):\r\n row.cells[item+1].text = enginenodes[item][36]\r\n for item in range(num_of_engine_metrics):\r\n row = table.rows[item+1]\r\n row.cells[0].text = str(engine_metrics[item])\r\n for enginenode in range(num_of_engines):\r\n row.cells[enginenode+1].text = str(enginenodes[enginenode][item])\r\n\r\n document.add_page_break()", "def main():\n\n #\n # Generate waveform\n #\n\n print 'generating waveoform...'\n waveform = pmns_utils.Waveform('shen_135135_lessvisc')\n\n # Pick some extrinsic parameters\n ext_params = ExtParams(distance=1, ra=0.0, dec=0.0, polarization=0.0,\n inclination=0.0, phase=0.0, geocent_peak_time=0.0+5.0)\n\n # Construct the time series for these params\n waveform.make_wf_timeseries(theta=ext_params.inclination,\n phi=ext_params.phase)\n\n #\n # Generate IFO data\n #\n det1_data = DetData(waveform=waveform, ext_params=ext_params)\n\n from scipy import signal\n import pylab as pl\n\n pl.figure()\n pl.plot(det1_data.td_response.sample_times,det1_data.td_response.data)\n pl.plot(det1_data.td_signal.sample_times,det1_data.td_signal.data)\n\n pl.figure()\n f,p = signal.welch(det1_data.td_response.data, fs=1./det1_data.delta_t,\n nperseg=512)\n pl.loglog(f,np.sqrt(p))\n\n f,p = signal.welch(det1_data.td_signal.data, fs=1./det1_data.delta_t,\n nperseg=512)\n pl.loglog(f,np.sqrt(p))\n pl.ylim(1e-25,1e-21)\n pl.show()", "def __init__(self, fluid_domain, structure, io_data):\n super().__init__(fluid_domain, structure, io_data)\n\n self.intersector = Intersector(self.fluid_domain, self.structure)\n #self.ghost_nodes = np.empty(shape=[self.fluid_domain.nverts, 8],dtype=float)\n self.ghost_nodes = np.empty(shape=[self.fluid_domain.nverts, 6], dtype=float)", "def setup(self):\n super().setup()\n\n # prepare scratch directory\n unix.mkdir(PATH.ORTHO)\n\n # get data file names from solver\n solver = sys.modules['seisflows_solver']\n\n nevt = PAR.NEVT # number of encoded sources\n ntpss = PAR.NTPSS # number of timesteps after steady state\n dt = PAR.DT # total number of timesteps\n nrec = PAR.NREC # number of 
stations\n # ntrace = len(solver.data_filenames)\n freq_min = float(PAR.FREQ_MIN) # minimium frequency of interest\n freq_max = float(PAR.FREQ_MAX) # maximium frequency of interest\n \n #create a mask on relevant frequencies\n freq_full = fftfreq(ntpss, dt) # full frequency compunent\n freq_thresh = 1 / (ntpss * dt) / 200 # threshold for frequency alignment\n freq_idx = np.squeeze(np.where((freq_min <= (freq_full)) & ((freq_full) < freq_max - freq_thresh))) # frequency band of interest\n freq = freq_full[freq_idx] # index of frequencies within the frequency band\n nfreq = len(freq_idx) # number of frequency within the frequency band\n print('Number of frequencies considered: ' +str(nfreq)+' / '+str(len(freq_full)))\n\n # converts time data to Fourier domain\n sff_obs = np.zeros((nfreq, nevt), dtype=complex) # fourier transform of observed source time function\n ft_obs = np.zeros((nfreq, nevt, nrec), dtype=complex) # TODO ntrace fourier transform of observed seismogram\n\n for isrc in range(nevt):\n source_name = solver.source_names_all[isrc] # name of source\n stf_file = solver.stf_files_all[isrc] # name of source file\n with open(stf_file) as f:\n lines = f.readlines()\n stf_obs = []\n for line in lines:\n stf_obs.append(float(line.split()[1]))\n\n sff_obs[:, isrc] = fft(stf_obs, n=ntpss)[freq_idx]\n # for itrace in range(ntrace):\n # trace = self.reader(PATH.DATA + '/' + source_name, solver.data_filenames[itrace])\n # for irec in range(nrec):\n # ft_obs[:, isrc, irec, itrace] = fft(trace[irec].data, n=ntpss)[freq_idx]\n for irec in range(nrec):\n trace = self.reader(PATH.DATA + '/' + source_name, solver.data_filenames[0])\n ft_obs[:, isrc, irec] = fft(trace[irec].data, n=ntpss)[freq_idx]\n \n self.save('freq_idx', freq_idx)\n self.save('freq', freq)\n self.save('sff_obs', sff_obs)\n self.save('ft_obs', ft_obs)", "def main():\n try:\n session = Api()\n plate = session.active_document()\n print(\"Part: {:^30s}\\n\".format(plate.name))\n\n # Check if part is sheetmetal.\n assert plate.name.endswith(\n \".psm\"\n ), \"This macro only works on .psm not {:^30s}\".format(plate.name[-4:])\n\n # Get a reference to the variables collection.\n holes = HoleCollection(plate)\n\n # Display the quantites of different types of holes.\n quantites(\n holes.count,\n holes.count_threaded,\n holes.count_imperial_threaded,\n holes.count_metric_threaded,\n )\n\n # Prototyping table of holes. 
(helper for drafter)\n ## qty_size = dict(len(holes.all_holes())) # >>> 'M6x1':3\n ## print_table(qty_size)\n\n # Prompt user selection\n units = prompt_units_selection()\n\n if units == \"metric\": # if metric\n for hole in holes.threaded():\n o = Hole(hole)\n if o.is_metric():\n continue\n imperial = o.size\n holedata = Hole.get_equivalence(o, mapping=mappingToMetric)\n o.conversion_to_metric(holedata)\n metric = o.size\n header()\n print(\" {:<30s} {:<30s}\".format(imperial, metric))\n footer()\n\n elif units == \"imperial\": # if imperial\n for hole in holes.threaded():\n o = Hole(hole)\n if o.is_imperial():\n continue\n metric = o.size\n holedata = Hole.get_equivalence(o, mapping=mappingToImp) # correction\n o.conversion_to_metric(holedata) # correction\n imperial = o.size\n header()\n print(\" {:<30s} {:<30s}\".format(metric, imperial))\n footer()\n\n elif units == \"debug\":\n for hole in holes.threaded():\n o = Hole(hole)\n print(o.__repr__())\n\n else:\n sys.exit()\n\n # Display a second time the quantites of different types of holes.\n quantites(\n holes.count,\n holes.count_threaded,\n holes.count_imperial_threaded,\n holes.count_metric_threaded,\n state=\"(Changed state)\",\n )\n\n except AssertionError as err:\n print(err.args)\n\n except Exception as ex:\n print(ex.args)\n\n else:\n pass\n\n finally:\n raw_input(\"\\nPress any key to exit...\")\n sys.exit()", "def type(self):\n return self.INEQUALITY", "def type(self):\n return self.INEQUALITY", "def hilfe(self):\n sKreis_hilfe(3)", "def mode(self):\r\n pass", "def freq_optimization(self):\n index = identify_scale(self.vz, True)\n # In case the patient is limping\n if index > 35:\n index = index / 2\n print(f\"Scale used is {index}\")", "def form_segment(self,aShapers = 'numShapers',aSubShapers = 'numSubShapers',\n loftShape=None,l_basePos = None, baseSize=1.0,\n sizeWidth = 1.0, sizeLoft=1.0,\n side = None,orientHelperPlug = 'orientHelper',formAim='toEnd',\n mFormNull = None,mNoTransformNull = None,\n mDefineEndObj=None):\n _str_func = 'form_segment'\n log.debug(\"|{0}| >> self: {1}\".format(_str_func,self)+ '-'*80)\n _short = self.p_nameShort\n mc.select(cl=1)#...why maya....\n #_size_handle = baseSize\n #_size_loft = sizeLoft\n _size_width = sizeWidth\n \n _size_handle = 1.0\n _size_loft = 1.0\n \n _side = side\n _loftShape = loftShape\n _l_basePos = l_basePos\n md_handles = {}\n ml_handles = []\n ml_loftHandles = []\n md_loftHandles ={}\n ml_shapers = []\n ml_handles_chain = []\n _formAim = formAim\n \n _short = self.mNode \n _int_shapers = self.getMayaAttr(aShapers)\n _int_sub = self.getMayaAttr(aSubShapers) \n _loftSetup = self.getEnumValueString('loftSetup')\n _loftShape = self.getEnumValueString('loftShape')\n \n _baseName = self.cgmName\n if not _baseName:\n _baseName = self.blockType\n \n #Loft Shapes...-----------------------------------------------------------------------\n if _loftSetup == 'loftList':\n _l_loftShapes = ATTR.datList_get(_short,'loftList',enum=True) or []\n if len(_l_loftShapes) != _int_shapers:\n log.warning(\"|{0}| >> Not enough shapes in loftList. 
Padding with loftShape\".format(_str_func,i,_loftShape))\n while len(_l_loftShapes) < _int_shapers:\n _l_loftShapes.append(self.loftShape)\n else:\n _l_loftShapes = [_loftShape for i in range(_int_shapers)]\n\n log.debug(\"|{0}| >> loftShapes: {1}\".format(_str_func,_l_loftShapes)) \n \n #Subshaper count -------------------------------------------------------------------------\n l_numSubShapers = self.datList_get('numSubShapers')\n int_shapers = self.getMayaAttr(aShapers)\n int_sub = self.getMayaAttr(aSubShapers)\n if not l_numSubShapers:\n l_numSubShapers = [int_sub for i in xrange(int_shapers-1)]\n log.info(\"|{0}| >> l_numSubShapers: {1}\".format(_str_func,l_numSubShapers)) \n\n \n mHandleFactory = self.asHandleFactory()\n mRootUpHelper = self.vectorUpHelper\n #_mVectorAim = MATH.get_obj_vector(self.vectorEndHelper.mNode,asEuclid=True)\n _mVectorUp = MATH.get_obj_vector(mRootUpHelper.mNode,'y+',asEuclid=True) \n #pprint.pprint(vars())\n for i,n in enumerate(['start','end']):\n log.debug(\"|{0}| >> {1}:{2}...\".format(_str_func,i,n)) \n #mHandle = mHandleFactory.buildBaseShape('sphere2',baseSize = _size_handle, shapeDirection = 'y+')\n crv = CURVES.create_fromName('sphere2', [_size_handle,_size_handle,.2* _size_handle], direction = 'y+',baseSize=1)\n mHandle = cgmMeta.validateObjArg(crv, 'cgmObject', setClass=True)\n \n mHandle.p_parent = mFormNull\n \n mHandle.resetAttrs()\n \n self.copyAttrTo('cgmName',mHandle.mNode,'cgmName',driven='target')\n mHandle.doStore('cgmType','formHandle')\n mHandle.doStore('cgmNameModifier',n)\n \n mHandle.doName()\n \n #Convert to loft curve setup ----------------------------------------------------\n mHandleFactory.setHandle(mHandle.mNode)\n #mHandleFactory = self.asHandleFactory(mHandle.mNode)\n if n == 'start':\n _shape = 'loft' + _l_loftShapes[0][0].capitalize() + ''.join(_l_loftShapes[0][1:])\n else:\n _shape = 'loft' + _l_loftShapes[-1][0].capitalize() + ''.join(_l_loftShapes[-1][1:])\n \n mLoftCurve = mHandleFactory.rebuildAsLoftTarget(_shape, _size_loft, shapeDirection = 'z+',rebuildHandle = False)\n #mc.makeIdentity(mHandle.mNode,a=True, s = True)#...must freeze scale once we're back parented and positioned\n \n mHandleFactory.color(mHandle.mNode) \n mHandle.p_position = _l_basePos[i]\n \n md_handles[n] = mHandle\n ml_handles.append(mHandle)\n \n md_loftHandles[n] = mLoftCurve \n ml_loftHandles.append(mLoftCurve)\n \n mLoftCurve.p_parent = mFormNull\n mTransformedGroup = mLoftCurve.getMessageAsMeta('transformedGroup')\n if not mTransformedGroup:\n mTransformedGroup = mLoftCurve.doGroup(True,True,asMeta=True,typeModifier = 'transformed',setClass='cgmObject')\n mHandle.doConnectOut('scale', \"{0}.scale\".format(mTransformedGroup.mNode))\n mc.pointConstraint(mHandle.mNode,mTransformedGroup.mNode,maintainOffset=False)\n #mc.scaleConstraint(mHandle.mNode,mTransformedGroup.mNode,maintainOffset=True)\n \n mBaseAttachGroup = mHandle.doGroup(True,True, asMeta=True,typeModifier = 'attach')\n \n #Constrain the define end to the end of the form handles\n if mDefineEndObj:\n mc.pointConstraint(md_handles['end'].mNode,mDefineEndObj.mNode,maintainOffset=False)\n\n\n #>> Base Orient Helper ============================================================================\n log.debug(\"|{0}| >> Base orient helper...\".format(_str_func) + '-'*40) \n\n mHandleFactory = self.asHandleFactory(md_handles['start'].mNode)\n mBaseOrientCurve = mHandleFactory.addOrientHelper(baseSize = _size_width,\n shapeDirection = 'y+',\n setAttrs = {'ty':_size_width})\n\n 
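        # The orient helper created above takes the block's cgmName below, then its aim group is
        # point-constrained to the start handle and aimed at the end handle using mRootUpHelper as world up.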
self.copyAttrTo('cgmName',mBaseOrientCurve.mNode,'cgmName',driven='target')\n mBaseOrientCurve.doName()\n\n mBaseOrientCurve.p_parent = mFormNull\n mOrientHelperAimGroup = mBaseOrientCurve.doGroup(True,asMeta=True,typeModifier = 'aim')\n mc.pointConstraint(md_handles['start'].mNode, mOrientHelperAimGroup.mNode )\n \n _const = mc.aimConstraint(ml_handles[1].mNode, mOrientHelperAimGroup.mNode, maintainOffset = False,\n aimVector = [0,0,1], upVector = [0,1,0], \n worldUpObject = mRootUpHelper.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0])\n #worldUpType = 'vector',\n #worldUpVector = [_worldUpVector.x,_worldUpVector.y,_worldUpVector.z]) \n\n self.connectChildNode(mBaseOrientCurve.mNode,orientHelperPlug)\n\n mBaseOrientCurve.setAttrFlags(['ry','rx','translate','scale','v'])\n mHandleFactory.color(mBaseOrientCurve.mNode,controlType='sub')\n mc.select(cl=True)\n\n ml_handles_chain = copy.copy(ml_handles)\n #reload(CORERIG)\n if _int_shapers > 2:\n log.debug(\"|{0}| >> more handles necessary...\".format(_str_func)) \n #Mid Track curve ============================================================================\n log.debug(\"|{0}| >> TrackCrv...\".format(_str_func)) \n _midTrackResult = CORERIG.create_at([mObj.mNode for mObj in ml_handles],'cubicTrack',#'linearTrack',\n baseName='midTrack')\n \n _midTrackCurve = _midTrackResult[0]\n mMidTrackCurve = cgmMeta.validateObjArg(_midTrackCurve,'cgmObject')\n mMidTrackCurve.rename(_baseName + 'midHandlesTrack_crv')\n mMidTrackCurve.parent = mNoTransformNull\n\n for s in _midTrackResult[1]:\n ATTR.set(s[1],'visibility',False)\n\n #>>> mid main handles =====================================================================\n l_scales = []\n for mHandle in ml_handles:\n l_scales.append(mHandle.scale)\n mHandle.scale = 1,1,1\n\n _l_posMid = CURVES.returnSplitCurveList(mMidTrackCurve.mNode,_int_shapers,markPoints = False)\n #_l_pos = [ DIST.get_pos_by_vec_dist(_pos_start, _vec, (_offsetDist * i)) for i in range(self.numControls-1)] + [_pos_end]\n\n\n #Sub handles... 
-----------------------------------------------------------------------------------\n log.debug(\"|{0}| >> Mid Handle creation...\".format(_str_func))\n ml_aimGroups = []\n ml_midHandles = []\n ml_midLoftHandles = []\n for i,p in enumerate(_l_posMid[1:-1]):\n log.debug(\"|{0}| >> mid handle cnt: {1} | p: {2}\".format(_str_func,i,p))\n crv = CURVES.create_fromName('sphere2', [_size_handle,_size_handle,.2* _size_handle], direction = 'y+',baseSize=1)\n mHandle = cgmMeta.validateObjArg(crv, 'cgmObject', setClass=True)\n\n self.copyAttrTo('cgmName',mHandle.mNode,'cgmName',driven='target')\n mHandle.doStore('cgmType','formHandle')\n mHandle.doStore('cgmNameModifier',\"form_{0}\".format(i+1))\n mHandle.doName() \n\n _short = mHandle.mNode\n ml_midHandles.append(mHandle)\n mHandle.p_position = p\n\n mHandle.p_parent = mFormNull\n #mHandle.resetAttrs()\n\n mHandleFactory.setHandle(mHandle.mNode)\n mLoftCurve = mHandleFactory.rebuildAsLoftTarget('loft' + _l_loftShapes[i+1][0].capitalize() + ''.join(_l_loftShapes[i+1][1:]),#_loftShape,\n _size_loft,\n shapeDirection = 'z+',rebuildHandle = False)\n #mc.makeIdentity(mHandle.mNode,a=True, s = True)#...must freeze scale once we're back parented and positioned\n ml_midLoftHandles.append(mLoftCurve)\n\n mTransformedGroup = mHandle.getMessageAsMeta('transformedGroup')\n if not mTransformedGroup:\n mTransformedGroup = mHandle.doGroup(True,True,asMeta=True,typeModifier = 'transformed')\n #mGroup = mHandle.doGroup(True,True,asMeta=True,typeModifier = 'master')\n #mAimGroup = mHandle.doGroup(True,True,asMeta=True,typeModifier = 'aim')\n\n\n _vList = DIST.get_normalizedWeightsByDistance(mTransformedGroup.mNode,\n [ml_handles[0].mNode,ml_handles[-1].mNode])\n\n #_scale = mc.scaleConstraint([ml_handles[0].mNode,ml_handles[-1].mNode],\n # mTransformedGroup.mNode,maintainOffset = False)\n \n BLOCKSHAPES.attachToCurve(mHandle, mMidTrackCurve, parentTo = mNoTransformNull, trackLink='transformedGroup')\n \n #_res_attach = RIGCONSTRAINT.attach_toShape(mTransformedGroup.mNode, mMidTrackCurve.mNode, 'conPoint')\n #TRANS.parent_set(_res_attach[0], mNoTransformNull.mNode)\n\n mTransformedGroup.resetAttrs('rotate')\n\n\n mLoftCurve.p_parent = mFormNull\n mLoftTransformedGroup = mLoftCurve.getMessageAsMeta('transformedGroup')\n if not mLoftTransformedGroup:\n mLoftTransformedGroup = mLoftCurve.doGroup(True,asMeta=True,typeModifier = 'transformed')\n\n #mTransformedGroup = mLoftCurve.doGroup(True,True,asMeta=True,typeModifier = 'transformed')\n #mHandle.doConnectOut('scale', \"{0}.scale\".format(mScaleGroup.mNode))\n mc.scaleConstraint(mHandle.mNode,\n mLoftTransformedGroup.mNode,maintainOffset = False) \n mc.pointConstraint(mHandle.mNode,mLoftTransformedGroup.mNode,maintainOffset=False)\n\n\n #for c in [_scale]:\n #CONSTRAINT.set_weightsByDistance(c[0],_vList)\n\n mHandleFactory = self.asHandleFactory(mHandle.mNode)\n\n CORERIG.colorControl(mHandle.mNode,_side,'main',transparent = True)\n CORERIG.colorControl(mLoftCurve.mNode,_side,'main',transparent = True)\n\n #Push scale back...\n for i,mHandle in enumerate(ml_handles):\n mHandle.scale = l_scales[i]\n\n\n\n #Main Track curve ============================================================================\n ml_handles_chain = [ml_handles[0]] + ml_midHandles + [ml_handles[-1]]\n\n log.debug(\"|{0}| >> Main TrackCrv...\".format(_str_func)) \n _mainTrackResult = CORERIG.create_at([mObj.mNode for mObj in ml_handles_chain],'linearTrack',\n baseName='mainTrack')\n\n mMainTrackCurve = 
cgmMeta.validateObjArg(_mainTrackResult[0],'cgmObject')\n mMainTrackCurve.rename(_baseName+ 'mainHandlesTrack_crv')\n mMainTrackCurve.parent = mNoTransformNull\n\n for s in _mainTrackResult[1]:\n ATTR.set(s[1],'visibility',False) \n\n\n\n log.debug(\"|{0}| >> Aim main handles...\".format(_str_func)+'-'*40) \n\n #AimEndHandle ============================================================================\n log.debug(\"|{0}| >> Aim end...\".format(_str_func)) \n mGroup = md_handles['end'].doGroup(True,True,asMeta=True,typeModifier = 'aim') \n _const = mc.aimConstraint(self.mNode, mGroup.mNode,\n maintainOffset = False,\n aimVector = [0,0,-1],\n upVector = [0,1,0], \n worldUpObject = mRootUpHelper.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0]) \n #mAimGroup = md_handles['end'].doGroup(True, asMeta=True,typeModifier = 'aim')\n #...not doing this now...\n #SNAP.go(md_handles['end'].mNode, self.mNode, position=False)\n\n \"\"\"\n _const = mc.aimConstraint(self.mNode, md_handles['end'].mNode, maintainOffset = False,\n aimVector = [0,0,-1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0])\"\"\"\n\n #cgmMeta.cgmNode(_const[0]).doConnectIn('worldUpVector','{0}.baseUp'.format(self.mNode))\n\n\n #AimStartHandle ============================================================================\n log.debug(\"|{0}| >> Aim main handles...\".format(_str_func)) \n mGroup = md_handles['start'].doGroup(True,True,asMeta=True,typeModifier = 'aim') \n _const = mc.aimConstraint(md_handles['end'].mNode, mGroup.mNode,\n maintainOffset = False,\n aimVector = [0,0,1],\n upVector = [0,1,0], \n worldUpObject = mRootUpHelper.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0])\n\n\n\n #>>> Aim Main loft curves ================================================================== \n log.debug(\"|{0}| >> Aim main loft curves...\".format(_str_func)) \n\n\n #Aim the segment -------------------------------------------------------------------------\n \"\"\"\n if _formAim == 'toEnd':\n for i,mHandle in enumerate(ml_handles):\n if mHandle != ml_handles[0] and mHandle != ml_handles[-1]:\n #if i > 0 and i < len(ml_handles) - 1:\n mAimGroup = mHandle.doGroup(True,asMeta=True,typeModifier = 'aim')\n\n mc.aimConstraint(ml_handles[-1].mNode, mAimGroup.mNode, maintainOffset = True, #skip = 'z',\n aimVector = [0,0,1], upVector = [0,1,0], worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = 'objectrotation', worldUpVector = [0,1,0])\n else:#chain\n for i,mHandle in enumerate(ml_handles):\n if mHandle != ml_handles[0] and mHandle != ml_handles[-1]:\n #if i > 0 and i < len(ml_handles) - 1:\n mAimGroup = mHandle.doGroup(True,asMeta=True,typeModifier = 'aim')\n\n mc.aimConstraint(ml_handles[i+1].mNode, mAimGroup.mNode,\n maintainOffset = True,\n aimVector = [0,0,1],\n upVector = [0,1,0],\n worldUpObject = mHandle.masterGroup.mNode,\n worldUpType = 'objectrotation', worldUpVector = [0,1,0])\"\"\"\n\n\n for i,mHandle in enumerate(ml_handles_chain):\n mLoft = mHandle.loftCurve\n _str_handle = mHandle.mNode\n\n mTransformedGroup = mLoft.getMessageAsMeta('transformedGroup')\n if not mTransformedGroup:\n mTransformedGroup = mLoft.doGroup(True,asMeta=True,typeModifier = 'transformed')\n \n mLoft.visibility = 1\n #mLoft.setAttrFlags(['translate'])\n\n for mShape in mLoft.getShapes(asMeta=True):\n mShape.overrideDisplayType = 0\n \n if _formAim == 'orientToHandle':\n mc.orientConstraint([mHandle.mNode],\n mTransformedGroup.mNode, maintainOffset = 
False)\n else:\n _worldUpType = 'objectrotation'\n _worldUpBack = 'objectrotation'\n \n \n _aimBack = None\n _aimForward = None\n _backUpObj = None\n \n if mHandle == ml_handles_chain[0]:\n _aimForward = ml_handles_chain[i+1].mNode\n elif mHandle == ml_handles_chain[-1]:\n if len(ml_handles_chain)>2:\n _aimBack = ml_handles_chain[-2].mNode#md_handles['start'].mNode#ml_handles_chain[].mNode\n else:\n _aimBack = md_handles['start'].mNode\n else:\n _aimForward = ml_handles_chain[i+1].mNode\n _aimBack = ml_handles_chain[i-1].mNode\n \n if _aimBack and md_handles.get('lever'):\n if _aimBack == md_handles.get('lever').mNode:\n _backUpObj = md_handles.get('lever').mNode\n \n if _aimForward and _aimBack is None:\n mc.aimConstraint(_aimForward, mTransformedGroup.mNode, maintainOffset = False,\n aimVector = [0,0,1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = _worldUpType, \n worldUpVector = [0,1,0])\n elif _aimBack and _aimForward is None:\n mc.aimConstraint(_aimBack, mTransformedGroup.mNode, maintainOffset = False,\n aimVector = [0,0,-1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = _worldUpBack, \n worldUpVector = [0,1,0])\n else:\n mAimForward = mLoft.doCreateAt()\n mAimForward.p_parent = mHandle.p_parent#mLoft\n mAimForward.doStore('cgmName',mHandle) \n mAimForward.doStore('cgmTypeModifier','forward')\n mAimForward.doStore('cgmType','aimer')\n mAimForward.doName()\n \n mAimBack = mLoft.doCreateAt()\n mAimBack.p_parent = mHandle.p_parent\n mAimBack.doStore('cgmName',mHandle) \n mAimBack.doStore('cgmTypeModifier','back')\n mAimBack.doStore('cgmType','aimer')\n mAimBack.doName()\n \n mc.aimConstraint(_aimForward, mAimForward.mNode, maintainOffset = False,\n aimVector = [0,0,1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = _worldUpType, \n worldUpVector = [0,1,0])\n \n if _backUpObj == None:\n _backUpObj = mBaseOrientCurve.mNode\n \n mc.aimConstraint(_aimBack, mAimBack.mNode, maintainOffset = False,\n aimVector = [0,0,-1], upVector = [0,1,0], \n worldUpObject = _backUpObj,\n worldUpType = _worldUpType, \n worldUpVector = [0,1,0]) \n \n const = mc.orientConstraint([mAimForward.mNode, mAimBack.mNode],\n mTransformedGroup.mNode, maintainOffset = False)[0]\n \n ATTR.set(const,'interpType',2)#.shortest...\n \n #...also aim our main handles...\n \n if mHandle not in [md_handles['end'],md_handles['start']]:\n log.debug(\"|{0}| >> {2} | Aiming Handle: {1}\".format(_str_func,mHandle,_formAim))\n _aimForward = ml_handles_chain[i+1].mNode\n \n mHandleAimGroup = mHandle.getMessageAsMeta('transformedGroup')\n if not mHandleAimGroup:\n mHandleAimGroup = mHandle.doGroup(True,asMeta=True,typeModifier = 'transformed')\n\n if _formAim == 'toEnd':\n mc.aimConstraint(md_handles['end'].mNode,\n mHandleAimGroup.mNode, maintainOffset = False,\n aimVector = [0,0,1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0])\n elif _formAim == 'chain':\n mc.aimConstraint(_aimForward, mHandleAimGroup.mNode, maintainOffset = False,\n aimVector = [0,0,1], upVector = [0,1,0], \n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = 'objectrotation', \n worldUpVector = [0,1,0])\n\n \"\"\"\n if mHandle in [md_handles['start'],md_handles['end']]:\n _lock = []\n #if mHandle == md_handles['start']:\n # _lock.append('rotate')\n\n ##ATTR.set_alias(mHandle.mNode,'sy','handleScale') \n ##ATTR.set_standardFlags( mHandle.mNode, _lock)\n 
##mHandle.doConnectOut('sy',['sx','sz'])\n #ATTR.set_standardFlags( mHandle.mNode, _lock)\n\n else:\n ATTR.set_standardFlags( mHandle.mNode, ['sz'])\n ATTR.connect('{0}.sy'.format(mHandle.mNode), '{0}.sz'.format(mHandle.mNode))\"\"\"\n\n\n ml_shapers = copy.copy(ml_handles_chain)\n #>>> shaper handles =======================================================================\n if _int_sub or l_numSubShapers:\n _numSubShapers = _int_sub\n ml_shapers = []\n log.debug(\"|{0}| >> Sub shaper handles: {1}\".format(_str_func,_numSubShapers))\n \n\n mOrientHelper = mBaseOrientCurve\n\n log.debug(\"|{0}| >> pairs...\".format(_str_func))\n\n\n ml_handlesToShaper = ml_handles_chain\n ml_shapers = [ml_handlesToShaper[0]]\n\n ml_pairs = LISTS.get_listPairs(ml_handlesToShaper)\n #pprint.pprint(ml_pairs)\n\n\n for i,mPair in enumerate(ml_pairs):\n log.debug(cgmGEN._str_subLine)\n ml_shapersTmp = []\n \n _numSubShapers = l_numSubShapers[i]\n\n _mStart = mPair[0]\n _mEnd = mPair[1]\n _end = _mEnd.mNode\n log.debug(\"|{0}| >> pairs: {1} | end: {2}\".format(_str_func,i,_end))\n\n _pos_start = _mStart.p_position\n _pos_end = _mEnd.p_position \n\n _leverLoftAimMode = False\n\n \n\n _vec = MATH.get_vector_of_two_points(_pos_start, _pos_end)\n _offsetDist = DIST.get_distance_between_points(_pos_start,_pos_end) / (_numSubShapers+1)\n _l_pos_seg = [ DIST.get_pos_by_vec_dist(_pos_start,\n _vec,\n (_offsetDist * ii)) for ii in range(_numSubShapers+1)] + [_pos_end]\n\n _mVectorAim = MATH.get_vector_of_two_points(_pos_start, _pos_end,asEuclid=True)\n #_mVectorUp = _mVectorAim.up()\n #_worldUpVector = [_mVectorUp.x,_mVectorUp.y,_mVectorUp.z] \n\n\n #Linear track curve ----------------------------------------------------------------------\n _linearCurve = mc.curve(d=1,p=[_pos_start,_pos_end])\n mLinearCurve = cgmMeta.validateObjArg(_linearCurve,'cgmObject')\n\n l_clusters = []\n for ii,cv in enumerate(mLinearCurve.getComponents('cv')):\n _res = mc.cluster(cv, n = 'seg_{0}_{1}_cluster'.format(mPair[0].p_nameBase,ii))\n mCluster = cgmMeta.asMeta(_res[1])\n mCluster.p_parent = mFormNull\n mCluster.v = 0\n mc.pointConstraint(mPair[ii].mNode,\n mCluster.mNode,maintainOffset=True)\n l_clusters.append(_res)\n\n mLinearCurve.parent = mNoTransformNull\n mLinearCurve.rename('seg_{0}_trackCrv'.format(i))\n\n\n\n #Tmp loft mesh -------------------------------------------------------------------\n _l_targets = [mObj.loftCurve.mNode for mObj in mPair]\n log.debug(_l_targets)\n _res_body = mc.loft(_l_targets, o = True, d = 3, po = 0 )\n _str_tmpMesh =_res_body[0]\n\n l_scales_seg = []\n\n #Sub handles... 
--------------------------------------------------------------------------\n for ii,p in enumerate(_l_pos_seg[1:-1]):\n #mHandle = mHandleFactory.buildBaseShape('circle', _size, shapeDirection = 'y+')\n mHandle = cgmMeta.cgmObject(name = 'subHandle_{0}_{1}'.format(i,ii))\n _short = mHandle.mNode\n ml_handles.append(mHandle)\n mHandle.p_position = p\n if _leverLoftAimMode:\n SNAP.aim_atPoint(_short,_l_pos_seg[ii+2],'z+', 'y+', mode='vector',\n vectorUp = _mVectorLeverUp)\n else:\n SNAP.aim_atPoint(_short,_l_pos_seg[ii+2],'z+', 'y+', mode='vector', vectorUp = _mVectorUp)\n\n #...Make our curve\n _d = RAYS.cast(_str_tmpMesh, _short, 'x+')\n #pprint.pprint(_d)\n log.debug(\"|{0}| >> Casting {1} ...\".format(_str_func,_short))\n cgmGEN.log_info_dict(_d)\n _v = _d['uvs'][_str_tmpMesh][0][0]\n log.debug(\"|{0}| >> v: {1} ...\".format(_str_func,_v))\n\n #>>For each v value, make a new curve ----------------------------------------------------------------- \n #duplicateCurve -ch 1 -rn 0 -local 0 \"loftedSurface2.u[0.724977270271534]\"\n _crv = mc.duplicateCurve(\"{0}.u[{1}]\".format(_str_tmpMesh,_v), ch = 0, rn = 0, local = 0)\n log.debug(\"|{0}| >> created: {1} ...\".format(_str_func,_crv)) \n\n CORERIG.shapeParent_in_place(_short, _crv, False)\n\n #self.copyAttrTo(_baseNameAttrs[1],mHandle.mNode,'cgmName',driven='target')\n self.copyAttrTo('cgmName',mHandle.mNode,'cgmName',driven='target')\n mHandle.doStore('cgmNameModifier','form_{0}_sub_{1}'.format(i,ii))\n mHandle.doStore('cgmType','shapeHandle')\n mHandle.doName()\n\n mHandle.p_parent = mFormNull\n\n mGroup = mHandle.doGroup(True,True,asMeta=True,typeModifier = 'master')\n mGroup.p_parent = mFormNull\n\n _vList = DIST.get_normalizedWeightsByDistance(mGroup.mNode,[mPair[0].mNode,mPair[1].mNode])\n\n\n if _leverLoftAimMode:\n upObj = md_handles['lever'].mNode\n else:\n upObj = mBaseOrientCurve.mNode\n\n\n\n \n BLOCKSHAPES.attachToCurve(mHandle, mLinearCurve, parentTo = mNoTransformNull, trackLink='masterGroup')\n \"\"\"\n _res_attach = RIGCONSTRAINT.attach_toShape(mGroup.mNode, \n mLinearCurve.mNode,\n 'conPoint')\n TRANS.parent_set(_res_attach[0], mNoTransformNull.mNode)\"\"\"\n # Has to be after the bind\n _scale = mc.scaleConstraint([mPair[0].mNode,mPair[1].mNode],mGroup.mNode,maintainOffset = False)#Point contraint loc to the object\n\n for c in [_scale]:\n CONSTRAINT.set_weightsByDistance(c[0],_vList)\n \n mc.aimConstraint([_end], mGroup.mNode, maintainOffset = False, #skip = 'z',\n aimVector = [0,0,1], upVector = [0,1,0],\n worldUpObject = upObj,\n worldUpType = 'objectrotation', worldUpVector = [0,1,0]) \n\n #Convert to loft curve setup ----------------------------------------------------\n mHandleFactory = self.asHandleFactory(mHandle.mNode)\n #mHandleFactory.rebuildAsLoftTarget('self', None, shapeDirection = 'z+')\n mHandle.doStore('loftCurve',mHandle)\n\n \n CORERIG.colorControl(mHandle.mNode,_side,'sub',transparent = True) \n #LOC.create(position = p)\n ml_shapers.append(mHandle)\n ml_shapersTmp.append(mHandle)\n\n\n ml_shapers.append(mPair[1])\n mc.delete(_res_body)\n\n _mStart.msgList_connect('subShapers',[mObj.mNode for mObj in ml_shapersTmp]) \n\n #Push scale back...\n #for mHandle in mPair:\n #mHandle.scale = l_scales_seg[i]\n\n #Form Loft Mesh -------------------------------------\n #mFormLoft = self.getMessage('formLoftMesh',asMeta=True)[0] \n #for s in mFormLoft.getShapes(asMeta=True):\n #s.overrideDisplayType = 1 \n\n\n #Aim the segment\n \"\"\"\n for ii,mHandle in enumerate(ml_shapersTmp):\n mAimGroup = 
mHandle.doGroup(True,asMeta=True,typeModifier = 'aim')\n log.debug(\"|{0}| >> seg constrain: {1} {2} | end: {3}\".format(_str_func,i,ii,_end))\n\n mc.aimConstraint([_end], mAimGroup.mNode, maintainOffset = True, #skip = 'z',\n aimVector = [0,0,1], upVector = [0,1,0],\n worldUpObject = mBaseOrientCurve.mNode,\n worldUpType = 'objectrotation', worldUpVector = [0,1,0])\"\"\" \n \n \n controller_wireHandles(self,ml_handles + ml_shapers,'form')\n controller_walkChain(self,ml_handles_chain,'form')\n \n \"\"\"\n ml_done = []\n if cgmGEN.__mayaVersion__ >= 2018:\n \n for mHandle in ml_handles + ml_shapers:\n if mHandle in ml_done:\n continue\n if not mHandle:\n continue\n mLoft = mHandle.getMessageAsMeta('loftCurve')\n if mLoft:\n mLoft = cgmMeta.controller_get(mLoft)\n mLoft.visibilityMode = 2\n ml_done.append(mLoft)\n mController = cgmMeta.controller_get(mHandle)\n mController.visibilityMode = 2 \n ml_done.append(mController)\n \n \n \n for mObj in ml_done:\n try:\n ATTR.connect(\"{0}.visProximityMode\".format(self.mNode),\n \"{0}.visibilityMode\".format(mObj.mNode)) \n except Exception,err:\n log.error(err)\n\n self.msgList_append('formStuff',mObj)\n \"\"\"\n return md_handles,ml_handles,ml_shapers,ml_handles_chain", "def OnAbout(self, event):\n messtr = u\"单细胞簇识别系统\\n\"\n messtr =messtr +u\"这是个识别单细胞簇的层次聚类系统,使用方法如下:\\n\" \n messtr =messtr +u\"1.将你的数据处理成三个文件expr.h5,features.txt,labels.txt,分别是用h5py.create_dataset创建的N细胞*M基因的细胞表达式矩阵和用np.savetxt函数保存的基因文件和标签文件(注意一行一个不能有空格)放在一个文件夹即可,可以参考prosess_sys.py\\n\"\n messtr =messtr +u\"2.点击选择文件按钮选择文件夹,此时右边会提示成功与否\\n\"\n messtr =messtr +u\"3.thresh表示过滤掉某个基因表达的细胞数少于百分比细胞数的,范围为0-1,为零时不过滤低表达基因\\n\" \n messtr =messtr +u\"z_cutoff是离散值,bins是分为几份,是整数,将基因按照在所有细胞表达均值分成bins份,然后去掉每一份zscore小于z_cutoff的基因\\n\"\n messtr =messtr +u\"4.可以选择不同的降维算法进行降维\\n\"\n messtr =messtr +u\"5.split_score和merge_score是聚类的两个超参数,一般后者是前者的一半,基于韦尔奇t检查的两个集群之间的距离度量如果大于这个split_score就分裂,小于merge_score就合并(采用的聚类方法是先分裂再合并的)\\n\"\n messtr =messtr +u\"6.ys是层次聚类分裂的结果,ym是分裂再凝聚后的结果,ySC3是SC3算法的结果,ySafe是SAFE算法的结果,yclf是一致性聚类的结果,yKmean是kmeans算法的结果\"\n wx.MessageBox(messtr,\n \"About System\",\n wx.OK|wx.ICON_INFORMATION)", "def get_description(self):\n d = {}\n d[\"type\"] = \"SymbolicIntegral0\"\n return d", "def print_summary(self, print_level = 0):\n\n print(\"==========================\")\n print(\"= FUNtoFEM model summary =\")\n print(\"==========================\")\n print(\"Model name:\", self.name)\n print(\"Number of bodies:\", len(self.bodies))\n print(\"Number of scenarios:\", len(self.scenarios))\n print(\" \")\n print(\"------------------\")\n print(\"| Bodies summary |\")\n print(\"------------------\")\n for body in self.bodies:\n print(\"Body:\", body.id, body.name)\n print(\" coupling group:\", body.group)\n print(\" transfer scheme:\", type(body.transfer))\n print(\" shape parameteration:\", type(body.shape))\n for vartype in body.variables:\n print(' variable type:', vartype)\n print(' number of ', vartype, ' variables:', len(body.variables[vartype]))\n if print_level >= 0:\n for var in body.variables[vartype]:\n print(' variable:', var.name, ', active?', var.active,', coupled?', var.coupled)\n print(' value and bounds:', var.value, var.lower, var.upper)\n\n print(\" \")\n print(\"--------------------\")\n print(\"| Scenario summary |\")\n print(\"--------------------\")\n for scenario in self.scenarios:\n print(\"scenario:\", scenario.id, scenario.name)\n print(\" coupling group:\", scenario.group)\n print(\" steps:\", scenario.steps)\n print(\" steady?:\", scenario.steady)\n for func in 
scenario.functions:\n print(' function:', func.name, ', analysis_type:', func.analysis_type)\n print(' adjoint?', func.adjoint)\n if not scenario.steady:\n print(' time range', func.start, ',', func.stop)\n print(' averaging', func.averaging)\n\n\n for vartype in scenario.variables:\n print(' variable type:', vartype)\n print(' number of ', vartype, ' variables:', len(scenario.variables[vartype]))\n if print_level >= 0:\n for var in scenario.variables[vartype]:\n print(' variable:', var.id, var.name, ', active?', var.active,', coupled?', var.coupled)\n print(' value and bounds:', var.value, var.lower, var.upper)", "def stoich_flue_gas(self, nw):\n lamb = 1\n n_fuel = 1\n m_fuel = 1 / molar_mass_flow(self.fuel.val) * n_fuel\n m_fuel_fg = m_fuel\n m_co2 = 0\n m_h2o = 0\n molar_masses[self.h2o] = CP.PropsSI('M', self.h2o)\n molar_masses[self.co2] = CP.PropsSI('M', self.co2)\n molar_masses[self.o2] = CP.PropsSI('M', self.o2)\n\n self.fg = {}\n self.fg[self.co2] = 0\n self.fg[self.h2o] = 0\n\n for f, x in self.fuel.val.items():\n fl = set(list(self.fuels())).intersection(\n set([a.replace(' ', '') for a in CP.get_aliases(f)]))\n\n if len(fl) == 0:\n if f in self.fg:\n self.fg[f] += x * m_fuel\n else:\n self.fg[f] = x * m_fuel\n else:\n n_fluid = x * m_fuel / molar_masses[f]\n m_fuel_fg -= n_fluid * molar_masses[f]\n structure = fluid_structure(f)\n n = {}\n for el in ['C', 'H', 'O']:\n if el in structure:\n n[el] = structure[el]\n else:\n n[el] = 0\n\n m_co2 += n_fluid * n['C'] * molar_masses[self.co2]\n m_h2o += n_fluid * n['H'] / 2 * molar_masses[self.h2o]\n\n self.fg[self.co2] += m_co2\n self.fg[self.h2o] += m_h2o\n\n n_o2 = (m_co2 / molar_masses[self.co2] +\n 0.5 * m_h2o / molar_masses[self.h2o]) * lamb\n m_air = n_o2 * molar_masses[self.o2] / self.air.val[self.o2]\n\n self.air_min = m_air / m_fuel\n\n for f, x in self.air.val.items():\n if f != self.o2:\n if f in self.fg:\n self.fg[f] += m_air * x\n else:\n self.fg[f] = m_air * x\n\n m_fg = m_fuel + m_air\n\n for f in self.fg.keys():\n self.fg[f] /= m_fg\n\n if not self.path.is_set:\n self.path.val = None\n\n TESPyFluid(\n self.fuel_alias.val, self.fuel.val, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n TESPyFluid(\n self.fuel_alias.val + '_fg', self.fg, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n msg = (\n 'Generated lookup table for ' + self.fuel_alias.val + ' and for '\n 'stoichiometric flue gas at component ' + self.label + '.')\n logging.debug(msg)\n\n if self.air_alias.val not in ['Air', 'air']:\n TESPyFluid(\n self.air_alias.val, self.air.val, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n msg = ('Generated lookup table for ' + self.air_alias.val +\n ' at stoichiometric combustion chamber ' + self.label + '.')\n else:\n msg = ('Using CoolProp air at stoichiometric combustion chamber ' +\n self.label + '.')\n logging.debug(msg)", "def _featurize_one(self, system: ProteinSystem) -> universe:\n import MDAnalysis as mda\n\n from ..modeling.OEModeling import (\n get_expression_tags,\n delete_residue,\n select_chain,\n )\n from ..utils import LocalFileStorage\n\n logging.debug(\"Interpreting kinase of interest ...\")\n self._interpret_kinase(system.protein)\n\n # select structure\n if hasattr(system.protein, \"pdb_id\"):\n kinase_details = self._select_kinase_structure_by_pdb_id(\n system.protein.pdb_id,\n system.protein.klifs_kinase_id,\n system.protein.chain_id,\n system.protein.alternate_location\n )\n else:\n kinase_details = self._select_kinase_structure_by_klifs_kinase_id(\n system.protein.klifs_kinase_id,\n 
system.protein.dfg,\n system.protein.ac_helix\n )\n\n if not all([\n hasattr(system.protein, \"pdb_id\"),\n hasattr(system.protein, \"path\"),\n hasattr(system.protein, \"electron_density_path\")\n ]):\n logging.debug(f\"Adding attributes to BaseProtein ...\") # TODO: bad idea in a library\n system.protein.pdb_id = kinase_details[\"structure.pdb_id\"]\n system.protein.path = LocalFileStorage.rcsb_structure_pdb(\n kinase_details[\"structure.pdb_id\"], self.cache_dir\n )\n system.protein.electron_density_path = LocalFileStorage.rcsb_electron_density_mtz(\n kinase_details[\"structure.pdb_id\"], self.cache_dir\n )\n\n logging.debug(\"Interpreting system ...\")\n kinase_structure, electron_density = self._interpret_system(system)[1:]\n\n logging.debug(f\"Preparing kinase template structure of {kinase_details['structure.pdb_id']} ...\")\n try:\n design_unit = self._get_design_unit(\n kinase_structure,\n structure_identifier=kinase_details[\"structure.pdb_id\"],\n electron_density=electron_density,\n ligand_name=kinase_details[\"ligand.expo_id\"],\n chain_id=kinase_details[\"structure.chain\"],\n alternate_location=kinase_details[\"structure.alternate_model\"],\n )\n except ValueError:\n logging.debug(\n f\"Could not generate design unit for PDB entry \"\n f\"{kinase_details['structure.pdb_id']} with alternate location \"\n f\"{kinase_details['structure.alternate_model']} and chain ID \" \n f\"{kinase_details['structure.chain']}. Returning empty universe ...\"\n )\n return mda.Universe.empty(0)\n\n logging.debug(\"Extracting kinase and solvent from design unit ...\")\n prepared_kinase, prepared_solvent = self._get_components(design_unit)[:-1]\n\n logging.debug(\"Selecting chain of solvent ...\")\n try:\n prepared_solvent = select_chain(prepared_solvent, kinase_details[\"structure.chain\"])\n except ValueError:\n logging.debug(f\"No solvent in chain {kinase_details['structure.chain']}...\")\n pass\n\n logging.debug(\"Deleting expression tags ...\")\n expression_tags = get_expression_tags(kinase_structure)\n for expression_tag in expression_tags:\n try:\n prepared_kinase = delete_residue(\n prepared_kinase,\n chain_id=expression_tag[\"chain_id\"],\n residue_name=expression_tag[\"residue_name\"],\n residue_id=expression_tag[\"residue_id\"]\n )\n except ValueError:\n pass # wrong chain or not resolved\n\n logging.debug(\"Processing kinase domain ...\")\n processed_kinase_domain = self._process_kinase_domain(\n prepared_kinase,\n system.protein.sequence,\n kinase_details[\"structure.chain\"]\n )\n\n logging.debug(\"Assembling components ...\")\n solvated_kinase = self._assemble_components(processed_kinase_domain, prepared_solvent, None)\n\n logging.debug(\"Updating pdb header ...\")\n solvated_kinase = self._update_pdb_header(\n solvated_kinase,\n kinase_details[\"kinase.klifs_name\"],\n None,\n [(\"COMPND\", f\"\\tKinase template: {kinase_details['structure.pdb_id']}\")]\n )\n\n logging.debug(\"Writing results ...\")\n file_path = self._write_results(\n solvated_kinase,\n \"_\".join([\n f\"{kinase_details['kinase.klifs_name']}\",\n f\"{kinase_details['structure.pdb_id']}\",\n f\"chain{kinase_details['structure.chain']}\",\n f\"altloc{kinase_details['structure.alternate_model']}\"\n ]),\n None\n )\n\n logging.debug(\"Generating new MDAnalysis universe ...\")\n structure = mda.Universe(file_path, in_memory=True)\n\n if not self.output_dir:\n logging.debug(\"Removing structure file ...\")\n file_path.unlink()\n\n return structure", "def post_process_trap(): \n #################### 0) assign internal 
values #################### \n from project_parameters import trapType,debug,trapFile,name,driveAmplitude,driveFrequency,Omega,dcplot,weightElectrodes,coefs,ax,az,phi,save,scale\n #from all_functions import find_saddle,plot_potential,dc_potential,set_voltages,exact_saddle,spher_harm_bas,spher_harm_exp,pfit,plotN\n import pickle\n\n with open(trapFile,'rb') as f:\n trap = pickle.load(f)\n\n qe = trap.configuration.charge\n mass = trap.configuration.mass\n Zval = trap.configuration.position\n r0 = trap.configuration.r0\n RFampl = driveAmplitude \n V0 = mass*(2*np.pi*Omega)**2*(r0*10**-3)**2/qe\n X,Y,Z=trap.instance.X,trap.instance.Y,trap.instance.Z \n data = trap.configuration\n dcVoltages = set_voltages()\n ne = len(weightElectrodes)\n E = trap.instance.E\n out = trap.configuration\n if debug.post_process_trap:\n print dcVoltages,np.max(dcVoltages)#np.sum(abs(dcVoltages))\n plotN(dcVoltages,trap,'set DC voltages') \n Vdc = dc_potential(trap,dcVoltages,E)\n #[IDC,JDC,KDC] = find_saddle(Vdc,X,Y,Z,3,Zval) \n #[XDC,YDC,ZDC] = exact_saddle(Vdc,X,Y,Z,3,Zval)\n #XDC,YDC,ZDC = X[IDC],150/scale,Z[KDC]\n #print XDC,YDC,ZDC,IDC,JDC,KDC\n #dcbasis,dcscale= spher_harm_bas(XDC,YDC,ZDC,X,Y,Z,4)\n #QQ = spher_harm_exp(Vdc,dcbasis,dcscale) \n #print QQ[0:9].T\n #1) RF Analysis\n print('RF Analysis') \n Vrf = RFampl*data.EL_RF\n [Irf,Jrf,Krf] = find_saddle(Vrf,X,Y,Z,2,Zval)\n if debug.post_process_trap:\n plot_potential(Vrf,X,Y,Z,dcplot,'weighted RF potential','V_{rf} (eV)',[Irf,Jrf,Krf])\n #2) DC Analysis\n print('DC Analysis')\n trap = dc_potential(trap,dcVoltages,E,update=None)\n Vdc = trap.instance.DC\n [Idc,Jdc,Kdc] = find_saddle(Vdc,X,Y,Z,3,Zval) # only used to calculate error at end\n if debug.post_process_trap:\n plot_potential(Vdc,X,Y,Z,'1D plots','full DC potential')\n #3) determine the exact saddles of the RF and DC\n trap = dc_potential(trap,dcVoltages,E)\n Vdc = trap.instance.DC\n print('Determining exact RF saddle...')\n [Xrf,Yrf,Zrf] = exact_saddle(Vrf,X,Y,Z,2,Zval) \n print('Determining exact DC saddle...')\n [Xdc,Ydc,Zdc] = exact_saddle(Vdc,X,Y,Z,3,Zval)\n #4) determine stray field (beginning of justAnalyzeTrap)\n print('Determining compensation due to E field...')\n nx,ny,nz=X.shape[0],Y.shape[0],Z.shape[0]\n x,y,z = np.zeros((nx,ny,nz)),np.zeros((nx,ny,nz)),np.zeros((nx,ny,nz))\n for i in range(nx):\n for j in range(ny):\n for k in range(nz):\n x[i,j,k] = X[i]\n y[i,j,k] = Y[j]\n z[i,j,k] = Z[k]\n VlessE = Vdc-E[0]*x-E[1]*y-E[2]*z\n [Xdc,Ydc,Zdc] = exact_saddle(VlessE,X,Y,Z,3) \n dist = np.sqrt((Xrf-Xdc)**2+(Yrf-Ydc)**2+(Zrf-Zdc)**2) \n #5) call pfit to built teh total field and determine the trap characteristics\n [fx,fy,fz,theta,Depth,Xe,Ye,Ze] = pfit(Vrf,Vdc,X,Y,Z,Irf,Jrf,Krf)#pfit(trap,E,Freq,RFampl)\n print('Stray field is ({0},{1},{2}) V/m.'.format(scale*E[0],scale*E[1],scale*E[2]))\n print('With this field, the compensation is optimized to {} micron.'.format(scale*dist))\n print('RF saddle: ({0},{1},{2})\\nDC saddle ({3},{4},{5}).'.format(Xrf,Yrf,Zrf,Xdc,Ydc,Zdc)) \n if debug.trap_depth:\n print('The trap escape position is at ({0},{1},{2}) microns, for a trap depth of {3} mV'.format(Xe*scale,Ye*scale,Ze*scale,Depth*scale))\n print('The trap frequencies are fx = {0} MHz, fy = {1} MHz, and fz = {2} MHz'.format(fx*10**-6,fy*10**-6,fz*10**-6))\n #6) Sanity testing; quality check no longer used\n if debug.post_process_trap:\n rfbasis,rfscale= spher_harm_bas(Xrf,Yrf,Zrf,X,Y,Z,2)\n Qrf = spher_harm_exp(Vrf,rfbasis,rfscale) \n if np.sqrt((Xrf-Xdc)**2+(Yrf-Ydc)**2+(Zrf-Zdc)**2)>0.008: \n 
print('Expanding DC with RF for saniy checking.')\n Qdc = spher_harm_exp(Vdc,rfbasis,rfscale) \n else:\n print('Expanding DC without RF for sanity checking.')\n dcbasis,dcscale= spher_harm_bas(Xdc,Ydc,Zdc,X,Y,Z,2)\n Qdc = spher_harm_exp(Vdc,dcbasis,dcscale) \n Arf = 2*np.sqrt( (3*Qrf[7])**2+(3*Qrf[8])**2 )\n Thetarf = 45*(Qrf[8]/abs(Qrf[8]))-90*np.arctan((3*Qrf[7])/(3*Qrf[8]))/np.pi\n Adc = 2*np.sqrt( (3*Qdc[7])**2+(3*Qdc[8])**2 )\n Thetadc = 45*(Qrf[8]/abs(Qrf[8]))-90*np.arctan((3*Qdc[7])/(3*Qdc[8]))/np.pi\n out.E = E\n out.miscompensation = dist\n out.ionpos = [Xrf,Yrf,Zdc]\n out.ionposIndex = [Irf,Jrf,Krf]\n out.frequency = [fx,fy,fz]\n out.theta = theta\n out.trap_depth = Depth/qe \n out.escapepos = [Xe,Ye,Ze]\n out.Quadrf = 2*np.array([Qrf[7]*3,Qrf[4]/2,Qrf[8]*6,-Qrf[6]*3,-Qrf[5]*3])\n out.Quaddc = 2*np.array([Qdc[7]*3,Qdc[4]/2,Qdc[8]*6,-Qdc[6]*3,-Qdc[5]*3])\n out.Arf = Arf\n out.Thetarf = Thetarf\n out.Adc = Adc\n out.Thetadc = Thetadc\n T = np.array([[2,-2,0,0,0],[-2,-2,0,0,0],[0,4,0,0,0],[0,0,1,0,0],[0,0,0,1,0],[0, 0,0,0,1]])\n Qdrf = out.Quadrf.T\n Qddc = out.Quaddc.T\n out.q = (1/V0)*T*Qdrf\n out.alpha = (2/V0)*T*Qddc\n out.Error = [X[Idc]-Xdc,Y[Jdc]-Ydc,Z[Kdc]-Zdc]\n #7) update the trapping field data structure with instance attributes\n trap.configuration=out\n trap.instance.driveAmplitude = driveAmplitude\n trap.instance.driveFrequency = driveFrequency\n trap.instance.coefs = coefs\n trap.instance.ax = ax\n trap.instance.az = az\n trap.instance.phi = phi\n trap.instance.ppt = True\n trap.instance.out = out\n if save==True:\n print('Saving '+trapFile+' as a data structure...')\n with open(trapFile,'wb') as f:\n pickle.dump(trap,f)\n return 'post_proccess_trap complete' #out # no output needed really", "def setup_fpa():\n # it is a silicon detector. Based on the graph, the quantum efficiency\n # at 1.06 um is ~50%.\n fpa = {}\n fpa[\"quantum_efficiency\"] = 0.5\n return fpa", "def mfrinfo(index):\n supported_psu = list(range(1, _wrapper_get_num_psus() + 1))\n psu_ids = []\n if (index < 0):\n psu_ids = supported_psu\n else:\n psu_ids = [index]\n\n for psu in psu_ids:\n psu_name = _wrapper_get_psu_name(psu)\n if psu not in supported_psu:\n click.echo(\"Error! 
The {} is not available on the platform.\\n\" \\\n \"Number of supported PSU - {}.\".format(psu_name, len(supported_psu)))\n continue\n status = _wrapper_get_psu_status(psu)\n if not status:\n click.echo(\"{} is Not OK\\n\".format(psu_name))\n continue\n\n model_name = _wrapper_get_psu_model(psu)\n mfr_id = _wrapper_get_psu_mfr_id(psu)\n serial_num = _wrapper_get_psu_serial(psu)\n airflow_dir = _wrapper_get_psu_direction(psu)\n \n click.echo(\"{} is OK\\nManufacture Id: {}\\n\" \\\n \"Model: {}\\nSerial Number: {}\\n\" \\\n \"Fan Direction: {}\\n\".format(psu_name, mfr_id, model_name, serial_num, airflow_dir.capitalize()))", "def __init__(self, fluid_domain, structure, io_data):\n super().__init__(fluid_domain, structure, io_data)\n\n self.intersector = Intersector(self.fluid_domain, self.structure)\n #self.ghost_nodes = np.empty(shape=[self.fluid_domain.nverts, 8],dtype=float)\n self.ghost_nodes = np.empty(shape=[self.fluid_domain.nverts, 3], dtype=float)", "def _corresponding_simu(self):\n return SimuHawkes()", "def isFim(self):\r\n return", "def DetermineProcessingSystem(self, problemManager, mineDataManager):\n\n self.CalculateProcessingCapacity(problemManager, mineDataManager)\n \n #Todo(\"determine processing method based on amount and type of ore mined\")\n referenceMetalStr = mineDataManager.theOreBody.type[:2] \n # first two letters of orebody type is assumed to be reference metal for determining processing grade\n # eg AuCu -> gold is reference metal, \n self.processingMethod = referenceMetalStr \n \n #processing loss is fixed\n \n if(referenceMetalStr == \"Au\"):\n self.refiningTake = 0.01\n elif(referenceMetalStr == \"Cu\"):\n self.refiningTake = 0.10\n elif(referenceMetalStr == \"Ni\"):\n self.refiningTake = 0.30\n elif(referenceMetalStr == \"Ag\"):\n self.refiningTake = 0.05\n elif(referenceMetalStr == \"Pb\"):\n self.refiningTake = 0.17\n \n \n self.CalculateProcessingCapex(problemManager, mineDataManager)\n self.CalculateProcessingOpex(problemManager, mineDataManager)\n \n return self", "def displayHNF(self):\n self.heuPlotOverUncertainty()\n self.printHNFTable()", "def stoichiometry(self, fluid):\n air = self.air_alias.val\n fuel = self.fuel_alias.val\n flue_gas = self.fuel_alias.val + '_fg'\n\n ###################################################################\n # calculate fuel and air mass flow\n m_fuel = 0\n for i in self.inl:\n m_fuel += i.m.val_SI * i.fluid.val[fuel]\n\n m_air = 0\n for i in self.inl:\n m_air += i.m.val_SI * i.fluid.val[air]\n\n m_air_min = self.air_min * m_fuel\n\n ###################################################################\n # calculate lambda if not specified\n if not self.lamb.is_set:\n self.lamb.val = m_air / (self.air_min * m_fuel)\n\n ###################################################################\n # calculate excess fuel if lambda is smaller than 1\n m_fuel_exc = 0\n if self.lamb.val < 1:\n m_fuel_exc = m_fuel - m_air / (self.lamb.val * self.air_min)\n\n ###################################################################\n # equation for air\n if fluid == air:\n if self.lamb.val >= 1:\n dm = -m_air_min\n else:\n dm = -m_air\n\n ###################################################################\n # equation for fuel\n elif fluid == fuel:\n dm = -(m_fuel - m_fuel_exc)\n\n ###################################################################\n # equation for flue gas\n elif fluid == flue_gas:\n dm = m_air_min + m_fuel\n\n ###################################################################\n # equation for other components\n else:\n 
dm = 0\n\n res = dm\n for i in self.inl:\n res += i.fluid.val[fluid] * i.m.val_SI\n for o in self.outl:\n res -= o.fluid.val[fluid] * o.m.val_SI\n return res", "def main(idrun):\n int_type = numpy.int32\n double_type = numpy.float64\n float_type = numpy.float32\n complex_type = numpy.complex64\n\n ns = 7\n iudm = 19; iuv = 12\n dname = numpy.array([\"LONGITUDINAL EFIELD \",\"ELEC CURRENT DENSITY\",\n \"VECTOR POTENTIAL \",\"TRANSVERSE EFIELD \",\n \"MAGNETIC FIELD \",\"RADIATIVE VPOTENTIAL\",\n \"ION CURRENT DENSITY \"],dtype=str)\n\n# create string from idrun\n if (idrun < 0):\n cdrun = \"Unknown\"\n while (cdrun.isdigit() == False):\n cdrun = input(\"enter integer idrun: \")\n idrun = int(cdrun)\n cdrun = str(idrun)\n fname = \"diag3.\" + cdrun\n cmfield3.ffopen3(iudm,fname)\n\n# nscalars = table of available diagnostics\n nscalars = numpy.zeros((ns),int_type,'F')\n\n# determine which vector diagnostics are available\n cmfield3.readvdiags3(iudm,nscalars)\n\n nts = numpy.zeros((1),int_type,'F')\n modesx = numpy.zeros((1),int_type,'F')\n modesy = numpy.zeros((1),int_type,'F')\n modesz = numpy.zeros((1),int_type,'F')\n mrec = numpy.zeros((1),int_type,'F')\n fname = numpy.array([\"\"],'S32')\n\n# select diagnostic\n m = numpy.sum(nscalars)\n while True:\n if (m > 0):\n n = -1\n while True:\n if (n < 0):\n for i in range(0,ns):\n if (nscalars[i]==1):\n print (\"enter \", i+1,\" for\", \n numpy.str.rstrip(dname[i]))\n print (\"enter \", 0,\" for EXIT\")\n c = input(\"\")\n if (c.isdigit()):\n n = int(c)\n if (n==0):\n break\n if ((n >= 1) and (n <= ns)):\n if (nscalars[n-1]==0):\n n = -1\n else:\n n = -1\n if (n > 0):\n break\n print (\"invalid entry, try again or enter 0 to quit\")\n else:\n print (\"no vector diagnostic files found\")\n n = 0\n# exit procedure\n if (n==0):\n if (\"vfield\" in globals()):\n vfield = None\n cmfield3.closeff3(iudm)\n return\n\n print (numpy.str.rstrip(dname[n-1]), \" diagnostic selected\")\n\n# return parameters for selected vector diagnostic:\n# nts, modesx, modesy, modesz, nrec, fname\n cmfield3.vdiagparams3(iudm,n,nts,modesx,modesy,modesz,mrec,fname)\n nrec = mrec[0]\n\n# nx/ny/nz = number of global grid points in x/y/z direction\n nx = int(math.pow(2,in3.indx)); ny = int(math.pow(2,in3.indy))\n nz = int(math.pow(2,in3.indz))\n# kyp/kzp = number of real grids in each field partition in y/z\n kyp = int((ny - 1)/in3.nvpy) + 1; kzp = int((nz - 1)/in3.nvpz) + 1\n# kyb/kzb = minimum number of processors in distributed array in y/z\n kyb = int((ny - 1)/kyp) + 1; kzb = int((nz - 1)/kzp) + 1\n# nyv = second dimension of scalar field array, >= ny\n# nzv = third dimension of scalar field array, >= nz\n nyv = kyp*kyb; nzv = kzp*kzb\n\n# allocate vector array\n if (\"vfield\" not in globals()):\n vfield = numpy.empty((in3.ndim,nx,nyv,nzv),float_type,'F')\n dt = in3.dt*float(nts[0])\n\n# open stream file for vector field\n cmfield3.fsopen3(iuv,fname)\n\n# nrec = number of complete records\n nrec = int(nrec/(kyb*kzb))\n print (\"records found: nrec = \", nrec)\n\n# read and transpose vector data\n for ii in range(0,nrec):\n# read real vector field\n cmfield3.freadv3(iuv,vfield,in3.ndim,nx,kyp,kyb,kzp,kzb)\n it = nts[0]*ii\n time = dt*float(ii)\n# show time\n print (\"it,time=\",it,time)\n cmfield3.closeff3(iuv)\n print()", "def create_flux_vector_pf_gr_bif_1(self):\n # volumes_in_primal_set = self.mb.tag_get_data(self.volumes_in_primal_tag, 0, flat=True)[0]\n # volumes_in_primal_set = self.mb.get_entities_by_handle(volumes_in_primal_set)\n lim = 1e-4\n self.dfdsmax = 0\n 
self.fimin = 10\n self.qmax = 0\n self.store_velocity_pf = {}\n store_flux_pf = {}\n for primal in self.primals:\n #1\n primal_id1 = self.mb.tag_get_data(self.primal_id_tag, primal, flat=True)[0]\n primal_id = self.ident_primal[primal_id1]\n fine_elems_in_primal = self.mb.get_entities_by_handle(primal)\n for volume in fine_elems_in_primal:\n #2\n list_keq = []\n list_p = []\n list_gid = []\n list_keq3 = []\n list_gidsadj = []\n list_qw = []\n qw3 = []\n qw = 0\n flux = {}\n velocity = {}\n fi = self.mb.tag_get_data(self.fi_tag, volume, flat=True)[0]\n if fi < self.fimin:\n self.fimin = fi\n kvol = self.mb.tag_get_data(self.perm_tag, volume).reshape([3, 3])\n lamb_w_vol = self.mb.tag_get_data(self.lamb_w_tag, volume, flat=True)[0]\n lamb_o_vol = self.mb.tag_get_data(self.lamb_o_tag, volume, flat=True)[0]\n lbt_vol = lamb_w_vol + lamb_o_vol\n fw_vol = self.mb.tag_get_data(self.fw_tag, volume, flat=True)[0]\n sat_vol = self.mb.tag_get_data(self.sat_tag, volume, flat=True)[0]\n centroid_volume = self.mesh_topo_util.get_average_position([volume])\n z_vol = self.tz - centroid_volume[2]\n adjs_vol = self.mesh_topo_util.get_bridge_adjacencies(volume, 2, 3)\n gid_vol = self.mb.tag_get_data(self.global_id_tag, volume, flat=True)[0]\n pvol = self.mb.tag_get_data(self.pf_tag, volume, flat=True)[0]\n for adj in adjs_vol:\n #3\n gid_adj = self.mb.tag_get_data(self.global_id_tag, adj, flat=True)[0]\n sat_adj = self.mb.tag_get_data(self.sat_tag, adj, flat=True)[0]\n padj = self.mb.tag_get_data(self.pf_tag, adj, flat=True)[0]\n kadj = self.mb.tag_get_data(self.perm_tag, adj).reshape([3, 3])\n centroid_adj = self.mesh_topo_util.get_average_position([adj])\n z_adj = self.tz - centroid_adj[2]\n direction = centroid_adj - centroid_volume\n unit = direction/np.linalg.norm(direction)\n #unit = vetor unitario na direcao de direction\n uni = self.unitary(direction)\n # uni = valor positivo do vetor unitario\n kvol = np.dot(np.dot(kvol,uni),uni)\n kadj = np.dot(np.dot(kadj,uni),uni)\n lamb_w_adj = self.mb.tag_get_data(self.lamb_w_tag, adj, flat=True)[0]\n lamb_o_adj = self.mb.tag_get_data(self.lamb_o_tag, adj, flat=True)[0]\n lbt_adj = lamb_w_adj + lamb_o_adj\n fw_adj = self.mb.tag_get_data(self.fw_tag, adj, flat=True)[0]\n\n keq3 = (kvol*lamb_w_vol + kadj*lamb_w_adj)/2.0\n\n # kvol = kvol*(lamb_w_vol + lamb_o_vol)\n # kadj = kadj*(lamb_w_adj + lamb_o_adj)\n\n keq = self.kequiv(kvol, kadj)*((lbt_adj + lbt_vol)/2.0)\n grad_p = (padj - pvol)/float(abs(np.dot(direction, uni)))\n grad_z = (z_adj - z_vol)/float(abs(np.dot(direction, uni)))\n q = ((grad_p) - grad_z*self.gama)*(np.dot(self.A, uni))*keq\n\n list_keq.append(keq)\n list_p.append(padj)\n list_gid.append(gid_adj)\n\n keq2 = keq\n\n qw += q*(fw_adj + fw_vol)/2.0\n\n #keq = keq*(np.dot(self.A, uni))\n #pvol2 = self.mb.tag_get_data(self.pms_tag, volume, flat=True)[0]\n #padj2 = self.mb.tag_get_data(self.pms_tag, adj, flat=True)[0]\n\n #grad_p2 = (padj2 - pvol2)/float(abs(np.dot(direction, uni)))\n #q = (grad_p)*keq\n #qw3.append(grad_p*keq3*(np.dot(self.A, uni)))\n # if grad_p < 0:\n # #4\n # fw = fw_vol\n # qw += (fw*grad_p*kvol*(np.dot(self.A, uni)))\n # list_qw.append(fw*grad_p*kvol*(np.dot(self.A, uni)))\n #\n # else:\n # fw = fw_adj\n # qw += (fw*grad_p*kadj*(np.dot(self.A, uni)))\n # list_qw.append(fw*grad_p*kadj*(np.dot(self.A, uni)))\n\n\n # if gid_adj > gid_vol:\n # v = -(grad_p)*keq2\n # else:\n # v = (grad_p)*keq2\n\n flux[tuple(unit)] = q\n #velocity[tuple(unit)] = v\n kvol = self.mb.tag_get_data(self.perm_tag, volume).reshape([3, 3])\n if abs(sat_adj 
- sat_vol) < lim or abs(fw_adj -fw_vol) < lim:\n continue\n dfds = abs((fw_adj - fw_vol)/(sat_adj - sat_vol))\n # print('aqui')\n # print(gid_vol)\n # print(gid_adj)\n # print(fw_adj - fw_vol)\n # print(sat_adj - sat_vol)\n # print(dfds)\n if dfds > self.dfdsmax:\n self.dfdsmax = dfds\n\n #2\n # list_keq.append(-sum(list_keq))\n # list_p.append(pvol)\n # list_gid.append(gid_vol)\n #\n # list_keq = np.array(list_keq)\n # list_p = np.array(list_p)\n # resultado = sum(list_keq*list_p)\n\n # print(gid_vol)\n # print(velocity)\n # print('\\n')\n # import pdb; pdb.set_trace()\n #self.store_velocity_pf[volume] = velocity\n store_flux_pf[volume] = flux\n flt = sum(flux.values())\n print('gid')\n print(gid_vol)\n print('flux')\n print(flt)\n print('\\n')\n import pdb; pdb.set_trace()\n self.mb.tag_set_data(self.flux_fine_pf_tag, volume, flt)\n\n if abs(sum(flux.values())) > lim and volume not in self.wells:\n print('nao esta dando conservativo na malha fina')\n print(gid_vol)\n print(sum(flux.values()))\n import pdb; pdb.set_trace()\n\n qmax = max(list(map(abs, flux.values())))\n if qmax > self.qmax:\n self.qmax = qmax\n if volume in self.wells_prod:\n qw_out = sum(flux.values())*fw_vol\n #qw3.append(-qw_out)\n qo_out = sum(flux.values())*(1 - fw_vol)\n self.prod_o.append(qo_out)\n self.prod_w.append(qw_out)\n qw = qw - qw_out\n\n if abs(qw) < lim and qw < 0.0:\n qw = 0.0\n\n elif qw < 0 and volume not in self.wells_inj:\n print('gid')\n print(gid_vol)\n print('qw < 0')\n print(qw)\n import pdb; pdb.set_trace()\n\n else:\n pass\n\n\n # if (qw < 0.0 or sum(qw3) < 0.0) and volume not in self.wells_inj:\n # print('qw3')\n # print(sum(qw3))\n # print('qw')\n # print(qw)\n # import pdb; pdb.set_trace()\n self.mb.tag_set_data(self.flux_w_tag, volume, qw)\n\n # print(self.dfdsmax)\n # print(sum(flux.values()))\n # print(sum(qw))\n # print(sum(qw3))\n # print('\\n')\n\n soma_inj = []\n soma_prod = []\n soma2 = 0\n with open('fluxo_malha_fina_bif_gr{0}.txt'.format(self.loop), 'w') as arq:\n for volume in self.wells:\n gid = self.mb.tag_get_data(self.global_id_tag, volume, flat = True)[0]\n values = self.store_flux_pf[volume].values()\n arq.write('gid:{0} , fluxo:{1}\\n'.format(gid, sum(values)))\n\n # print('gid:{0}'.format(gid))\n # print('valor:{0}'.format(sum(values)))\n if volume in self.wells_inj:\n soma_inj.append(sum(values))\n else:\n soma_prod.append(sum(values))\n # print('\\n')\n soma2 += sum(values)\n arq.write('\\n')\n arq.write('soma_inj:{0}\\n'.format(sum(soma_inj)))\n arq.write('soma_prod:{0}\\n'.format(sum(soma_prod)))\n arq.write('tempo:{0}'.format(self.tempo))\n\n return store_flux_pf", "def __init__(__self__, *,\n desktop_type: pulumi.Input[str],\n image_id: pulumi.Input[str],\n root_disk_size_gib: pulumi.Input[int],\n user_disk_size_gibs: pulumi.Input[Sequence[pulumi.Input[int]]],\n bundle_name: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n language: Optional[pulumi.Input[str]] = None,\n root_disk_performance_level: Optional[pulumi.Input[str]] = None,\n user_disk_performance_level: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"desktop_type\", desktop_type)\n pulumi.set(__self__, \"image_id\", image_id)\n pulumi.set(__self__, \"root_disk_size_gib\", root_disk_size_gib)\n pulumi.set(__self__, \"user_disk_size_gibs\", user_disk_size_gibs)\n if bundle_name is not None:\n pulumi.set(__self__, \"bundle_name\", bundle_name)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if language is not None:\n 
pulumi.set(__self__, \"language\", language)\n if root_disk_performance_level is not None:\n pulumi.set(__self__, \"root_disk_performance_level\", root_disk_performance_level)\n if user_disk_performance_level is not None:\n pulumi.set(__self__, \"user_disk_performance_level\", user_disk_performance_level)", "def initialise_fluids(self):\n air = self.air_alias.val\n flue_gas = self.fuel_alias.val + '_fg'\n\n for c in self.outl:\n if not c.fluid.val_set[air]:\n c.fluid.val[air] = 0.8\n if not c.fluid.val_set[flue_gas]:\n c.fluid.val[flue_gas] = 0.2\n c.target.propagate_fluid_to_target(c, c.target)", "def define(self):\n self.E1.v_str = f'{self._E1.name} + (1 - {self.name}_zE1)'\n self.E2.v_str = f'{self._E2.name} + 2*(1 - {self.name}_zE2)'\n\n self.SE1.v_str = f'{self._SE1.name} + (1 - {self.name}_zSE1)'\n self.SE2.v_str = f'{self._SE2.name} + 2*(1 - {self.name}_zSE2)'\n\n self.A.v_str = f'{self.name}_zE1*{self.name}_zE2 * ' \\\n f'{self.name}_E1*{self.name}_SE1*' \\\n f'exp({self.name}_E1*log({self.name}_E2*{self.name}_SE2/' \\\n f'({self.name}_E1*{self.name}_SE1))/({self.name}_E1-{self.name}_E2))'\n\n self.B.v_str = f'-log({self.name}_E2*{self.name}_SE2/({self.name}_E1*{self.name}_SE1))/' \\\n f'({self.name}_E1 - {self.name}_E2)'" ]
[ "0.55242515", "0.5524179", "0.5290922", "0.52189064", "0.51642203", "0.5117331", "0.5084165", "0.50247896", "0.49878523", "0.49760827", "0.49751097", "0.49734625", "0.49661058", "0.4918144", "0.49163127", "0.49124214", "0.4886242", "0.4886242", "0.48820427", "0.48705277", "0.48552313", "0.48475525", "0.48284176", "0.48101288", "0.48031855", "0.4790572", "0.47900525", "0.47822973", "0.4772492", "0.47518587", "0.4749005", "0.47428617", "0.47412667", "0.47405538", "0.47289157", "0.4728022", "0.47265294", "0.47258925", "0.4718521", "0.47112894", "0.46979567", "0.4692562", "0.46903113", "0.46893245", "0.46850747", "0.46842417", "0.4684124", "0.4669672", "0.46652943", "0.46651447", "0.4664147", "0.46630824", "0.46557915", "0.4651086", "0.46505183", "0.46444583", "0.46409887", "0.463422", "0.46307564", "0.4630706", "0.4624819", "0.46234238", "0.46223325", "0.46167585", "0.4611907", "0.46116766", "0.46107268", "0.46098202", "0.4603217", "0.45948428", "0.45931563", "0.4593035", "0.45860997", "0.45816258", "0.4581322", "0.45778105", "0.45758197", "0.45758197", "0.45757857", "0.45734423", "0.45701796", "0.45657563", "0.45635262", "0.45610043", "0.4559966", "0.45593995", "0.45582795", "0.45574158", "0.4556891", "0.45562863", "0.45462507", "0.45461252", "0.4537728", "0.45325536", "0.45305902", "0.45271426", "0.45256793", "0.4522539", "0.45218694", "0.45163712", "0.45152646" ]
0.0
-1
describes each segment of tubing
def __init__(self, inner_diameter=None, length=None, material=None):
    self.inner_diameter = inner_diameter
    self.length = length
    self.material = material
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def segment(data):", "def info(self):\n c = 0\n for s in self.segments:\n c+= len(s.points)\n return \"Nodes : %5i\\nSegments : %5i\\nPoints : %5i\" % (len(self.nodes), len(self.segments), c)", "def test_getting_segments(self):\n pass", "def _get_seg_repr(self, set_speakers=True):\n result = str(self.get_seg_header())\n for seg in self._segments:\n line = seg.get_line()\n if set_speakers:\n line[-1] = self._speaker\n else:\n line[-1] = self._label\n result += \"%s %s %s %s %s %s %s %s\\n\" % tuple(line)\n return result", "def segment_heads(classes, data):\n\n segmented_data =[]\n\n # gather and organize needed data\n output_dir = PROJECT_ROOT + \"/data/segmented_head_images/\"\n if not os.path.exists(output_dir):\n os.makedirs(output_dir)\n\n img_ids_file = open(PROJECT_ROOT + '/data/CUB_200_2011/images.txt').readlines()\n img_ids_file = [i.strip().split(' ') for i in img_ids_file]\n\n parts_file = open(PROJECT_ROOT +'/data/CUB_200_2011/parts/part_locs.txt').readlines()\n parts_file = [i.strip().split(' ') for i in parts_file]\n\n # <image_id> <x> <y> <width> <height>\n bounding_file = open(PROJECT_ROOT +'/data/CUB_200_2011/bounding_boxes.txt').readlines()\n bounding_file = [i.strip().split(' ') for i in bounding_file]\n\n img_ids = {}\n for i in img_ids_file:\n img_ids[i[1]] = int(i[0])\n\n part_ids = {}\n for i in parts_file:\n part_ids[(int(i[0]), int(i[1]))] = list(map(lambda x:int(float(x)), i[2:]))\n\n boudning_ids = {}\n for i in bounding_file:\n boudning_ids[int(i[0])] = list(map(lambda x:int(float(x)), i[1:]))\n\n for r in data:\n # print(\"~~~SEGMENTING HEAD: \", r[1])\n\n img_id = r[1].split('/')\n img_id = img_id[len(img_id)-2] + '/' + img_id[len(img_id)-1].replace('png', 'jpg')\n img_id = img_ids[img_id]\n\n # get location of bird parts\n # [x, y, visible or not]\n nape = part_ids[(img_id, 10)]\n tail = part_ids[(img_id, 14)]\n throat = part_ids[(img_id, 15)]\n bounds = boudning_ids[img_id]\n\n # if any of that parts not visible\n if nape[2] == 0 or tail[2] == 0 or throat[2] == 0 or nape[1] - throat[1] == 0:\n continue\n\n #A=(x1,y1) to B=(x2,y2) a point P=(x,y) f\n #d=(x−x1)(y2−y1)−(y−y1)(x2−x1)\n\n # compute on what side of nape-throat line tail is on\n tail_side = (tail[0] - nape[0])*(throat[1] - nape[1])-(tail[1] - nape[1])*(throat[0]-nape[0])\n\n img = cv2.imread(r[1])\n (rows, cols, _) = img.shape\n\n # all pixels on same side of nape-throat line as tail turn off\n for y in range(0,rows):\n for x in range(0,cols):\n side = (x - nape[0])*(throat[1] - nape[1])-(y - nape[1])*(throat[0]-nape[0])\n\n if np.sign(tail_side) == np.sign(side):\n img[y, x, :] = 0\n\n # img = cv2.circle(img, (nape[0], nape[1]), 3, (255, 0, 0))\n # img = cv2.circle(img, (tail[0], tail[1]), 3, (0, 255, 0))\n # img = cv2.circle(img, (throat[0], throat[1]), 3, (0, 0, 255))\n\n # crop by boudning box\n img = img[bounds[1]:bounds[1]+bounds[3], bounds[0]:bounds[0]+bounds[2], :]\n\n # save\n filename = r[1].split(\"/\")\n filename = filename[len(filename)-1].split(\".\")[0]\n if not os.path.exists(output_dir+classes[r[0]]):\n os.makedirs(output_dir+classes[r[0]])\n cv2.imwrite(output_dir+classes[r[0]]+\"/\"+filename+\".png\", img)\n segmented_data.append((r[0],output_dir+classes[r[0]]+\"/\"+filename+\".png\"))\n\n return segmented_data", "def iter_segments(self):\n return\n yield", "def construct_segments(self):\n for strand in self.strand_list:\n strand.construct_segment()", "def getSegments(self) -> List[int]:\n ...", "def test_getting_segment_details(self):\n pass", "def outputSegmentStat(self, 
param_combo2array_id2no_of_segments):\n\t\tparam_combo_ls = param_combo2array_id2no_of_segments.keys()\n\t\tparam_combo_ls.sort()\n\t\tfor param_combo in param_combo_ls:\n\t\t\tarray_id2no_of_segments = param_combo2array_id2no_of_segments.get(param_combo)\n\t\t\tno_of_arrays = len(array_id2no_of_segments)\n\t\t\tno_of_segments_ls = array_id2no_of_segments.values()\n\t\t\tno_of_segments_per_array = sum(no_of_segments_ls)/float(no_of_arrays)\n\t\t\tsys.stderr.write(\"Param-combo (a, T, M) %s: %s segments per array.\\n\"%(repr(param_combo), no_of_segments_per_array))", "def sample(self, seg_logit, seg_label):", "def generate(self, diagram):", "def generate_test_segments(training_set: [Video], n=100, duration=20):\n \n test_set = []\n labels = []\n \n i = 0\n while i < n:\n \n # Find random video\n video = random.choice(training_set)\n \n # Skip videos that are not long enough\n if video.duration() < duration:\n continue\n \n # Calculate required number of frames in test segment\n frames_in_selection = int(duration * video.frame_rate)\n \n # Calculate start / end frames of segment\n max_start_frame = video.num_frames - frames_in_selection - 1\n selection_start_frame = int(max_start_frame * random.random())\n selection_end_frame = selection_start_frame + frames_in_selection\n \n # Get histograms\n histograms = []\n \n for segment in video.segments:\n \n if segment.frame_end < selection_start_frame or segment.frame_start > selection_end_frame:\n # Pass if fragment outside bounds\n pass\n \n elif segment.frame_start >= selection_start_frame and segment.frame_end <= selection_end_frame:\n \n # Add the entire segment to the selection\n histograms += segment.histograms\n \n else:\n \n # Add only part of this segment to the selection\n if segment.frame_start >= selection_start_frame:\n slice_start = None\n slice_end = selection_end_frame - segment.frame_start # The last part of the selection\n \n elif segment.frame_end <= selection_end_frame:\n slice_start = selection_start_frame - segment.frame_start # The first part of the selection\n slice_end = None\n \n else:\n slice_start = selection_start_frame - segment.frame_start # Entirely inside this segment\n slice_end = selection_end_frame - segment.frame_start\n \n histograms += segment.histograms[slice_start:slice_end]\n \n # TODO Sometimes selected range seems to have no histogram? \n if len(histograms) == 0:\n continue\n \n # Add histogram list to test set\n test_set.append(histograms)\n \n # Add labels\n labels.append( (video.name, selection_start_frame, selection_end_frame) )\n \n i += 1\n return test_set, labels", "def __repr__(self):\n segment_str = str(self.segment)\n shortname_str = str(self.shortname)\n return '{}: {}'.format(shortname_str, segment_str)", "def __str__(self):\n return '%i traces in the SEG Y structure.' 
% len(self.traces)", "def pick_test_segments(training_set: [Video], n_segments=100):\n \n test_set = []\n labels = []\n \n for i in range(n_segments):\n \n # Find random video\n video = random.choice(training_set)\n\n # Select random segment and add histogram to test set\n segment = random.choice(video.segments)\n test_set.append(segment.histograms)\n labels.append(segment)\n \n return test_set, labels", "def substantiate():", "def partition_Basic(segfile):\n scenelist = Recording.read_segs(segfile)\n segcount = 0\n for l in scenelist.values():\n segcount += len(l)\n return scenelist, segcount", "def showSegments(self, surface):\n for segment in self.segments:\n segment.show(surface)", "def label_to_segments(utters, labels):\n segment_list = []\n for i, utterence in enumerate(utters):\n segments = []\n seg = \"\"\n for j, char in enumerate(utterence):\n if labels[i][j] >= 0.5:\n if len(seg) > 0:\n segments.append(seg)\n seg = \"\"\n seg = seg + char\n else:\n seg = seg + char\n if j == (len(utterence) - 1):\n segments.append(seg)\n segment_list.append(segments)\n return segment_list", "def __init__(self):\n super().__init__()\n self._points = 0\n self._segments = []\n self.fill_list()\n # i = random.randint(0, len(self._segments) - 1)\n # self.set_text(self._segments[i])\n self.reset()", "def getSegmentCount(self) -> int:\n ...", "def getTimeSegments(segments,bounds,radius,starttime,endtime,magrange,catalog,contributor):\n stime = starttime\n etime = endtime\n \n dt = etime - stime\n dtseconds = dt.days*86400 + dt.seconds\n #segment 1\n newstime = stime\n newetime = stime + timedelta(seconds=dtseconds/2)\n nevents,maxevents = getEventCount(bounds=bounds,radius=radius,starttime=newstime,endtime=newetime,\n magrange=magrange,catalog=catalog,contributor=contributor)\n if nevents < maxevents:\n segments.append((newstime,newetime))\n else:\n segments = getTimeSegments(segments,bounds,radius,newstime,newetime,\n magrange,catalog,contributor)\n #segment 2\n newstime = newetime\n newetime = etime\n nevents,maxevents = getEventCount(bounds=bounds,radius=radius,\n starttime=newstime,endtime=newetime,\n magrange=magrange,catalog=catalog,\n contributor=contributor)\n if nevents < maxevents:\n segments.append((newstime,newetime))\n else:\n segments = getTimeSegments(segments,bounds,radius,newstime,newetime,\n magrange,catalog,contributor)\n\n return segments", "def segments(self):\n return self._segments", "def __init__(self):\n self.g_sect = []", "def diarization(self):\n self._status = 1\n if self._single:\n try:\n os.mkdir(self.get_file_basename())\n except OSError, err:\n if err.errno != 17:\n raise err\n fm._silence_segmentation(self._basename)\n fm._gender_detection(self._basename)\n segname = self._basename + '.seg'\n f_seg = open(segname, 'r')\n headers = []\n values = []\n differ = False\n basic = None\n gen = {'M': 0, 'F': 0, 'U': 0}\n for line in f_seg.readlines():\n if line.startswith(';;'):\n headers.append(line[line.index('['):])\n else:\n a_line = line.split(' ')\n if basic == None:\n basic = a_line[4]\n if a_line[4] != basic:\n differ = True\n gen[a_line[4]] += int(a_line[3])\n values.append(a_line)\n header = \";; cluster:S0 %s\" % headers[0]\n from operator import itemgetter\n index = 0\n while index < len(values):\n values[index][2] = int(values[index][2])\n index += 1\n values = sorted(values, key=itemgetter(2))\n index = 0\n while index < len(values):\n values[index][2] = str(values[index][2])\n index += 1\n newfile = open(segname + '.tmp', 'w')\n newfile.write(header)\n if differ: 
#in case the gender of the single segments differ \n# then set the prevailing\n# print 'transgender :-D'\n if gen[ 'M' ] > gen[ 'F' ]:\n basic = 'M'\n elif gen[ 'M' ] < gen[ 'F' ] :\n basic = 'F'\n else:\n basic = 'U'\n\n for line in values:\n line[4] = basic #same gender for all segs\n newfile.write(' '.join(line[:-1]) + ' S0\\n')\n f_seg.close()\n newfile.close()\n shutil.copy(self.get_file_basename() + '.wav',\n os.path.join(self.get_file_basename(), 'S0' + '.wav'))\n shutil.move(segname + '.tmp', segname)\n shutil.copy(self.get_file_basename() + '.seg',\n os.path.join(self.get_file_basename(), 'S0' + '.seg'))\n utils.ensure_file_exists(segname)\n else:\n# print str(self._diar_conf[0])\n# print str(self._diar_conf[1])\n fm.diarization(self._basename, str(self._diar_conf[0]),\n str(self._diar_conf[1]))\n self._status = 2", "def metadata(self):\n metadata_dict = {}\n for lbl_value, seginfo in self.infos.items():\n prefix = f'Segment{lbl_value}_'\n metadata_dict.update(seginfo.to_dict('slicer', prefix))\n return metadata_dict", "def print_seg_row_col(sp) : \n s, r, c = src_from_rc8x8(sp.peak_y_raw, sp.peak_x_raw)\n print('seg: %d, row: %.1f, col: %.1f' % (s, r, c))", "def hierarchy_code_volume_by_segments(self, chart=\"sunburst\"):\n\n title = chart + _(' chart of coded audio/video segments - milliseconds')\n owner, subtitle = self.owner_and_subtitle_helper()\n # Get all the coded data\n case_file_name, file_ids = self.get_file_ids()\n if case_file_name != \"\":\n subtitle += case_file_name\n coded_data = []\n cur = self.app.conn.cursor()\n sql = \"select cid, pos1-pos0 from code_av where owner like ?\"\n if file_ids != \"\":\n sql = \"select cid, pos1-pos0 from code_av where owner like ? and id\" + file_ids\n cur.execute(sql, [owner])\n result = cur.fetchall()\n for row in result:\n coded_data.append(row)\n # Calculate the frequency of each code\n for code_ in self.codes:\n for coded_item in coded_data:\n if coded_item[0] == code_['cid']:\n code_['count'] += coded_item[1]\n # Add the code count directly to each parent category, add parentname to each code\n for category in self.categories:\n for code_ in self.codes:\n if code_['catid'] == category['catid']:\n category['count'] += code_['count']\n code_['parentname'] = category['name']\n # Find leaf categories, add to parent categories, and gradually remove leaves\n # Until only top categories remain\n sub_categories = copy(self.categories)\n counter = 0\n while len(sub_categories) > 0 or counter < 5000:\n # Identify parent categories\n parent_list = []\n for super_cat in sub_categories:\n for child_cat in sub_categories:\n if super_cat['catid'] == child_cat['supercatid']:\n child_cat['parentname'] = super_cat['name']\n parent_list.append(super_cat)\n # Identify leaf categories\n leaf_list = []\n for category in sub_categories:\n if category not in parent_list:\n leaf_list.append(category)\n # Add counts for each leaf category to higher category\n for leaf_category in leaf_list:\n for cat in self.categories:\n if cat['catid'] == leaf_category['supercatid']:\n cat['count'] += leaf_category['count']\n sub_categories.remove(leaf_category)\n counter += 1\n combined = self.categories + self.codes\n items = []\n values = []\n parents = []\n for sb_combined in combined:\n items.append(sb_combined['name'])\n values.append(sb_combined['count'])\n parents.append(sb_combined['parentname'])\n # Create pandas DataFrame and Figure\n data = {'item': items, 'value': values, 'parent': parents}\n df = pd.DataFrame(data)\n cutoff = 
self.ui.lineEdit_filter.text()\n mask = df['value'] != 0\n if cutoff != \"\":\n mask = df['value'] >= int(cutoff)\n subtitle += _(\"Values\") + \" >= \" + cutoff\n if chart == \"sunburst\":\n fig = px.sunburst(df[mask], names='item', parents='parent', values='value',\n title=title + subtitle)\n fig.show()\n self.helper_export_html(fig)\n if chart == \"treemap\":\n fig = px.treemap(df[mask], names='item', parents='parent', values='value',\n title=title + subtitle)\n fig.show()\n self.helper_export_html(fig)", "def run(self):\n for l in self.uniqueSkel:\n mask = np.arange(len(self.skel))[self.skelLabels == l]\n counts = self.findNearest(mask)\n self.memberships[l] = counts\n\n #self.memberships is an array of as many rows as skeleton labels and as many columns as Voronoi cluster labels,\n #where the i-th row shows for all skeleton points of cluster label i, how many belong to each of the Voronoi\n #cluster labels. More precisely, the j-th column of the i-th row of this array shows how many skeleton points\n #of cluster label i have a closest Voronoi cell center of label j.\n\n print('Out of ' + str(len(self.skel)) + ' skeleton points, ' + str(sum(self.memberships[:, 0])) + ' (' + str(round(sum(self.memberships[:, 0]) * 100/len(self.skel), 3)) + ' %) appear in areas classified as void areas by Voronoi')\n\n for l in self.uniqueSkel:\n members = sum(self.skelLabels == l)\n topVor = np.argsort(self.memberships[l])[::-1][:5] - 1\n counts = np.sort(self.memberships[l])[::-1][:5]\n print('For the ' + str(members) + ' skeleton points with label ' + str(l) + ': ')\n for i in range(5):\n if counts[i] > 0:\n if topVor[i] == -1:\n add = ' ' + str(counts[i]) + ' ( ' + str(round(counts[i] * 100 / members, 3)) + ' %) are not associated with a Voronoi cluster cell'\n else:\n add = ' ' + str(counts[i]) + ' ( ' + str(round(counts[i] * 100/ members, 3)) + ' %) belong to the Voronoi Cluster with label ' + str(topVor[i])\n print(add)\n\n self.plotResults()", "def plot_mass_flow(self,\n watershed, \n output, \n title = 'Subbasin Reach Mass Flow Diagram',\n fontsize = 6, \n theight = 0.2, \n l = 8.5, \n w = 11, \n verbose = True, \n overwrite = True,\n ):\n\n if os.path.exists(output) and not overwrite:\n if verbose: print('file %s exists' % output)\n return\n elif verbose: print('generating a mass linkage plot\\n')\n\n fontheight = fontsize / 72.\n rheight = 3 * fontheight\n rwidth = 12 * fontheight\n xgap = fontheight\n ygap = rheight\n awidth = rheight / 4\n aheight = rheight / 3\n\n # set up a sheet to write the image\n\n fig = pyplot.figure(figsize = (w, l))\n\n ax = fig.add_subplot(111, aspect = 'equal')\n ax.get_xaxis().set_visible(False)\n ax.get_yaxis().set_visible(False)\n t = ax.set_title(title)\n\n # divide the subbasins into rows and put them on the chart\n # start at the bottom to organize the linkages better\n\n rows = [watershed.outlets, ['outlet']]\n\n top = False\n while not top:\n row = []\n for next in rows[0]:\n for subbasin in watershed.updown:\n if watershed.updown[subbasin] == next: row.append(subbasin)\n if len(row) > 0: \n rows.insert(0, row)\n else: \n top = True\n\n # add an inlet box in the row above each inlet\n\n for inlet in watershed.inlets: \n\n i = 0\n while i < len(rows) - 1:\n\n for subbasin in rows[i]:\n\n if subbasin == inlet:\n \n # find the position of the subbasin in the chart\n\n j = rows[i].index(inlet)\n\n if i > 0:\n\n # figure out where the subbasins point\n \n updowns = [watershed.updown[s] for s in rows[i-1]]\n \n # if first or last, add it there in the row 
above\n\n if j == 0: \n rows[i-1].insert(0, 'inlet')\n elif j == len(rows[i]) - 1: \n rows[i-1].append('inlet')\n else:\n\n # find the place to add in the preceeding row \n\n n = updowns.index(rows[i][j-1]) + 1\n rows[i-1].insert(n, 'inlet')\n\n i += 1\n\n # write the subbasin boxes to the chart\n\n middle = math.ceil(w // (rwidth + xgap)) // 2\n last = 0\n\n # keep track of the bounding box of the plot\n\n xmin, ymin, xmax, ymax = middle, 0, middle, 0\n\n for i in range(len(rows)):\n\n row = rows[i]\n \n y = (ygap + rheight) * i + theight\n\n # figure out which cell to put in the main column\n\n if i == 0:\n main = row[(len(row) - 1) // 2]\n elif i < len(rows) - 1:\n main = watershed.updown[rows[i-1][last]]\n else: main = 'outlet'\n\n start = middle - row.index(main)\n\n if i < len(rows) - 1: next_row = rows[i + 1]\n\n for subbasin in row:\n x = (rwidth + xgap) * (start + row.index(subbasin))\n r = patches.Rectangle((x, y), rwidth, rheight, fill = False)\n\n # adjust the bounding box\n\n if x < xmin: xmin = x\n if x + rwidth > xmax: xmax = x + rwidth\n if y < ymin: ymin = y\n if y + rheight > ymax: ymax = y + rheight\n\n if subbasin != 'outlet': ax.add_patch(r)\n\n b = ax.text(x + rwidth / 2, y + rheight / 2, subbasin,\n horizontalalignment = 'center',\n verticalalignment = 'center')\n\n # draw the arrow\n\n if i < len(rows) - 1:\n\n x1 = x + rwidth / 2\n\n if i < len(rows) - 2 and subbasin != 'inlet':\n next = watershed.updown[subbasin]\n next_start = (middle - \n next_row.index(watershed.updown[main]))\n x2 = ((rwidth + xgap) * \n (next_start + next_row.index(next))\n + rwidth / 2)\n\n elif subbasin == 'inlet':\n next = watershed.inlets[0]\n next_start = (middle - \n next_row.index(watershed.updown[main]))\n\n x2 = ((rwidth + xgap) * \n (next_start + next_row.index(next))\n + rwidth / 2)\n\n else:\n next_start = middle\n x2 = ((rwidth + xgap) * (middle) + rwidth / 2)\n\n a = pyplot.arrow(x1, y + rheight, x2 - x1, ygap, \n head_width = awidth, head_length = aheight,\n fc = 'k', ec = 'k', \n length_includes_head = True)\n ax.add_patch(a)\n\n last = row.index(main)\n i += 1\n \n pad = 0.02\n\n xmin = xmin - (xmax - xmin) * pad\n xmax = xmax + (xmax - xmin) * pad\n ymin = ymin - (ymax - ymin) * pad\n ymax = ymax + (ymax - ymin) * pad\n\n ax.set_xlim(xmin, xmax)\n ax.set_ylim(ymax, ymin)\n pyplot.axis('off')\n pyplot.savefig(output, dpi = 200)\n\n pyplot.clf()\n pyplot.close()", "def ttest_region():\n\n cks = cksmet.io.load_table('cks-cuts',cache=1)\n cks = cks[~cks.isany]\n lamo = cksmet.io.load_table('lamost-cal-cuts',cache=2)\n lamo = lamo[~lamo.isany]\n #lamo = lamo.query('-1 < lamo_smet < 0.5')\n\n boxes = _boxes()\n boxes = boxes.copy()\n #boxes = boxes.sort_values(by=['per1','prad1'])\n boxes2 = []\n lines = []\n for i,row in boxes.iterrows():\n cut = cks[\n cks.koi_period.between(row.per1,row.per2) &\n cks.iso_prad.between(row.prad1,row.prad2)\n ]\n d = calculate_statistics(cut.cks_smet,lamo.lamo_smet)\n d = dict(d, **row)\n lines.append( to_string(d))\n \n return lines", "def displayNeedle(self,i):\n #obsolete\n profbox()\n modelNodes = slicer.util.getNodes('vtkMRMLModelNode*')\n for modelNode in modelNodes.values():\n if modelNode.GetAttribute(\"nth\")==str(i) and modelNode.GetAttribute(\"segmented\")=='1' :\n displayNode = modelNode.GetModelDisplayNode()\n nVisibility = displayNode.GetVisibility()\n \n if nVisibility:\n displayNode.SliceIntersectionVisibilityOff()\n displayNode.SetVisibility(0)\n else:\n displayNode.SliceIntersectionVisibilityOn()\n displayNode.SetVisibility(1)", 
"def __repr__(self):\n s = self.regular_neighborhood()\n return 'Train track on the ' + repr(s).lower()", "def __analyze(self):\n\n\t\t'''\n\t\ttodo: bSlabList.analyze() needs to step through each edge, not slabs !!!\n\t\t'''\n\n\t\tfor edgeIdx, edge in enumerate(self.edgeDictList):\n\t\t\tlen2d = 0\n\t\t\tlen3d = 0\n\t\t\tlen3d_nathan = 0\n\n\t\t\tslabList = edge['slabList']\n\t\t\tfor j, slabIdx in enumerate(slabList):\n\n\t\t\t\tx1 = self.x[slabIdx]\n\t\t\t\ty1 = self.y[slabIdx]\n\t\t\t\tz1 = self.z[slabIdx]\n\n\t\t\t\t#print('pointIdx:', pointIdx)\n\t\t\t\torig_x = self.orig_x[slabIdx]\n\t\t\t\torig_y = self.orig_y[slabIdx]\n\t\t\t\torig_z = self.orig_z[slabIdx]\n\n\t\t\t\tif j>0:\n\t\t\t\t\tlen3d = len3d + self.euclideanDistance(prev_x1, prev_y1, prev_z1, x1, y1, z1)\n\t\t\t\t\tlen2d = len2d + self.euclideanDistance(prev_x1, prev_y1, None, x1, y1, None)\n\t\t\t\t\tlen3d_nathan = len3d_nathan + self.euclideanDistance(prev_orig_x1, prev_orig_y1, prev_orig_z1, orig_x, orig_y, orig_z)\n\n\t\t\t\t# increment\n\t\t\t\tprev_x1 = x1\n\t\t\t\tprev_y1 = y1\n\t\t\t\tprev_z1 = z1\n\n\t\t\t\tprev_orig_x1 = orig_x\n\t\t\t\tprev_orig_y1 = orig_y\n\t\t\t\tprev_orig_z1 = orig_z\n\n\t\t\tedge['Len 2D'] = round(len2d,2)\n\t\t\tedge['Len 3D'] = round(len3d,2)\n\t\t\tedge['Len 3D Nathan'] = round(len3d_nathan,2)\n\n\t\t\t# diameter, pyqt does not like to display np.float, cast to float()\n\t\t\tmeanDiameter = round(float(np.nanmean(self.d[edge['slabList']])),2)\n\t\t\tedge['Diam'] = meanDiameter", "def create_from_segments(self, segment, origin=0):\r\n n = origin\r\n if segment[origin]['T'] != 'soma': # if it's a soma, only one compartment\r\n while (len(segment[n]['children']) == 1) and (segment[n]['T'] != 'soma'): # Go to the end of the branch\r\n n += 1\r\n # End of branch\r\n branch = segment[origin:n + 1]\r\n # Set attributes\r\n self.diameter, self.length, self.area, self.x, self.y, self.z = \\\r\n zip(*[(seg['diameter'], seg['length'], seg['area'], seg['x'], seg['y'], seg['z']) for seg in branch])\r\n self.diameter, self.length, self.area, self.x, self.y, self.z = array(self.diameter), array(self.length), \\\r\n array(self.area), array(self.x), array(self.y), array(self.z)\r\n self.type = segment[n]['T'] # normally same type for all compartments in the branch\r\n # Create children (list)\r\n self.children = [Morphology().create_from_segments(segment, origin=c) for c in segment[n]['children']]\r\n # Create dictionary of names (enumerates children from number 1)\r\n for i, child in enumerate(self.children):\r\n self._namedkid[str(i + 1)] = child\r\n # Name the child if possible\r\n if child.type in ['soma', 'axon', 'dendrite']:\r\n if child.type in self._namedkid:\r\n self._namedkid[child.type] = None # two children with the same name: erase (see next block)\r\n else:\r\n self._namedkid[child.type] = child\r\n # Erase useless names\r\n for k in self._namedkid.keys():\r\n if self._namedkid[k] is None:\r\n del self._namedkid[k]\r\n # If two kids, name them L (left) and R (right)\r\n if len(self.children) == 2:\r\n self._namedkid['L'] = self._namedkid['1']\r\n self._namedkid['R'] = self._namedkid['2']\r\n return self", "def get_sed_thk(self):\n\t\tfor period in self.attrs['prd_arr']:\n\t\t\tgroup = self['%g_sec'%( period )]\n\t\t\tsed_Arr = self.Rbf_func(group['latArr'].value, group['lonArr'].value)\n\t\t\tgroup.create_dataset(name='sed_Arr', data=sed_Arr)\n\t\t\tgroup.create_dataset(name='sed_Arr_msk', data=group['tomo_data_msk'].value)\n\t\tpass", "def get_segments(input_path):\n with open(input_path, 
'r') as segments_file:\n segments = []\n for line in segments_file:\n words = line.split('\\t')\n sg_dict = {}\n sg_dict['start'] = float(words[0].replace(',', '.'))\n sg_dict['end'] = float(words[1].replace(',', '.'))\n sg_dict['class'] = words[2][:-1]\n segments.append(sg_dict)\n return segments", "def generate_seg_file(self, set_speakers=True):\n result = ''\n for clu in self._clusters:\n result += self._clusters[clu]._get_seg_repr(set_speakers)\n f_seg = open(self.get_file_basename() + '.seg', 'w')\n f_seg.write(result)\n f_seg.close()", "def showAnns(self, anns):\n if len(anns) == 0:\n return 0\n if 'segmentation' in anns[0] or 'keypoints' in anns[0]:\n datasetType = 'instances'\n elif 'caption' in anns[0]:\n datasetType = 'captions'\n else:\n raise Exception('datasetType not supported')\n if datasetType == 'instances':\n ax = plt.gca()\n ax.set_autoscale_on(False)\n polygons = []\n color = []\n for ann in anns:\n c = (np.random.random((1, 3))*0.6+0.4).tolist()[0]\n if 'segmentation' in ann:\n if type(ann['segmentation']) == list:\n # polygon\n for seg in ann['segmentation']:\n poly = np.array(seg).reshape((int(len(seg)/2), 2))\n polygons.append(Polygon(poly))\n color.append(c)\n else:\n # mask\n t = self.imgs[ann['image_id']]\n if type(ann['segmentation']['counts']) == list:\n rle = maskUtils.frPyObjects([ann['segmentation']], t['height'], t['width'])\n else:\n rle = [ann['segmentation']]\n m = maskUtils.decode(rle)\n img = np.ones( (m.shape[0], m.shape[1], 3) )\n if ann['iscrowd'] == 1:\n color_mask = np.array([2.0,166.0,101.0])/255\n if ann['iscrowd'] == 0:\n color_mask = np.random.random((1, 3)).tolist()[0]\n for i in range(3):\n img[:,:,i] = color_mask[i]\n ax.imshow(np.dstack( (img, m*0.5) ))\n if 'keypoints' in ann and type(ann['keypoints']) == list:\n # turn skeleton into zero-based index\n sks = np.array(self.loadCats(ann['category_id'])[0]['skeleton'])-1\n kp = np.array(ann['keypoints'])\n x = kp[0::3]\n y = kp[1::3]\n v = kp[2::3]\n for sk in sks:\n if np.all(v[sk]>0):\n plt.plot(x[sk],y[sk], linewidth=3, color=c)\n plt.plot(x[v>0], y[v>0],'o',markersize=8, markerfacecolor=c, markeredgecolor='k',markeredgewidth=2)\n plt.plot(x[v>1], y[v>1],'o',markersize=8, markerfacecolor=c, markeredgecolor=c, markeredgewidth=2)\n p = PatchCollection(polygons, facecolor=color, linewidths=0, alpha=0.4)\n ax.add_collection(p)\n p = PatchCollection(polygons, facecolor='none', edgecolors=color, linewidths=2)\n ax.add_collection(p)\n elif datasetType == 'captions':\n for ann in anns:\n print(ann['caption'])", "def print_metrics(self):\n # num times regular barcodes appear in a simulated doublet nearest neighbors, grouped by value\n # TODO: this list is 2 dimensional... 
need to extract dimensione with counts for the counter\n frequencies = [i[1] for i in self.num_times_knn]\n counter = collections.Counter(frequencies)\n print(\"##\\nNumber time barcoded in sim doub KNN: {}\".format(counter))\n\n # artificial fraction\n print(\"##\\nArtificial fraction: {}\".format(self.artificial_fraction))\n\n # num doublets\n print(\"##\\nNumber of doublets called: {}\".format(self.num_doublets))", "def testViewOccData(self):\n try:\n entryD = self.__mU.doImport(self.__instanceSavePath, fmt=\"pickle\")\n segmentCountList = []\n segmentLengthList = []\n entryCountD = {}\n for entryId in entryD:\n for _, eD in entryD[entryId][\"selected_polymer_entities\"].items():\n\n analD = eD[\"anal_instances\"] if \"anal_instances\" in eD else {}\n\n for _, aD in analD.items():\n entryCountD[entryId] = True\n segmentCount = len(aD[\"owabRegiond\"])\n segmentLengths = [d[\"length\"] for sId, d in aD[\"owabRegiond\"].items()]\n\n segmentCountList.append(segmentCount)\n segmentLengthList.extend(segmentLengths)\n #\n logger.info(\"gaps %d gap lengths %d\", len(segmentCountList), len(segmentLengthList))\n #\n cu = DisorderChartUtils()\n cu.doIntegerBarChart(\n segmentCountList,\n plotPath=self.__plotOwabSegmentCount,\n yPlotScale=\"log\",\n yPlotMax=6,\n xPlotMax=100,\n xPlotLabel=\"Segment Count\",\n yPlotLabel=\"Protein Instances (log)\",\n plotTitle=\"Segment counts (OWAB > 2 * mean OWAB)\",\n )\n self.__writeLegend(\n self.__plotOwabSegmentCount,\n \"Segment counts for all (%d) protein sequences (OWAB > 2 * mean OWAB and X-ray resolution limit < 3.5 Angstoms (entries=%d)) \"\n % (len(segmentCountList), len(entryCountD)),\n )\n cu.doIntegerBarChart(\n segmentLengthList,\n plotPath=self.__plotOwabSegmentLength,\n yPlotScale=\"log\",\n yPlotMax=6,\n xPlotMax=100,\n xPlotLabel=\"Segment width (residues)\",\n yPlotLabel=\"Segment Instances (log)\",\n plotTitle=\"Segment widths (OWAB > 2 * mean OWAB)\",\n )\n self.__writeLegend(\n self.__plotOwabSegmentLength,\n \"Segment widths for all (%d) protein sequences (OWAB > 2 * mean OWAB and X-ray resolution limit < 3.5 Angstoms (entries=%d)) \"\n % (len(segmentLengthList), len(entryCountD)),\n )\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n self.fail()", "def get_model_summary(self):\n\n summary = self._model[0].get_model_summary()\n lower_bound = self._FLOAT_STRING_FORMAT.format(self._break_points[0])\n upper_bound = self._FLOAT_STRING_FORMAT.format(self._break_points[1])\n summary_title = 'Segment model range: ' \\\n + lower_bound \\\n + ' <= ' + self._explanatory_variables[0] \\\n + ' < ' + upper_bound\n summary.tables[0].title = summary_title\n\n number_of_segments = self.get_number_of_segments()\n\n spacer_table = SimpleTable(data=['=' * 50])\n\n for i in range(1, number_of_segments):\n segment_model_summary = self._model[i].get_model_summary()\n lower_bound = self._FLOAT_STRING_FORMAT.format(self._break_points[i])\n upper_bound = self._FLOAT_STRING_FORMAT.format(self._break_points[i + 1])\n summary_title = 'Segment model range: ' \\\n + lower_bound \\\n + ' <= ' + self._explanatory_variables[0] \\\n + ' < ' + upper_bound\n segment_model_summary.tables[0].title = summary_title\n summary.tables.extend([spacer_table] + segment_model_summary.tables)\n\n return summary", "def create_summary(filename, regions):\n subclips = []\n input_video = VideoFileClip(filename)\n last_end = 0\n for (start, end) in regions:\n subclip = input_video.subclip(start, end)\n subclips.append(subclip)\n last_end = end\n return 
concatenate_videoclips(subclips)", "def segment_func2(self):\n # computing neighboors graph\n A = self.boundaryprob_graph()\n\n # SpectralClustering segmentation\n sc = SpectralClustering(3, affinity='precomputed', n_init=10, assign_labels='discretize')\n labels = sc.fit_predict(A)\n\n return labels", "def get_station_boroughs(self):\\", "def create_activity_list(station,data_order):\n # Generate an ASCII representation of the GPS timestamped segments of time covered by the input data\n seglist = segmentlist(data_order.keys())\n # Sort the segment list\n seglist.sort()\n # Initialise dictionary for segment information\n full_seglist = DataQualityDict()\n # Save time span for each segment in ASCII file\n with open(\"segments.txt\", \"w\") as fout:\n for seg in seglist:\n print >>fout, \"%10.9f %10.9f\" % seg\n # FIXME: Active should be masked from the sanity channel\n full_seglist[station] = DataQualityFlag(station,active=seglist.coalesce(),known=seglist.coalesce())\n return full_seglist", "def get_test_segments(data):\n n_channels = data.shape[0]\n n_steps = data.shape[1]\n factor = 2\n n_segments = n_steps // factor\n\n segments = []\n for i_segment in range(n_segments):\n for i_channel in range(n_channels):\n segment = {\n 'index': i_segment + i_channel * n_segments,\n 'start': i_segment,\n 'stop': i_segment + 1,\n 'weight': data[i_channel, factor * i_segment],\n }\n if n_channels > 1:\n segment['channel'] = i_channel\n segments.append(segment)\n\n return segments", "def __repr__(self):\n output = \"\"\n output +=\"V:\\n\"\n for row in self.V:\n output += \"\\t\"\n for el in row:\n output += str(el) + \" \" \n output += \"\\n\" \n \n output += \"\\nW:\\n\"\n for row in self.W:\n output += \"\\t\"\n for el in row:\n output += str(el) + \" \" \n output += \"\\n\"\n return output", "def _get_out_segments(self):\n return self.__out_segments", "def get_segments(self):\n\t\tos.chdir(self.segment_path)\n\t\tfor path in glob.glob(\"%s/*.seg\" % self.segment_path):\n\t\t\t_file = os.path.split(path)[1]\n\t\t\tdae = DiscreetArchiveElement(self,_file,element_type='segment')\n\t\t\tself.elements.append(dae)\n\t\treturn True", "def render(self): # pragma: no cover\n from graphviz import Digraph\n dot = Digraph(name=\"top\")\n for block in self.blocks:\n if isinstance(block, Branch):\n label = \"if \" + astor.to_source(block.cond)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"invhouse\"})\n elif isinstance(block, Yield):\n label = astor.to_source(block.value)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"oval\"})\n elif isinstance(block, BasicBlock):\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"box\"})\n elif isinstance(block, HeadBlock):\n label = \"Initial\"\n dot.node(str(id(block)) + \"_start\", label.rstrip(), {\"shape\": \"doublecircle\"})\n label = \"\\n\".join(astor.to_source(stmt).rstrip() for stmt in block.initial_statements)\n # label += \"\\nLive Ins : \" + str(block.live_ins)\n # label += \"\\nLive Outs : \" + str(block.live_outs)\n # label += \"\\nGen : \" + str(block.gen)\n # 
label += \"\\nKill : \" + str(block.kill)\n dot.node(str(id(block)), label.rstrip(), {\"shape\": \"box\"})\n dot.edge(str(id(block)) + \"_start\", str(id(block)))\n else:\n raise NotImplementedError(type(block))\n # for source, sink, label in self.edges:\n for sink, label in block.outgoing_edges:\n dot.edge(str(id(block)), str(id(sink)), label)\n\n\n file_name = tempfile.mktemp(\"gv\")\n dot.render(file_name, view=True)\n # with open(\"cfg.dot\", \"w\") as file:\n # file.write(dot.source)\n # exit()", "def __str__(self):\n return '%i traces in the SU structure.' % len(self.traces)", "def substructure(self, mol):\n info = dict()\n # key: fp_str, val: ((atom_idx, radius),...,)\n AllChem.GetMorganFingerprint(mol, self.r, bitInfo=info)\n atomidx2fp = [[None for _ in range(self.r + 1)] for __ in range(mol.GetNumAtoms())]\n for fp_int, frag in info.items():\n for atom_idx, r in frag:\n atomidx2fp[atom_idx][r] = fp_int\n sentence = list()\n for atom_idx in range(mol.GetNumAtoms()):\n for r in range(self.r + 1):\n if atomidx2fp[atom_idx][r]:\n sentence.append(atomidx2fp[atom_idx][r])\n return list(map(str, sentence))", "def unit_cell_info(sub_clusters):\n from libtbx.utils import plural_s\n # 3. print out some information that is useful.\n out_str = \"\\n\\n{:<16} {:<8} {:<13} {:<13} {:<13} {:<12} {:<12} {:<12}{:<8}\\n\".format(\n \"Cluster_id\",\n \"N_xtals\",\n \"Med_a\", \"Med_b\", \"Med_c\",\n \"Med_alpha\", \"Med_beta\", \"Med_gamma\",\"Delta(deg)\")\n singletons = []\n for cluster in sub_clusters:\n if len(cluster.members) != 1:\n # New approach, takes niggli setting of the cluster median and converts\n # back to reference setting for cluster report. Fixes cctbx#97.\n from cctbx import crystal\n from cctbx.uctbx import unit_cell\n from cctbx.sgtbx.lattice_symmetry import metric_subgroups\n\n input_symmetry = crystal.symmetry(\n unit_cell=unit_cell(cluster.medians[0:6]),\n space_group_symbol=\"P 1\")\n groups = metric_subgroups(input_symmetry, 3.00,\n enforce_max_delta_for_generated_two_folds=True)\n group = groups.result_groups[0]\n print(\" Unit cell:\", group['best_subsym'].unit_cell())\n uc_params_conv = group['best_subsym'].unit_cell().parameters()\n\n sorted_pg_comp = sorted(list(cluster.pg_composition.items()),\n key=lambda x: -1 * x[1])\n pg_strings = [\"{} in {}\".format(pg[1], pg[0])\n for pg in sorted_pg_comp]\n point_group_string = \", \".join(pg_strings) + \".\"\n out_str += point_group_string\n out_str += (\"\\n{:<16} {:<8} {:<6.2f}({:<5.2f}) {:<6.2f}({:<5.2f})\"\n \" {:<6.2f}({:<5.2f}) {:<6.2f}({:<4.2f}) {:<6.2f}\"\n \"({:<4.2f}) {:<6.2f}({:<4.2f})\").format(\n cluster.cname,\n len(cluster.members),\n cluster.medians[0], cluster.stdevs[0],\n cluster.medians[1], cluster.stdevs[1],\n cluster.medians[2], cluster.stdevs[2],\n cluster.medians[3], cluster.stdevs[3],\n cluster.medians[4], cluster.stdevs[4],\n cluster.medians[5], cluster.stdevs[5])\n out_str += (\"\\n{:>24} {:<6.2f}{:<7} {:<6.2f}{:<7}\"\n \" {:<6.2f}{:<7} {:<6.2f}{:<6} {:<6.2f}\"\n \"{:<6} {:<6.2f}{:<6} {:<6.2}\").format(\n group['best_subsym'].space_group_info().symbol_and_number(),\n uc_params_conv[0], \"\",\n uc_params_conv[1], \"\",\n uc_params_conv[2], \"\",\n uc_params_conv[3], \"\",\n uc_params_conv[4], \"\",\n uc_params_conv[5], \"\",\n group[\"max_angular_difference\"]) + \"\\n\\n\"\n\n else:\n singletons.append(\"\".join([(\"{:<14} {:<11.2f} {:<11.2f} {:<11.2f}\"\n \"{:<12.1f} {:<12.1f} {:<12.1f}\").format(\n list(cluster.pg_composition.keys())[0],\n cluster.members[0].uc[0], cluster.members[0].uc[1],\n 
cluster.members[0].uc[2], cluster.members[0].uc[3],\n cluster.members[0].uc[4], cluster.members[0].uc[5]),\n '\\n']))\n out_str += \"\\nStandard deviations are in brackets.\"\n explanation = \"\"\"\\nEach cluster:\nInput lattice count, with integration Bravais setting space group.\nCluster median with Niggli cell parameters (std dev in brackets).\nHighest possible metric symmetry and unit cell using LePage (J Appl Cryst 1982, 15:255) method, maximum delta 3deg.\"\"\"\n out_str += explanation\n singleton_str = \"\\n%i singleton%s:\" %plural_s(len(singletons))\n singleton_str += \"\\n\\n{:<14} {:<11} {:<11} {:<11}{:<12} {:<12} {:<12}\\n\".format(\n \"Point group\",\n \"a\", \"b\", \"c\", \"alpha\", \"beta\", \"gamma\")\n singleton_str += \"\".join(singletons)\n n_clusters = len(sub_clusters) - len(singletons)\n out_str = \"\\n%i cluster%s:\" %plural_s(n_clusters) + out_str\n return singleton_str + out_str", "def browse(self):\n res = \"PID[\" + str(PID) + \"] \"\n for (start, offset) in \\\n self.__global_index[self.__start_index: self.__start_index + self.__nb_segs]:\n seg = Segment(self.__content[start:start + offset])\n res = res + \"\\n \" + str(seg)\n return res", "def run(self):\n self.track_len = []\n # debug\n while self._segment_index <= self._segment_cnt:\n if self._segment_index < 0: # Uncomment this block to debug specific segment\n self._segment_index += 1\n continue\n # run association\n print \"[Tracking]\\tSegment index:\\t{} Total segment num:\\t{}\".format(self._segment_index, self._segment_cnt)\n start = cv2.getTickCount()\n \n self._run_segment()\n print \"[Tracking]\\tSegment start:\\t{} Segment end\\t{}\".format(self._segment_start_fid,\n self._segment_end_fid)\n # dump into file\n \"\"\"\n seg_name = 'segment_{}.track'.format(self._segment_index)\n seg_file = os.path.join(self._segment_dir, seg_name)\n self._segments_path.append(seg_file)\n Track.dump_to_track_file(self._high_level_tracks, save_name=seg_file)\n print \"Track contains {} high level tracks\".format(len(self._high_level_tracks))\n \"\"\"\n self._segment_index += 1\n end = cv2.getTickCount()\n print \"[Tracking]\\tTime:\\t{} seconds\".format(float(end - start) / cv2.getTickFrequency())\n if P['debug']:\n pos_feature_num = self.pos_arr.shape[0]\n neg_feature_num = self.neg_arr.shape[0]\n pos_arr = np.hstack((self.pos_arr, np.ones(shape=(pos_feature_num, 1))))\n neg_arr = np.hstack((self.neg_arr, np.zeros(shape=(neg_feature_num, 1))))\n np.savetxt(os.path.join(\"../feature_classifier/\", \"{}_pos_feature.txt\".format(self._video_name)), pos_arr)\n np.savetxt(os.path.join(\"../feature_classifier/\", \"{}_neg_feature.txt\".format(self._video_name)), neg_arr)\n\n final_track_save_file = os.path.join(self._save_dir, self._video_name + \"_final_merged.track\")\n mot_track_save_file = os.path.join(self._save_dir, self._video_name + \".txt\")\n Track.dump_to_track_file_no_feature(self._final_tracks, final_track_save_file, self._calib_w, self._calib_h)\n Track.dump_track_with_mot_format(self._final_tracks, mot_track_save_file,)\n print(\"there are {} tracklet in final merged track\".format(len(self._final_tracks)))", "def segment(raw_sents:List[str], segment=\"jieba\") -> List[List[str]]:\n\t# segment_list = [\"pkuseg\", \"jieba\"]\n\t# if segment.strip() not in segment_list:\n\t# \treturn []\n\n\tseg_sents = []\n\tif segment == \"pkuseg\":\n\t\timport pkuseg\n\n\t\t## init the seg\n\t\tseg = pkuseg.pkuseg()\n\n\t\t## segment the sentence by pkuseg\n\t\tfor sent in raw_sents:\n\t\t\tres_seg = 
seg.cut(sent)\n\t\t\tseg_sents.append(res_seg)\n\t\t# print(seg_sents)\n\telif segment == \"jieba\":\n\t\timport jieba\n\t\tfor sent in raw_sents:\n\t\t\tres_seg = jieba.lcut(sent)\n\t\t\tsentence = \" \".join(res_seg)\n\t\t\tpattern4 = re.compile(\" +\", re.S)\n\t\t\tsentence = pattern4.sub(\" \", sentence)\n\t\t\tres_seg = sentence.split(\" \")\n\t\t\tseg_sents.append(res_seg)\n\n\treturn seg_sents", "def segment_euclidean_length(segmented_img, objects):\n # Store debug\n debug = params.debug\n params.debug = None\n\n x_list = []\n y_list = []\n segment_lengths = []\n rand_color = color_palette(len(objects))\n\n\n labeled_img = segmented_img.copy()\n\n for i, cnt in enumerate(objects):\n # Store coordinates for labels\n x_list.append(objects[i][0][0][0])\n y_list.append(objects[i][0][0][1])\n\n # Draw segments one by one to group segment tips together\n finding_tips_img = np.zeros(segmented_img.shape[:2], np.uint8)\n cv2.drawContours(finding_tips_img, objects, i, (255, 255, 255), 1, lineType=8)\n segment_tips = find_tips(finding_tips_img)\n tip_objects, tip_hierarchies = find_objects(segment_tips, segment_tips)\n points = []\n if not len(tip_objects) == 2:\n fatal_error(\"Too many tips found per segment, try pruning again\")\n for t in tip_objects:\n # Gather pairs of coordinates\n x, y = t.ravel()\n coord = (x, y)\n points.append(coord)\n\n # Draw euclidean distance lines\n cv2.line(labeled_img, points[0], points[1], rand_color[i], 1)\n\n # Calculate euclidean distance between tips of each contour\n segment_lengths.append(euclidean(points[0], points[1]))\n\n segment_ids = []\n # Put labels of length\n for c, value in enumerate(segment_lengths):\n text = \"{:.2f}\".format(value)\n w = x_list[c]\n h = y_list[c]\n cv2.putText(img=labeled_img, text=text, org=(w, h), fontFace=cv2.FONT_HERSHEY_SIMPLEX,\n fontScale=params.text_size, color=(150, 150, 150), thickness=params.text_thickness)\n segment_label = \"ID\" + str(c)\n segment_ids.append(c)\n\n outputs.add_observation(variable='segment_eu_length', trait='segment euclidean length',\n method='plantcv.plantcv.morphology.segment_euclidean_length', scale='pixels', datatype=list,\n value=segment_lengths, label=segment_ids)\n\n # Reset debug mode\n params.debug = debug\n # Auto-increment device\n params.device += 1\n\n if params.debug == 'print':\n print_image(labeled_img, os.path.join(params.debug_outdir, str(params.device) + '_segment_eu_lengths.png'))\n elif params.debug == 'plot':\n plot_image(labeled_img)\n\n return labeled_img", "def __init__(self, segments, display_res=\"1920x1080\", stream_id=None):\n self.segments = segments\n self.display_res = display_res\n self.stream_id = stream_id\n self.o22 = []\n self.mode = None", "def labels(self):\n\n param=self\n\n l=len(param)\n\n sweep_label=[]\n\n for index,name in enumerate(param.names):\n\n sweep_label.append((\\\n ''.join([c for c in name if c.isupper()]))\\\n .replace(\"IDT\",\"\")\\\n .replace(\"S\",\"\")\\\n .replace(\"M\",\"\"))\n\n stringout=[]\n\n unique={name:list(dict.fromkeys(values)) for name,values in zip(param.names,param.values)}\n\n for i in range(l):\n\n tmp_lab=''\n\n for lab,name in zip(sweep_label,self.names):\n\n tmp_lab=tmp_lab+lab+str(unique[name].index(param()[name][i]))\n\n stringout.append(tmp_lab)\n\n return stringout", "def __init__(self):\n self.s_sect = []", "def __repr__(self):\n return 'LineSegment({0}, {1})'.format(self.p1, self.p2)", "def show(self):\n if self.nodes_ is None:\n logging.debug(\"Segment - Nothing to show. 
Skipping.\")\n return\n\n if len(self.polygons_) != 0:\n logging.debug(\"Segment - Showing 3D Segments using `vedo`.\")\n logging.warning(\"Segment - Showing 3D Segments can be slow!.\")\n\n import vedo\n\n points = vedo.Points(self.nodes)\n lines = []\n for p in self.polygons:\n p = np.asarray(p).astype(np.int32)\n lines.append(vedo.Line(self.nodes[p]))\n\n vedo.show([points, *lines]).show().close()\n\n else:\n logging.debug(\"Segment - Showing 2D Segments using `matplotlib`.\")\n\n import matplotlib.pyplot as plt\n\n plt.scatter(\n self.nodes_[:, 0],\n self.nodes_[:, 1],\n c=\"pink\",\n zorder=1000,\n )\n\n for c in self.connectivity_:\n plt.plot(\n self.nodes_[c][:,0],\n self.nodes_[c][:,1],\n c=\"grey\",\n lw=2,\n zorder=10,\n )\n\n plt.show()", "def stp_detail(switch):\n\n\tifloop = False\n\tstp_split = []\n\n\tgetdata = switch.conf('show spanning-tree detail | inc Number')\n\n\tif debug:\n\t\tprint getdata\n\n\tshow_stp = xmltodict.parse(getdata[1])\n\n\tstp = show_stp ['ins_api']['outputs']['output']['body']\n\n\ttcn_change = re.findall('(?<=occurred\\s).*(?=\\:)', stp)\n\tfor each in tcn_change:\n\t\tfor time in tcn_change:\n\n\t\t\tfirst_hour = re.findall(r'^(.*):',time)\n\t\t\tfor hour in first_hour:\n\t\t\t\tif int(hour) == 0:\n\t\t\t\t\tifloop = True\n\t\t\t#pulls the hour as an integer from the time listed in the message body\n\n\t\t\tfirst_minute = re.findall(r'\\:(.*)',time)\n\t\t\tfor minute in first_minute:\n\t\t\t\tif int(minute) <= 5:\n\t\t\t\t\tifloop = True\n\t\t\t#pulls the minute as an integer from the time listed in the message body\n\n\t\t\tstp_time = hour + ':' + minute\n\t\t\tif debug:\n\t\t\t\tprint stp_time\n\n\t\tif debug:\n\t\t\tprint \"Last topology change happened \" + stp_time + \" hours ago\"\n\n\ttcn_number = re.findall('(?<=changes\\s).*(?=\\last)', stp)\n\tfor number in tcn_number:\n\t\tstp_number = number\n\t#pulls ths number of topology changes that have occurred if tcn_change returns a value in the specified range\n\n\t\tif debug:\n\t\t\tprint \"Number of topology changes = \" + stp_number\n\n\tif ifloop:\n\t\tprint \"Last topology change happened \" + stp_time + \" hours ago\"\n\t\tprint \"Number of topology changes = \" + stp_number\n\telse:\n\t\tprint \"No STP topology changes.\"", "def consecutive_sections(): # noqa: D416", "def stats(self):", "def test_plot_segments(sersic_2d_image, segm_and_cat):\n cat, segm, segm_deblend = segm_and_cat\n\n pf.plot_segments(segm, vmax=1, vmin=0)\n plt.show()\n\n pf.plot_segments(segm_deblend, vmax=1, vmin=0)\n plt.show()\n\n pf.plot_segment_residual(segm, sersic_2d_image, vmax=1, vmin=0)\n plt.show()", "def tabulate(self) -> str:\n items = [\n ('Number of stations', self._num_stations),\n ('Loss probability', self.drop_prob),\n ]\n\n for node in range(self._num_stations):\n items.append((f'[[ STATION #{node} ]]', ''))\n\n ssize = self.system_size[node]\n qsize = self.queue_size[node]\n busy = self.busy[node]\n\n ssize_pmf = [ssize.pmf(x) for x in range(ssize.truncated_at + 1)]\n qsize_pmf = [qsize.pmf(x) for x in range(qsize.truncated_at + 1)]\n busy_pmf = [busy.pmf(x) for x in range(busy.truncated_at + 1)]\n\n items.extend([\n ('System size PMF', str_array(ssize_pmf)),\n ('System size average', ssize.mean),\n ('System size std.dev.', ssize.std),\n ('Queue size PMF', str_array(qsize_pmf)),\n ('Queue size average', qsize.mean),\n ('Queue size std.dev.', qsize.std),\n ('Busy PMF', str_array(busy_pmf)),\n ('Utilization', self.get_utilization(node)),\n ('Drop probability', self.drop_prob[node]),\n ('Delivery 
probability', self.delivery_prob[node]),\n ('Departures, average', self.departures[node].avg),\n ('Departures, std.dev.', self.departures[node].std),\n ('Response time, average', self.response_time[node].avg),\n ('Response time, std.dev.', self.response_time[node].std),\n ('Wait time, average', self.wait_time[node].avg),\n ('Wait time, std.dev.', self.wait_time[node].std),\n ('End-to-end delays, average', self.delivery_delays[node].avg),\n ('End-to-end delays, std.dev.', self.delivery_delays[node].std),\n ])\n return tabulate(items, headers=('Param', 'Value'))", "def segments(self):\n return (self._subset((i,i+1)) for i in range(len(self)-1))", "def onSegmentButton(self):\n markupsNode = slicer.mrmlScene.GetFirstNodeByName(\"MarkupsFiducial\")\n\n seedsFileName = self.fileNameSeedsLineEdit.text\n marginMask = int(self.marginMask.value)\n distance = int(self.distance.value)\n gamma = float(self.gammaSpinBox.value)\n regularizationDiameter = int(self.regularizationDiameter.value)\n minThreshold = int(self.minThresholdSlider.value)\n maxThreshold = int(self.maxThresholdSlider.value)\n \n self.markupsList = []\n if markupsNode != None:\n for i in range(markupsNode.GetNumberOfFiducials()):\n point_ras = [0, 0, 0]\n markupsNode.GetNthFiducialPosition(i, point_ras)\n name = markupsNode.GetNthFiducialLabel(i)\n label = int(markupsNode.GetNthControlPointDescription(i))\n self.markupsList.append([name, point_ras, label])\n \n if len(self.markupsList) == 0:\n fileName = self.seedsPath + self.fileNameSeedsLineEdit.text\n self.markupsList = self.loadMarkupsFromSeedFile(fileName)\n \n if len(self.markupsList) == 0:\n print(\"There is no fiducial markups !\")\n return\n \n self.logic.setGlobalPath(self.globalPath)\n self.logic.setSeedsFileName(seedsFileName)\n self.logic.setRemoveLastSegmentation(self.removeLastSegmentationCheckBox.isChecked())\n self.logic.setShowBackGround(self.showBackGroundCheckBox.isChecked())\n result = self.logic.run(self.inputSelector.currentNode(), self.labelColorsList, self.markupsList,\n marginMask, distance, gamma, regularizationDiameter, [minThreshold, maxThreshold])\n\n if result: # Run succeed\n # Set the segmentation file UI name with this seeds file name and the used paramaters\n segmentationFileName = getSegmentationFileName(seedsFileName, distance, gamma, marginMask, regularizationDiameter)\n self.saveSegmentationName.text = segmentationFileName \n self.outputVolume = result", "def read_segmentation_gt(gt_file):\n with open(gt_file, 'rt') as f_handle:\n reader = csv.reader(f_handle, delimiter='\\t')\n start_times = []\n end_times = []\n labels = []\n for row in reader:\n if len(row) == 3:\n start_times.append(float(row[0]))\n end_times.append(float(row[1]))\n labels.append((row[2]))\n return np.array(start_times), np.array(end_times), labels", "def coregister_formatted():\r\n\r\n print(\"begin coregister_formatted\")\r\n\r\n # check all records for pairs using the recordBegin time\r\n pair_records()\r\n\r\n # establish the beginning and end of the coregistered records\r\n define_pairedRecords()\r\n\r\n # coregister paired data in a single csv\r\n format_coregister()\r\n\r\n\r\n print(\"completed segment_formatted\")", "def divide_microstructure_unit(self,point1,point2,dimensions):\n\t\tnew_sections = []\n\t\tif len(point1) < 4:\n\t\t\tdone = 0.0\n\t\t\tdtwo = 0.0\n\t\telse:\n\t\t\tdone = point1[-1]\n\t\t\tdtwo = point2[-1]\n\t\t\n\t\tp1 = np.array(point1[:3])\n\t\tp2 = np.array(point2[:3])\n\t\tvec = p2-p1\n\t\tdimslength = float(np.sum(dimensions))\n\t\tfor d,dim in 
enumerate(dimensions[:-1]):\n\t\t\tnearsideproportion = np.sum(dimensions[:d])/dimslength\n\t\t\tfarsideproportion = np.sum(dimensions[:d+1])/dimslength\n\t\t\tnew_sections.append([\t\n\t\t\t\t\t\tlist(np.append(p1+vec*nearsideproportion,done)),\n\t\t\t\t\t\tlist(np.append(((p1+vec*nearsideproportion)+(p1+vec*farsideproportion))/2.0,(done+dtwo)/2.0)),\n\t\t\t\t\t\tlist(np.append(p1+vec*farsideproportion,dtwo))\n\t\t\t\t\t\t])\n\t\t\n\t\tnew_sections.append([\t\n\t\t\t\t\tlist(new_sections[-1][-1]),\n\t\t\t\t\tlist((np.array(new_sections[-1][-1])+np.array(list(point2[:3])+[dtwo]))/2.0),\n\t\t\t\t\tlist(point2[:3])+[dtwo]\n\t\t\t\t\t])\n\t\t\n\t\tif len(dimensions) > 2:\n\t\t\treturn(new_sections,['node','paranode1','paranode2','internode','paranode2','paranode1'][:len(new_sections)])\n\t\t\n\t\telse:\n\t\t\treturn(new_sections,['interbouton','bouton'][:len(new_sections)])", "def at_s2ncut(self):\n\n\t # Notch out the transit and recompute\n\t fmcut = self.fm.copy()\n\t fmcut.fill_value=0\n\t # Widen by twice the transit duration\n\t tmask = self.rLbl['tRegLbl'] >= 0\n\t tmask = np.convolve(\n\t tmask.astype(float),\n\t np.ones(self.header['tdurcad'] * 2),\n\t mode='same'\n\t )\n\t tmask = tmask.astype(bool)\n\t fmcut.mask = fmcut.mask | tmask\n\t grid = tfind.Grid(self.t,fmcut)\n\n\n\t pgram_params = [\n\t dict(Pcad1=self.Pcad - 1, Pcad2=self.Pcad + 1, twdG = [self.header['tdurcad']])\n\t ]\n\t pgram = grid.periodogram(pgram_params,mode='max')\n\t idxmax = pgram.s2n.idxmax()\n\n\t dkeys = 's2ncut s2ncut_t0 s2ncut_mean'.split()\n\t pkeys = 's2n t0 mean'.split()\n\n\t for dkey,pkey in zip(dkeys,pkeys):\n\t self.add_attr(dkey,pgram.ix[idxmax,pkey])", "def at_s2ncut(self):\n\n\t # Notch out the transit and recompute\n\t fmcut = self.fm.copy()\n\t fmcut.fill_value=0\n\t # Widen by twice the transit duration\n\t tmask = self.rLbl['tRegLbl'] >= 0\n\t tmask = np.convolve(\n\t tmask.astype(float),\n\t np.ones(self.header['tdurcad'] * 2),\n\t mode='same'\n\t )\n\t tmask = tmask.astype(bool)\n\t fmcut.mask = fmcut.mask | tmask\n\t grid = tfind.Grid(self.t,fmcut)\n\n\n\t pgram_params = [\n\t dict(Pcad1=self.Pcad - 1, Pcad2=self.Pcad + 1, twdG = [self.header['tdurcad']])\n\t ]\n\t pgram = grid.periodogram(pgram_params,mode='max')\n\t idxmax = pgram.s2n.idxmax()\n\n\t dkeys = 's2ncut s2ncut_t0 s2ncut_mean'.split()\n\t pkeys = 's2n t0 mean'.split()\n\n\t for dkey,pkey in zip(dkeys,pkeys):\n\t self.add_attr(dkey,pgram.ix[idxmax,pkey])", "def _writeSegmentsRealization(self, writeTo):\n pivotID = self._templateROM.pivotParameterID\n pivot = self._indexValues[pivotID]\n # realization to add eventually\n rlz = {}\n segmentNames = range(len(self._divisionInfo['delimiters']))\n # pivot for all this stuff is the segment number\n rlz['segment_number'] = np.asarray(segmentNames)\n # start indices\n varName = 'seg_index_start'\n writeTo.addVariable(varName, np.array([]), classify='meta', indices=['segment_number'])\n rlz[varName] = np.asarray(list(d[0] for d in self._divisionInfo['delimiters']))\n # end indices\n varName = 'seg_index_end'\n writeTo.addVariable(varName, np.array([]), classify='meta', indices=['segment_number'])\n rlz[varName] = np.asarray(list(d[-1] for d in self._divisionInfo['delimiters']))\n # pivot start values\n varName = 'seg_{}_start'.format(self._templateROM.pivotParameterID)\n writeTo.addVariable(varName, np.array([]), classify='meta', indices=['segment_number'])\n rlz[varName] = np.asarray(list(pivot[d[0]] for d in self._divisionInfo['delimiters']))\n # pivot end values\n varName = 
'seg_{}_end'.format(self._templateROM.pivotParameterID)\n writeTo.addVariable(varName, np.array([]), classify='meta', indices=['segment_number'])\n rlz[varName] = np.asarray(list(pivot[d[-1]] for d in self._divisionInfo['delimiters']))\n return rlz", "def show_vdcs_detail(self):\n for v in self.vdcs:\n print self.vdcs[v]", "def __str__(self) -> str:\n return \"'{tag}' EDI segment: {elements}\".format(\n tag=self.tag, elements=str(self.elements)\n )", "def display_taxis(taxis):\n for i, taxi in enumerate(taxis):\n print(f\"{i} - {taxi}\")", "def get_comp_spanrels(self):", "def side_traces(x,im):\n s0 = x['side-traces'][0]\n s1 = x['side-traces'][1]\n t1 = Scatter(y=s0)\n t2 = Scatter(y=s1)\n\n #put_thing(im,x['abs-line'],(255,0,0),(0,0),3)\n\n groups = []\n diff_traces = []\n markers = []\n y3 = []\n TriangleHumps.get_dimensions(x,debug_groups=groups,debug_diffs=diff_traces,debug_markers = markers, im = im,y3=y3)\n mode = stats.mode(y3)[0][0]\n trigger = mode*2+1\n t3 = Scatter(y=y3)\n\n annotations = []\n diff_traces = [Scatter(y=v) for v in diff_traces]\n t4 = Scatter(x=markers,y=[10]*len(markers),mode = 'markers+text')\n for gru in groups:\n for hump in gru:\n annotations.append({\n 'x':hump['range'][0],\n 'y':trigger,\n 'text':'%d,%d'%(hump['area'],hump['length']),\n })\n\n name = 'mode=%d,trigger=%d,groups=%d' % (mode,trigger,len(groups))\n \n #return (t1,t2,t3,)\n #print('markers %d:' % x['id'],markers,[trigger]*len(markers))\n return [t3,t4,] + diff_traces,annotations, name", "def show_vdcs(self):\n for v in self.vdcs:\n print v", "def __str__(self):\n lines = []\n # set hsp info line\n statline = []\n # evalue\n evalue = getattr_str(self, \"evalue\", fmt=\"%.2g\")\n statline.append(\"evalue \" + evalue)\n # bitscore\n bitscore = getattr_str(self, \"bitscore\", fmt=\"%.2f\")\n statline.append(\"bitscore \" + bitscore)\n lines.append(\"Quick stats: \" + \"; \".join(statline))\n\n if len(self.fragments) == 1:\n return \"\\n\".join(\n [self._str_hsp_header(), \"\\n\".join(lines), self.fragments[0]._str_aln()]\n )\n else:\n lines.append(\n \" Fragments: %s %s %s %s\" % (\"-\" * 3, \"-\" * 14, \"-\" * 22, \"-\" * 22)\n )\n pattern = \"%16s %14s %22s %22s\"\n lines.append(pattern % (\"#\", \"Span\", \"Query range\", \"Hit range\"))\n lines.append(pattern % (\"-\" * 3, \"-\" * 14, \"-\" * 22, \"-\" * 22))\n for idx, block in enumerate(self.fragments):\n # set hsp line and table\n # alignment span\n aln_span = getattr_str(block, \"aln_span\")\n # query region\n query_start = getattr_str(block, \"query_start\")\n query_end = getattr_str(block, \"query_end\")\n query_range = \"[%s:%s]\" % (query_start, query_end)\n # max column length is 20\n query_range = (\n query_range[:20] + \"~]\" if len(query_range) > 22 else query_range\n )\n # hit region\n hit_start = getattr_str(block, \"hit_start\")\n hit_end = getattr_str(block, \"hit_end\")\n hit_range = \"[%s:%s]\" % (hit_start, hit_end)\n hit_range = hit_range[:20] + \"~]\" if len(hit_range) > 22 else hit_range\n # append the hsp row\n lines.append(pattern % (str(idx), aln_span, query_range, hit_range))\n\n return self._str_hsp_header() + \"\\n\" + \"\\n\".join(lines)", "def segment_func1(self):\n # computing neighboors graph\n A = self.normal_graph()\n\n # SpectralClustering segmentation\n sc = SpectralClustering(3, affinity='precomputed', n_init=10, assign_labels='discretize')\n labels = sc.fit_predict(A)\n\n return labels", "def generate_segments(full_filename, summary=False):\n\n\t# split filename and create folder to store 
segments\n\tfilename, filetype = os.path.splitext(full_filename)\n\tfolder, filename = os.path.split(filename)\n\tif len(folder) > 0:\n\t\tfolder = folder+\"/\"\n\tif not os.path.isfile(folder+filename+filetype):\n\t\tprint(\"ERROR: File\", folder+filename+filetype, \"does not exist.\")\n\t\tsys.exit()\n\tif not os.path.exists(folder+\"segments\"):\n\t\tos.makedirs(folder+\"segments\")\n\n\t# set flags\n\tlast_new_segment = 0\t# frame at which segment began\n\twas_low = False\t\t\t# if hand left image since segment began\n\tlow_count = 0\t\t\t# consecutive frames for which hand was gone\n\tsegment = 0\t\t\t\t# segment id number\n\n\t# initialize summary/segment writers, set background as first frame\n\treader = imageio.get_reader(folder+filename+filetype, 'ffmpeg')\n\tfps = reader.get_meta_data()['fps']\n\tnframes = reader.get_meta_data()['nframes']\n\tsegment_writer = imageio.get_writer(\n\t\t\tfolder+\"segments/\"+filename+str(segment)+filetype, \n\t\t\t'ffmpeg', fps=fps, macro_block_size=None)\n\tsegment_writer.close()\n\tif summary:\n\t\tsummary_writer = imageio.get_writer(folder+filename+\"_summary\"+filetype, \n\t\t\t'ffmpeg', fps=fps*2)\n\tbackground = np.array(reader.get_data(0)).astype(int)[:,:,0]\n\n\t# process video and segment\n\tfor i, image in enumerate(reader):\n\n\t\t# background subtract, threshold at zero\n\t\timage = np.array(image).astype(int)[:,:,0]\n\t\timage = np.maximum(image - background, np.zeros(image.shape))\n\n\t\t# check if at least 3 edge pixels belong to a hand\n\t\tif (np.sum(image[Y1:Y1+1, X0:X1] > HAND_CUTOFF) > 3 or \n\t\t\t\tnp.sum(image[Y0:Y0+1, X0:X1] > HAND_CUTOFF) > 3 or\n\t\t\t\tnp.sum(image[Y0:Y1, X0:X0+1] > HAND_CUTOFF) > 3 or\n\t\t\t\tnp.sum(image[Y0:Y1, X1:X1+1] > HAND_CUTOFF) > 3):\n\n\t\t\t# if hand just entered image and segment was long enough, start new segment\n\t\t\tif(i - last_new_segment > MIN_SEGMENT_LENGTH and was_low):\n\t\t\t\tif not segment_writer.closed:\n\t\t\t\t\tsegment_writer.close()\n\t\t\t\tsegment += 1\n\t\t\t\tsegment_writer = imageio.get_writer(\n\t\t\t\t\t\tfolder+\"segments/\"+filename+str(segment)+filetype, \n\t\t\t\t\t\t'ffmpeg', fps=fps, macro_block_size=None)\n\t\t\t\tlast_new_segment = i\n\t\t\t\twas_low = False\n\t\t\t\tlow_count = 0\n\n\t\telse: # hand isn't in image, after 1/10 second decide it has left\n\t\t\tlow_count += 1\n\t\t\tif low_count >= 3:\n\t\t\t\twas_low = True\n\t\t\t\n\t\t# segment has reached maximum length, end it\n\t\tif i - last_new_segment > MAX_SEGMENT_LENGTH:\n\t\t\tif not segment_writer.closed:\n\t\t\t\tsegment_writer.close()\n\n\t\t# add border for summary video around bounding area which is captured \n\t\timage[Y0-1,X0:X1] = WHITE*np.ones(X1-X0)\n\t\timage[Y1,X0:X1] = WHITE*np.ones(X1-X0)\n\t\timage[Y0:Y1,X0-1] = WHITE*np.ones(Y1-Y0)\n\t\timage[Y0:Y1,X1] = WHITE*np.ones(Y1-Y0)\n\n\t\t# record with segment/summary writers\n\t\tif not segment_writer.closed:\n\t\t\tsegment_writer.append_data(image[Y0:Y1,X0:X1].astype('uint8'))\n\t\t\tif summary:\n\t\t\t\tsummary_writer.append_data(image.astype('uint8'))\n\n\t\telse: # add lines to indicate not recording, add to summary writer\n\t\t\tfor x in range(X0, X1, 10):\n\t\t\t\timage[Y0:Y1,x] = WHITE*np.ones(Y1-Y0)\n\t\t\tif summary:\n\t\t\t\tsummary_writer.append_data(image.astype('uint8'))\n\n\t\t# display progress processing video\n\t\tif i % 100 == 0:\n\t\t\tpercent = (i / nframes)\n\t\t\tbars = percent*40\n\t\t\tsys.stdout.write(\"\\rSegmenting {0}: [{1}{2}] {3}% \".format(\n\t\t\t\tfull_filename, \"|\"*int(bars), \" \"*int(40-bars), 
int(percent*100)))\n\t\t\tsys.stdout.flush()\n\n\t# close writers\n\tprint(\"\")\n\tsegment_writer.close()\n\tif summary:\n\t\tsummary_writer.close()", "def form_segment_(P_, fa):\n # Determine params type:\n if \"M\" not in P_[0]:\n seg_param_keys = (*aSEG_PARAM_KEYS[:2], *aSEG_PARAM_KEYS[3:])\n Dert_keys = (*aDERT_PARAMS[:2], *aDERT_PARAMS[3:], \"L\")\n elif fa: # segment params: I G M Dy Dx Ga Dyay Dyax Dxay Dxax S Ly y0 Py_ down_fork_ up_fork_ sign\n seg_param_keys = aSEG_PARAM_KEYS\n Dert_keys = aDERT_PARAMS + (\"L\",)\n else: # segment params: I G M Dy Dx S Ly y0 Py_ down_fork_ up_fork_ sign\n seg_param_keys = gSEG_PARAM_KEYS\n Dert_keys = gDERT_PARAMS + (\"L\",)\n\n # Get a list of every segment's top P:\n P0_ = [*filter(lambda P: (len(P['up_fork_']) != 1\n or len(P['up_fork_'][0]['down_fork_']) != 1),\n P_)]\n\n # Form segments:\n seg_ = [dict(zip(seg_param_keys, # segment's params as keys\n # Accumulated params:\n [*map(sum,\n zip(*map(op.itemgetter(*Dert_keys),\n Py_))),\n len(Py_), Py_[0].pop('y'), # Ly, y0\n min(P['x0'] for P in Py_),\n max(P['x0'] + P['L'] for P in Py_),\n Py_, # Py_ .\n Py_[-1].pop('down_fork_'), Py_[0].pop('up_fork_'), # down_fork_, up_fork_ .\n Py_[0].pop('sign')]))\n # cluster_vertical(P): traverse segment from first P:\n for Py_ in map(cluster_vertical, P0_)]\n\n for seg in seg_: # Update segs' refs.\n seg['Py_'][0]['seg'] = seg['Py_'][-1]['seg'] = seg\n\n for seg in seg_: # Update down_fork_ and up_fork_ .\n seg.update(down_fork_=[*map(lambda P: P['seg'], seg['down_fork_'])],\n up_fork_=[*map(lambda P: P['seg'], seg['up_fork_'])])\n\n for i, seg in enumerate(seg_): # Remove segs' refs.\n del seg['Py_'][0]['seg']\n\n return seg_", "def printStatus(self,mod=\"\"):\n dims = \"\"\n corner_labels = {\"back_right\":\"br\",\"back_left\":\"bl\",\"front_right\":\"fr\",\\\n \"front_left\":\"fl\"}\n for x in self.four_corners:\n dims += \"{}({},{}), \".format(corner_labels[x],self.four_corners[x][0],\\\n self.four_corners[x][1])\n print(\"{}{}\\tIN: {}\\tOUT: {}\\tWIDTH: {}\\tHEIGHT: {}\".format(mod,\\\n self.label,[entry.label for entry in self.in_lanes],\\\n [entry.label for entry in self.out_lanes],\\\n round(self.width,2),round(self.length,2)))\n print(\"{}{}\\t{}\".format(mod,self.label,dims))", "def surface_segment_data_preparation(word_dictionary: {str, str}):\n X = []\n Y = []\n words = []\n for word in word_dictionary:\n word_list = []\n word_label_list = []\n for i in range(len(word)):\n gram_dict = {}\n gram_arr = []\n\n ### Unigram\n # gram_dict[word[i]] = 1\n gram_dict[\"uni_\" + word[i]] = 1\n gram_arr.append(word[i])\n\n ### BIGRAM\n try:\n tmp = word[i - 1: i + 1]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 2:\n gram_dict[\"bi_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n try:\n tmp = word[i: i + 2]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 2:\n gram_dict[\"bi_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n ### TRIGRAM\n try:\n tmp = word[i - 1: i + 2]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 3:\n gram_dict[\"tri_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n ## FourGram\n try:\n tmp = word[i - 1: i + 3]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 4:\n gram_dict[\"four_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n try:\n tmp = word[i - 2: i + 2]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 4:\n gram_dict[\"four_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n ## FiveGram\n try:\n tmp = 
word[i - 2: i + 3]\n if tmp:\n # gram_dict[tmp] = 1\n if len(tmp) == 5:\n gram_dict[\"five_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n ## SixGram\n try:\n tmp = word[i - 3: i + 3]\n if tmp:\n if len(tmp) == 6:\n # gram_dict[tmp] = 1\n gram_dict[\"six_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n try:\n tmp = word[i - 2: i + 4]\n if tmp:\n if len(tmp) == 6:\n # gram_dict[tmp] = 1\n gram_dict[\"six_\" + tmp] = 1\n gram_arr.append(tmp)\n except IndexError:\n continue\n\n if word[i] in 'aeiou':\n gram_dict[\"vowel\"] = 1\n else:\n gram_dict[\"const\"] = 1\n\n if word[i].isupper():\n gram_dict[\"upper\"] = 1\n else:\n gram_dict[\"lower\"] = 1\n\n word_list.append(gram_dict)\n word_label_list.append(word_dictionary[word][i])\n\n X.append(word_list)\n Y.append(word_label_list)\n words.append([char for char in word])\n return X, Y, words", "def _get_end_points(self, segmented_instances, i, stats, idx):\n\n end_points=[]\n\n # find all points intersecting the bbox\n #(tl_x, th_y, width, height, area)\n label_num=i+1\n leftmost_x = stats['bbox'][i][cv2.CC_STAT_LEFT]\n topmost_y = stats['bbox'][i][cv2.CC_STAT_TOP]\n width = stats['bbox'][i][cv2.CC_STAT_WIDTH]\n height = stats['bbox'][i][cv2.CC_STAT_HEIGHT]\n bottom_most_y = topmost_y + height-1\n right_most_x = leftmost_x + width-1\n\n segmented_instances_copy=segmented_instances.copy()\n edge_points = np.zeros(segmented_instances.shape).astype(np.uint8)\n segs = np.zeros(segmented_instances.shape).astype(np.uint8)\n segs[segmented_instances==label_num]=255\n cv2.rectangle(segmented_instances_copy,(leftmost_x, topmost_y), (right_most_x, bottom_most_y), 150, 2)\n\n #Get all points for the current stem segment\n label_points = np.argwhere(segmented_instances.copy()==label_num)\n\n # upper points from (tl_x,th_y) to (th_x, th_y) that instersect with the upper edge of the bouding box\n upper_points = [i for i in label_points if i[0]==topmost_y and i[1]>=leftmost_x and i[1]<=right_most_x]\n x_pts, segs, edge_points = self._update_imgs_and_pt_list(upper_points, edge_points, segs, 1)\n center_upper_pts = sorted(self._get_centeroids(x_pts))\n\n # left side points from (tl_x, tl_y) to (tl_x, th_y) that instersect with the left edge of the bouding box\n left_points = [i for i in label_points if i[1]==leftmost_x and i[0]<=bottom_most_y and i[0]>=topmost_y]\n x_pts, segs, edge_points = self._update_imgs_and_pt_list(left_points, edge_points, segs, 0)\n center_left_pts = sorted(self._get_centeroids(x_pts))\n\n #right side points form (th_x, tl_y) to (th_x, th_y) that instersect with the right edge of the bouding box\n right_points = [i for i in label_points if i[1]==right_most_x and i[0]<=bottom_most_y and i[0]>=topmost_y]\n x_pts, segs, edge_points = self._update_imgs_and_pt_list(right_points, edge_points, segs, 0)\n center_right_pts = sorted(self._get_centeroids(x_pts))\n\n #bottom points from (tl_x, tl_y) to (th_x,tl_y)\n bottom_points = [i for i in label_points if i[1]>=leftmost_x and i[1]<=right_most_x and i[0]==bottom_most_y]\n x_pts, segs, edge_points = self._update_imgs_and_pt_list(bottom_points, edge_points, segs, 1)\n center_bottom_pts = sorted(self._get_centeroids(x_pts))\n\n # If there are corner edges, get the centroid of that\n center_x_lb, center_y_lb, center_left_pts, center_bottom_pts = self._get_corner_centers(center_left_pts, \\\n center_bottom_pts, bottom_most_y, leftmost_x)\n if (center_x_lb != None) and (center_y_lb != None):\n end_points.append([center_x_lb, center_y_lb])\n else:\n if 
len(center_left_pts)>0:\n for pt_idx in range(0, len(center_left_pts)):\n end_points.append([leftmost_x, center_left_pts[pt_idx]])\n if len(center_bottom_pts)>0:\n for pt_idx in range(0, len(center_bottom_pts)):\n end_points.append([center_bottom_pts[pt_idx], bottom_most_y])\n\n # If there are corner edges, get the centroid of that\n center_x_ur, center_y_ur, center_right_pts, center_upper_pts = self._get_corner_centers(center_right_pts, \\\n center_upper_pts, topmost_y, right_most_x)\n if (center_x_ur != None) and (center_y_ur != None):\n end_points.append([center_x_ur, center_y_ur])\n else:\n if len(center_right_pts)>0:\n for pt_idx in range(0, len(center_right_pts)):\n end_points.append([right_most_x, center_right_pts[pt_idx]])\n if len(center_upper_pts)>0:\n for pt_idx in range(0, len(center_upper_pts)):\n end_points.append([center_upper_pts[pt_idx], topmost_y])\n\n # If there are corner edges, get the centroid of that\n center_x_ul, center_y_ul, center_left_pts, center_upper_pts = self._get_corner_centers(center_left_pts, \\\n center_upper_pts, topmost_y, leftmost_x)\n if (center_x_ul != None) and (center_y_ul != None):\n end_points.append([center_x_ul, center_y_ul])\n else:\n if len(center_left_pts)>0:\n for pt_idx in range(0, len(center_left_pts)):\n end_points.append([leftmost_x, center_left_pts[pt_idx]])\n if len(center_upper_pts)>0:\n for pt_idx in range(0, len(center_upper_pts)):\n end_points.append([center_upper_pts[pt_idx], topmost_y])\n\n\n # If there are corner edges, get the centroid of that\n center_x_br, center_y_br, center_right_pts, center_bottom_pts = self._get_corner_centers(center_right_pts, \\\n center_bottom_pts, bottom_most_y, right_most_x)\n if (center_x_br != None) and (center_y_br != None):\n end_points.append([center_x_br, center_y_br])\n else:\n if len(center_right_pts)>0:\n for pt_idx in range(0, len(center_right_pts)):\n end_points.append([right_most_x, center_right_pts[pt_idx]])\n if len(center_bottom_pts)>0:\n for pt_idx in range(0, len(center_bottom_pts)):\n end_points.append([center_bottom_pts[pt_idx], bottom_most_y])\n\n #self.showme(segmented_instances_copy, 'bbox')\n\n return end_points", "def Counters(ss, train):\n if train:\n return \"Run:\\t%d\\tEpoch:\\t%d\\tTrial:\\t%d\\tCycle:\\t%d\\tName:\\t%s\\t\\t\\t\" % (ss.TrainEnv.Run.Cur, ss.TrainEnv.Epoch.Cur, ss.TrainEnv.Trial.Cur, ss.Time.Cycle, ss.TrainEnv.TrialName.Cur)\n else:\n return \"Run:\\t%d\\tEpoch:\\t%d\\tTrial:\\t%d\\tCycle:\\t%d\\tName:\\t%s\\t\\t\\t\" % (ss.TrainEnv.Run.Cur, ss.TrainEnv.Epoch.Cur, ss.TestEnv.Trial.Cur, ss.Time.Cycle, ss.TestEnv.TrialName.Cur)", "def printStations(self):\n print(\"Bus numero \" + str(self._num) + \" :\")\n for i in range(len(self._stations)) :\n print(self._stations[i])\n print('\\n')", "def print_components_sizes(distance, points):\n SortedX = sorted([point for point in points], key = abscisse)\n\n result = prochesX(SortedX, distance)\n dernier_pointX_1 = result[len(result)-1]\n dernier_indice = SortedX.index(dernier_pointX_1)\n\n origine = Point([0.0, 0.0])\n segment_1 = Segment([Point([dernier_pointX_1.x, 0]), Point([dernier_pointX_1.x, 1])])\n\n SortedY = sorted([point for point in result], key = ordonnee)\n result_bis = prochesY(SortedY, distance)\n dernier_pointXbis_1 = result_bis[len(result_bis)-1]\n dernier_indice_bis = SortedX.index(dernier_pointXbis_1)\n\n segment_2 = Segment([Point([0, dernier_pointXbis_1.y]), Point([1, dernier_pointXbis_1.y])])\n tycat(origine, points, (segment_1, segment_2))\n \"\"\"\n affichage des tailles triees de chaque 
composante\n \"\"\"\n segments = []\n research_base = [point for point in points]\n origine = Point([0.0, 0.0])\n total = research_base.copy()\n s = 0\n enveloppe = []\n while len(research_base) > 0:\n current = research_base[0]\n research_base.pop(0)\n for point in research_base:\n if current.distance_to(point) < distance:\n s += 1\n segments.append(Segment([current, point]))\n enveloppe.append(s)\n tycat(origine, total, segments)", "def label_segments(segs, truths, detected):\n for seg in segs:\n for truth in truths:\n if time_overlap(seg, truth): \n seg[\"label\"] = truth[\"label\"]\n for det in detected:\n if time_overlap(seg, det):\n if det[\"label\"] == truth[\"label\"]:\n seg[\"match\"] = True\n else:\n seg[\"match\"] = False\n return segs", "def mainTransect():\n\n\n # Start = (3260, 2672 ) new transect data start/end points\n #\n # End = (2731, 12280)\n #\n imageName = 'TransectStitch2.jpg'\n End = (594, 1046)\n Start = (1718, 460)\n \n Start = (3581, 1582)\n End = (3227, 7596)\n#\n# Start = (1035,588)\n# End = (456,1720)\n\n # Start = tuple(raw_input('Please Input Start Coordinates'))\n # End = tuple(raw_input('Please Input End Coordinates'))\n \n stuff= divideTransect(Start, End, imageName)\n if len(stuff) != 50: \n print \"Error! Transect length is not 50 meters.\" \n return stuff", "def helix_triplet_stats (self):\n\n for Value in ['Phi']:\n\n HistogramPlot(np.array(self. values_list(Value, flat=True)), 'myproject/myapp/static/myapp/static/Stats/HelixTriplet/'+Value )\n #zrobic jakies dict coby robilo ranges, uzaleznialo np od zakresu albo od czegos\n\n return", "def labels(self, threshold, segment=True, exclude_border=0):\n data = self.unmasked_data\n isfin = numpy.isfinite(data)\n data[~isfin] = numpy.amin(data[isfin])\n regions = (data > threshold)\n if segment:\n local_max = peak_local_max(data, indices=False,\n exclude_border=0,\n footprint=numpy.ones((3, 3)),\n labels=regions)\n markers = measurements.label(local_max)[0]\n labels = watershed(-data, markers, mask=regions)\n if exclude_border > 0:\n # Remove basins originating from edge peaks\n diff = numpy.zeros_like(local_max)\n for i in range(local_max.ndim):\n local_max = local_max.swapaxes(0, i)\n diff = diff.swapaxes(0, i)\n diff[:exclude_border] = local_max[:exclude_border]\n diff[-exclude_border:] = local_max[-exclude_border:]\n diff = diff.swapaxes(0, i)\n local_max = local_max.swapaxes(0, i)\n \n for l in numpy.sort(labels[diff])[::-1]:\n labels[labels == l] = 0\n labels[labels > l] -= 1\n ulabels = numpy.unique(labels)\n n = ulabels[ulabels != 0].size\n else:\n data_thres = numpy.zeros_like(data)\n data_thres[regions] = data[regions]\n labels, n = measurements.label(data_thres)\n return labels, n", "def _convert_to_multi_segment(self):\n\n self.header['nb_segment'] = [self.info['n_episodes']]\n\n # drop repeated signal headers\n self.header['signal_channels'] = \\\n self.header['signal_channels'].reshape(\n self.info['n_episodes'], -1)[0]\n\n # reshape signal memmap list\n new_sig_memmaps = []\n n_channels = len(self.header['signal_channels'])\n sig_memmaps = self._raw_signals[0]\n for first_index in np.arange(0, len(sig_memmaps), n_channels):\n new_sig_memmaps.append(\n sig_memmaps[first_index:first_index + n_channels])\n self._raw_signals = new_sig_memmaps\n\n self.logger.debug('New number of segments: {}'.format(\n self.info['n_episodes']))\n\n return", "def segment(args):\n from jcvi.formats.base import SetFile\n\n p = OptionParser(segment.__doc__)\n p.add_option(\n \"--chain\",\n default=1,\n 
type=\"int\",\n help=\"Allow next N genes to be chained\",\n )\n opts, args = p.parse_args(args)\n\n if len(args) != 2:\n sys.exit(not p.print_help())\n\n idsfile, bedfile = args\n bed = Bed(bedfile)\n order = bed.order\n ids = SetFile(idsfile)\n losses = Grouper()\n skip = opts.chain\n for i, a in enumerate(bed):\n a = a.accn\n for j in range(i + 1, i + 1 + skip):\n if j >= len(bed):\n break\n b = bed[j].accn\n if a in ids:\n losses.join(a, a)\n if a in ids and b in ids:\n losses.join(a, b)\n\n losses = list(losses)\n singletons = [x for x in losses if len(x) == 1]\n segments = [x for x in losses if len(x) > 1]\n ns, nm, nt = len(singletons), len(segments), len(losses)\n assert ns + nm == nt\n\n # Summary for all segments\n for x in sorted(singletons) + sorted(segments):\n print(\n \"\\t\".join(\n str(x)\n for x in (\"|\".join(sorted(x)), len(x), estimate_size(x, bed, order))\n )\n )\n\n # Find longest segment stretch\n if segments:\n mx, maxsegment = max([(len(x), x) for x in segments])\n print(\"Longest stretch: run of {0} genes\".format(mx), file=sys.stderr)\n print(\" {0}\".format(\"|\".join(sorted(maxsegment))), file=sys.stderr)\n seg_asize = sum(estimate_size(x, bed, order) for x in segments)\n seg_bsize = sum(\n estimate_size(x, bed, order, conservative=False) for x in segments\n )\n else:\n seg_asize = seg_bsize = 0\n\n sing_asize = sum(estimate_size(x, bed, order) for x in singletons)\n sing_bsize = sum(\n estimate_size(x, bed, order, conservative=False) for x in singletons\n )\n total_asize = sing_asize + seg_asize\n total_bsize = sing_bsize + seg_bsize\n print(\n \"Singleton ({0}): {1} - {2} bp\".format(ns, sing_asize, sing_bsize),\n file=sys.stderr,\n )\n print(\n \"Segment ({0}): {1} - {2} bp\".format(nm, seg_asize, seg_bsize), file=sys.stderr\n )\n print(\n \"Total ({0}): {1} - {2} bp\".format(nt, total_asize, total_bsize),\n file=sys.stderr,\n )\n print(\n \"Average ({0}): {1} bp\".format(nt, (total_asize + total_bsize) / 2),\n file=sys.stderr,\n )", "def test_segment_image(self):\n with Image.open(self.subject) as im:\n image = im.convert(\"RGB\")\n\n segment_generator = image_helper.segment_image(image, 200, 200)\n\n sizes = [\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 200), next(segment_generator)),\n ((200, 50), next(segment_generator)),\n ((200, 50), next(segment_generator)),\n ((200, 50), next(segment_generator)),\n ((200, 50), next(segment_generator))\n ]\n\n for size, segment in sizes:\n self.assertEqual(size, segment.size)", "def get_segment_index(datadb):\n #match in time!!!!\n if cfg.use_saliency:\n segment_index_tar = util.get_time_for_visual(datadb)\n segment_index_tar_future = OrderedDict()\n for key in segment_index_tar.keys():\n segment_index_tar_future[key] = np.array(segment_index_tar[key])+max_encoder_seq_length\n return segment_index_tar,segment_index_tar_future" ]
[ "0.65298843", "0.6119681", "0.5801361", "0.57355803", "0.573119", "0.56613064", "0.56600964", "0.55376303", "0.5510109", "0.5501271", "0.5472551", "0.5463381", "0.5461672", "0.5427262", "0.5396404", "0.5374486", "0.5335036", "0.5311621", "0.53089184", "0.5294616", "0.52887076", "0.5276814", "0.5267482", "0.52413595", "0.5240319", "0.5236007", "0.52303064", "0.52009064", "0.5187432", "0.5173821", "0.51721454", "0.5171812", "0.5166023", "0.5165577", "0.51496047", "0.5125185", "0.5115615", "0.51090515", "0.5101117", "0.50948745", "0.5091153", "0.5089861", "0.50832915", "0.508102", "0.5079471", "0.5073039", "0.5062897", "0.50615764", "0.50420946", "0.503602", "0.50359845", "0.5026256", "0.50202787", "0.50197077", "0.5014879", "0.501363", "0.50104636", "0.5009859", "0.50045747", "0.5004149", "0.500229", "0.49835414", "0.49814832", "0.49812236", "0.49809813", "0.49707505", "0.49674362", "0.4966179", "0.49505565", "0.49499172", "0.4948722", "0.49463093", "0.4941606", "0.49405628", "0.49357283", "0.49357283", "0.49357143", "0.49350598", "0.49331844", "0.4931874", "0.492529", "0.49211487", "0.49162734", "0.49157453", "0.49146143", "0.49133396", "0.4910705", "0.49064657", "0.49054706", "0.49035412", "0.48993888", "0.48974323", "0.4895786", "0.48942167", "0.48933825", "0.48929855", "0.4892216", "0.48913488", "0.4891124", "0.4886425", "0.48835096" ]
0.0
-1
this class describes the optical characteristics of any material or element
def __init__(self, passing_wavelengths=None, reflectivity=None):
    self.passing_wavelengths=passing_wavelengths
    self.reflectivity=reflectivity
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAllAttribute(self):\n\n self.shape_type = OpenMaya.MPlug(self.thisObj, self.iShapeType).asShort()\n self.draw_type = OpenMaya.MPlug(self.thisObj, self.iDrawingType).asShort()\n self.up_axis = OpenMaya.MPlug(self.thisObj, self.iUpAxis).asShort()\n self.xRay = OpenMaya.MPlug(self.thisObj, self.iXRay).asBool()\n self.billBoard = OpenMaya.MPlug(self.thisObj, self.iBillBoard).asBool()\n self.forceRefresh = OpenMaya.MPlug(self.thisObj, self.iForceRefresh).asBool()\n\n plug_edge_color = OpenMaya.MPlug(self.thisObj, self.iEdgeColor)\n self.edge_color = self.getMPoint(plug_edge_color)\n self.edge_opacity = OpenMaya.MPlug(self.thisObj, self.iEdgeOpacity).asFloat()\n\n plug_polygon_color = OpenMaya.MPlug(self.thisObj, self.iPolygonColor)\n self.polygon_color = self.getMPoint(plug_polygon_color)\n self.polygon_opacity = OpenMaya.MPlug(self.thisObj, self.iPolygonOpacity).asFloat()\n\n self.shape_size = OpenMaya.MPlug(self.thisObj, self.iShapeSize).asFloat()\n self.edge_size = OpenMaya.MPlug(self.thisObj, self.iEdgeSize).asFloat()\n\n plug_offset_position = OpenMaya.MPlug(self.thisObj, self.iPositionOffset)\n self.offset_position = self.getMPoint(plug_offset_position)\n plug_offset_rotation = OpenMaya.MPlug(self.thisObj, self.iRotationOffset)\n self.offset_rotation = self.getMPoint(plug_offset_rotation)", "def elemental_descriptor(A1_ion, A2_ion, B_ion):\n ele_A1 = mg.Element(A1_ion)\n ele_A2 = mg.Element(A2_ion)\n ele_B = mg.Element(B_ion)\n ele_O = mg.Element('O') \n # A/B ion oxidation state \n common_oxidation_states_A1 = ele_A1.common_oxidation_states[0]\n common_oxidation_states_A2 = ele_A2.common_oxidation_states[0]\n common_oxidation_states_A = np.mean(common_oxidation_states_A1 + common_oxidation_states_A2)\n common_oxidation_states_B = ele_B.common_oxidation_states[0]\n # ionic radius property\n ionic_radius_A1 = float(str(ele_A1.average_ionic_radius)[:-4])\n ionic_radius_A2 = float(str(ele_A2.average_ionic_radius)[:-4])\n ionic_radius_A = (ionic_radius_A1+ ionic_radius_A2)/2\n ionic_radius_B = float(str(ele_B.average_ionic_radius)[:-4])\n ionic_radius_O = float(str(ele_O.average_ionic_radius)[:-4])\n # Tolerance factor \n TF = (ionic_radius_A + ionic_radius_O)/(np.sqrt(2)*(ionic_radius_B + ionic_radius_O))\n # Octahedral factor\n OF = ionic_radius_B/ionic_radius_O \n # ionic_radius ratios\n ionic_ration_AO = ionic_radius_A / ionic_radius_O\n ionic_ration_BO = ionic_radius_B / ionic_radius_O\n # averaged electronegativity for A and B atoms\n Pauling_electronegativity_A1 = ele_A1.X\n Pauling_electronegativity_A2 = ele_A2.X\n Pauling_electronegativity_A = (Pauling_electronegativity_A1 + Pauling_electronegativity_A2)/2\n Pauling_electronegativity_B = ele_B.X\n Pauling_electronegativity_O = ele_O.X\n # Difference in the electronegativity for A-O and B-O\n Diff_A_O = Pauling_electronegativity_A - Pauling_electronegativity_O\n Diff_B_O = Pauling_electronegativity_B - Pauling_electronegativity_O\n return [common_oxidation_states_A, common_oxidation_states_B, Pauling_electronegativity_A, Pauling_electronegativity_B, TF, OF, ionic_ration_AO, ionic_ration_BO, Diff_A_O, Diff_B_O]", "def __init__(self, folder):\n print \"folder passed is \", folder\n self.folder = folder\n self.geometry = gf.geometry(self.folder)\n self.elements = gf.dictionary_set()\n self.area = np.zeros(shape = (8))\n self.Vol = (self.geometry.properties['span_number']*(self.geometry.properties['span_width']*\n self.geometry.properties['span_height'] + self.geometry.properties['cover_height']\n 
*self.geometry.properties['span_width']/2))\n self.F = np.zeros(shape = (8, 8))\n of.view_factor(self.geometry, self.F, self.area, self.Vol)\n tran = [self.geometry.properties['tra_cover_out'],0.0,0.0,\n self.geometry.properties['tra_sidewall_out'],\n self.geometry.properties['tra_cover_in'],\n self.geometry.properties['tra_sidewall_in'],0.0,0.0]\n emi = [self.geometry.properties['emi_cover_out'],1.0,1.0,\n self.geometry.properties['emi_sidewall_out'],\n self.geometry.properties['emi_cover_in'],\n self.geometry.properties['emi_sidewall_in'],1.0,1.0] \n self.tr, self.em, self.re = of.optictal_prop(tran,emi)\n if ((self.tr + self.em).any() > 1.0):\n print \"error in optical properties\"\n self.T = np.zeros(shape = (2,10))\n self.RH = np.zeros(shape = (2,10))\n # 8 inside,9 outside \n self.qcond = np.zeros(shape = (2,8))\n self.qconv = np.zeros(shape = (2,8))\n self.qrad = np.zeros(shape = (2,8))\n self.j = np.zeros(shape = (2,8))\n self.g = np.zeros(shape = (2,8))\n self.alpha = np.zeros(shape = (2,8))\n deltaT = 300\n RH_in = 0.6\n fg.set_initial_conditions(self.geometry.properties['t_air_inside'],\n 278,\n RH_in,self.T,self.RH , self.geometry.properties['t_air'],self.g,\n self.geometry.properties['sky_temp'])\n self.T, self.j, self.g, self.alpha, self.qrad, self.qconv = fg.solver_T(self.T,self.qrad,self.qconv,self.alpha,self.j,self.g,self.em,self.tr,\n self.geometry.properties['wind_speed'],\n self.F,self.geometry.properties['heat_flux'],1,1.0,self.area,\n self.geometry.properties['rho'],self.geometry.properties['cp'],\n self.Vol,self.geometry.properties['degree_window'],deltaT)", "def get_obj_desc():\n\n attributes = {'results' : ['aperiodic_params_', 'peak_params_',\n 'r_squared_', 'error_',\n '_gaussian_params'],\n 'settings' : ['peak_width_limits', 'max_n_peaks',\n 'min_peak_height', 'peak_threshold',\n 'aperiodic_mode'],\n 'data' : ['power_spectrum', 'freq_range', 'freq_res'],\n 'data_info' : ['freq_range', 'freq_res'],\n 'arrays' : ['freqs', 'power_spectrum', 'aperiodic_params_',\n 'peak_params_', '_gaussian_params'],\n 'model_components' : ['_spectrum_flat', '_spectrum_peak_rm',\n '_ap_fit', '_peak_fit']}\n\n return attributes", "def get_visual_attrib_template():\n return {\"conaffinity\": \"0\", \"contype\": \"0\", \"mass\": \"1e-8\", \"group\": \"1\"}", "def mortality(self):\n pass", "def __init__(self, type, objective, illumination, ccd):\n self.type = type # e.g. 
Olympus iX73\n self.objective = objective\n self.illumination = illumination\n self.ccd = ccd", "def __init__(self,**kwargs):\n self.attr = ['angle','width','height','m','Fg','Fs','Fd','kf','Ff']\n # attributes of the incline in order: angle,width,height, mass,Fg(gravity force),Fs(statical force), Fd (dynamical force),kf(friction coefficient), Ff(friction force)\n self.data = {param: None for param in self.attr}#initialazing data\n self.given_data = set() #set of data given by user\n self.add_data(**kwargs)", "def __init__(self, aperture_model, helptext, fig):\n # location, width, left, right for placing location, width (both)\n # or left/right side\n self.mode = \"\"\n self.aperture_model = aperture_model\n self.aperture_id = None\n self.last_x = None\n self.last_y = None\n self.fig = fig\n self.helptext_area = helptext\n self.helptext_area.text = self.helptext()", "def __init__(self, name=None, materials=None,diameter=None,fluorescence_spectra=None, concentration=None,\n electrophoretic_mobility=None, zeta=None):\n\n self.name = name\n self.materials=materials\n self.concentration=concentration\n self.electrophoretic_mobility=electrophoretic_mobility\n self.zeta=zeta\n self.diameter=diameter\n if diameter:\n k_b = 1.3806e-23\n T=298\n mu=0.001\n self.diffusivity = k_b*T/(6*np.pi*mu*diameter/2)\n\n self.fluorescence_spectra=fluorescence_spectra", "def default(self,MMEL,E):\n self.get_user_settings_from_var(E)\n E_att_sets= MMEL.divide_by_attribute_set()\n print(\"Generic MMOut\")\n for key in E_att_sets:\n #print(\"key:>%s<\" %key)\n #print(self.convert_to_2d(E_att_sets[key]))\n print(self.run_to2d(self.convert_to_2d(E_att_sets[key])))", "def __init__(self):\n super().__init__()\n self.type = 'UniformDiscrete'\n self.dimensionality = 1\n self.distType = 'Discrete'\n self.memory = True", "def __init__(self, diffuse=RGB(1,1,1), Kd=1.0, specular=RGB(1,1,1), Ks=0.0, \n shininess=8.0, Kt=0.0, ior=1.0, name=None):\n \n if name is None:\n name = \"Material %d\" % Material._num_materials\n \n Material._num_materials += 1\n \n self.name = name\n self.diffuse = diffuse\n self.Kd = Kd\n self.specular = specular\n self.Ks = Ks\n self.shininess = shininess\n self.Kt = Kt\n self.ior = ior", "def _init_generate_physical_attributes(self):\n # Prepare these now, for speedier access\n config = self.person.cosmos.config\n year = self.person.cosmos.year\n male = self.person.male\n # Determine age of physical peak, i.e., baseball prime\n self.age_of_physical_peak = config.determine_age_of_physical_peak()\n # Determine handedness\n self.lefty = True if random.random() < config.chance_of_being_left_handed else False\n self.righty = not self.lefty\n self.left_handed = 1.0 if self.lefty else 0.0\n self.right_handed = 1.0 if self.righty else 0.0\n # Determine hustle\n self.hustle = config.determine_hustle()\n # Determine adult height this person will attain, in inches\n if male:\n self.adult_height = normal(\n config.adult_male_height_mean(year=year), config.adult_male_height_sd(year=year)\n )\n else:\n self.adult_height = normal(\n config.adult_female_height_mean(year=year), config.adult_female_height_sd(year=year)\n )\n # Determine this person's BMI TODO BMI INCREASES AS ADULTHOOD PROGRESSES\n if male:\n self.bmi = normal(\n config.young_adult_male_bmi_mean(year=year), config.young_adult_male_bmi_sd(year=year)\n )\n else:\n self.bmi = normal(\n config.young_adult_female_bmi_mean(year=year), config.young_adult_female_bmi_sd(year=year)\n )\n # Determine propensities for coordination, reflexes, agility, 
jumping...\n self.coordination_propensity = config.determine_coordination_propensity()\n self.reflexes_propensity = config.determine_reflexes_propensity(\n coordination_propensity=self.coordination_propensity\n )\n self.agility_propensity = config.determine_agility_propensity()\n self.jumping_propensity = config.determine_jumping_propensity() # Number of inches added/subtracted to base\n # ...and finally footspeed propensity, which is a bit more convoluted to compute\n primitive_coordination = config.determine_primitive_coordination(bmi=self.bmi) if self.bmi > 24 else 1.0\n adult_coordination = primitive_coordination * self.coordination_propensity\n primitive_footspeed = config.determine_primitive_footspeed(\n coordination=adult_coordination, height=self.adult_height\n )\n self.footspeed_propensity = config.determine_footspeed_propensity(primitive_footspeed=primitive_footspeed)\n # Finally, fit these potentials to the person's current age\n self.develop()", "def add_details(self):\n\n if self.co.algorithm == \"vv\":\n algo = \"Verlocity Verlot\"\n if self.co.algorithm == \"rk4o\":\n algo = \"Runge Kutta Forth Order\"\n if self.co.algorithm == \"herm\":\n algo = \"Hermite Fourth Order\"\n\n self.algorithm_title = self.ax.text(\n 1.01, 0.65, \"Algorithm:\", transform=self.ax.transAxes\n )\n self.algorithm_text = self.ax.text(\n 1.01, 0.58, algo, transform=self.ax.transAxes\n )\n self.timestep_text = self.ax.text(\n 1.01, 0.51, \"dt =\" + str(self.co.tstep), transform=self.ax.transAxes\n )\n self.length_softening_distance = self.ax.text(\n 1.01,\n 0.44,\n r\"$\\epsilon$ = \" + str(self.co.epsilon),\n transform=self.ax.transAxes,\n )", "def __init__(self, vs, material):\n self.vs = vs\n self.material = material", "def __init__(self, name=None, zeta=None, concentration=None, index_of_refraction=None, transparency=None, fluorescence_spectra=None,\n permittivity=None, conductivity=None, thickness=None, youngs_modulus=None, poissons_ratio=None,\n density=None, dielectric_strength=None, reaction_site_density=None, Ka=None, Kb=None, width=None, length=None):\n # identity\n self.name = name\n\n # geometry\n self.length = length\n self.width = width\n self.thickness = thickness\n\n # mechanical\n self.density = density\n self.concentration = concentration # For a solid, this is % by volume.\n self.youngs_modulus = youngs_modulus\n self.poissons_ratio = poissons_ratio\n\n # optical\n self.index_of_refraction = index_of_refraction\n self.fluorescence_spectra = fluorescence_spectra\n self.transparency = transparency\n if self.transparency:\n self.reflectivity = 1 / self.transparency\n\n # electrochemical\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n self.zeta = zeta\n self.dielectric_strength = dielectric_strength\n if reaction_site_density:\n self.reaction_site_density = reaction_site_density*1e18 # (#/nm2) surface density of reaction sites: accepts nm2 and converts to m2 (see Squires)\n self.Ka = Ka # reaction equilibrium constant - upper bound\n self.Kb = Kb # reaction equilibrium constant - lower bound", "def __init__(self):\n super().__init__()\n self.dimensionality = 1\n self.distType = 'Continuous'", "def characteristic(self):\n raise NotImplementedError('characteristic()')", "def __init__(self, jsondict=None, strict=True):\n \n self.allergenicIndicator = None\n \"\"\" Whether the substance is a known or suspected allergen.\n Type `bool`. \"\"\"\n \n self.alternate = None\n \"\"\" Indicates an alternative material of the device.\n Type `bool`. 
\"\"\"\n \n self.substance = None\n \"\"\" The substance.\n Type `CodeableConcept` (represented as `dict` in JSON). \"\"\"\n \n super(DeviceDefinitionMaterial, self).__init__(jsondict=jsondict, strict=strict)", "def __init__(self, name, type_name,h, D, alpha):\n self.name = name\n self.type = type_name\n self.length = 0\n self.h = h\n self.diameter = D\n self.alpha = alpha", "def __init__(self):\n self.data = None\n self.decisionTree = {}\n self.enClass = 0\n self.nlClass = 0\n self.listAttributes = [\"Contains-het\", \"Contains-de\", \"Contains-een\", \"Contains-en/aan\", \"Contains-ij\", \"wordLength14\",\n \"Contains-a/an\", \"Contains-are/were\", \"Contains-and\", \"Contains-on/to\", \"Contains-the\"]\n self.infoGain = []\n self.entropy = 0", "def __init__(self):\r\n\r\n super(Metallized, self).__init__()\r\n\r\n # Initialize public scalar attributes.\r\n self.spec_sheet = 0\r\n if self.hazard_rate_type < 3: # MIL-HDBK-217\r\n self.reference_temperature = 358.0", "def __init__(self, type, slen=4, alen=1, lexsize=256):\n # vowels {i,u,e,o} in articulatory features (hi, bk, rd) \\in {-1,0,1}\n self.vowels = N.array(((1.0, 0.0, 0.0),\n (1.0, 1.0, 0.0),\n (0.0, 0.0, 0.0),\n (0.0, 1.0, 0.0)))\n self.vf = {(1.0, 0.0, 0.0): \"i\",\n (1.0, 1.0, 0.0): \"u\",\n (0.0, 0.0, 0.0): \"e\",\n (0.0, 1.0, 0.0): \"o\"}\n self.consonants = list(\"bcdfghjklmnpqrstvwxyz\")\n # acoustic:articulatory mapping fxn for vowel prototypes\n # acoustic reps are F1,F2' pairs, articulatory reps are feature-based\n self.vowel_map = {}\n self.vowel_spread = 0\n self.memory = N.empty((lexsize, slen, 2))\n # each agent has its own articulatory variability\n #TODO: maybe this should be inferred by the learners\n # on the basis of their data?\n self.alpha = N.random.normal(15, 2)\n self.beta = N.random.normal(2, 0.25)\n if self.beta < 1.0:\n self.beta = 1.1\n\n if type == \"learner\":\n self.stems = N.empty((lexsize, 4, 3), dtype=float)\n #self.affixes = N.empty((1,4))\n elif type == \"speaker\":\n tmp = [[x, y, 0.0] for x in [0.0, 1.0] for y in [0.0, 1.0]]\n self.stems = N.array([[a, b, c, d] for a in tmp for b in tmp\n for c in tmp for d in tmp])\n else:\n sys.exit(\"Undefined agent type. Aborting.\")\n # vectorized versions of some fxns\n self.vec_perceive = vectorize(self.perceive)\n self.vec_articulate = vectorize(self.articulate)\n self.vec_acoustify = vectorize(self.acoustify)", "def __init__(self, name=None, species=None, concentration=None, conductivity=None, pH=None, density=None, viscosity=None,\n permittivity=None, temperature=None, valence=1.0):\n # identity\n self.name = name\n\n # electro/chemical\n self.species = species\n self.concentration = concentration # (mmol) = (mmol/L) = (mol/m3)\n self.conductivity = conductivity\n if permittivity:\n self.permittivity = permittivity\n if pH:\n self.pH = pH\n self.c_H = 10**-pH * 1e3 # (mmol) = (mmol/L) = (mol/m3); (concentration of Hydrogen ions (H+)\n self.valence = valence\n\n # mechanical\n self.density = density\n self.viscosity = viscosity\n self.temperature = temperature\n self.diffusivity = 2e-9 # (m^2/s) Diffusivity of KCl in DI water [Soni]", "def __init__(self):\n self.label = \"Logistic regression\"\n self.description = \"This tool is a useful complement to Weights-of-Evidence Calculate Response tool as Logistic Regression does not make the assumption of conditional independence of the evidence with regards to the training sites. Using the evidence and assocaited weights tables, this tool creates the outputs the response and standard deviation rasters. 
The calculations are based on the Gen_Class attribute in the weights table and the type of evidence. Please note that the Logistic Regression tool accepts a maximum of 6,000 unique conditions or it fails. Also note that there is an upper limit of 100,000 unit cells per class in each evidence raster layer. If a class in an evidence raster goes above this, the script contains a function to increase the unit cell size to ensure an upper limit of 100,000. These issues are unable to be fixed due to a hard coded limitation in the Logistic Regression executable sdmlr.exe.\"\n self.canRunInBackground = False\n self.category = \"Weights of Evidence\"", "def __init__(self, strength, constitution, dexterity,\r\n intelligence, wisdom, charisma):\r\n super().__init__(strength, constitution, dexterity,\r\n intelligence, wisdom, charisma)\r\n if self.constitution < 13:\r\n self.constitution = 13\r\n self.hp = (12 + self.conMod)", "def print_help(self):\r\n\t\ttext = \"\\tName: ml_scikit_OPTICS\"\r\n\t\ttext += \"\\n\\t\\tThis machine learning plugin uses scikit-learn's OPTICS algorithm.\\n\"\r\n\t\ttext += \"\\n\\t\\tOptional Parameters:\"\r\n\t\ttext += \"\\n\\t\\t\\tOPTICS_skip_normalization: Do NOT perform normalization (scaling) of data, skip this step.\"\r\n\t\ttext += \"\\n\\t\\t\\OPTICS_eps: Specify eps parameter (default is 1.0).\"\r\n\t\ttext += \"\\n\\t\\t\\OPTICS_min_samples: Specify min_samples parameter (default is 5).\"\r\n#\r\n# OPTICS (with memory complexity n) is an alternative to DBSCAN (with memory complexity n^2)\r\n# which has time complexity n^2 in general with the default max_eps = np.inf. \r\n# We will set max_eps = eps to reduce the run-time.\r\n#\r\n\t\treturn text", "def __init__(self):\n super().__init__()\n self.dimensionality = 1\n self.distType = 'Discrete'\n self.type = 'MarkovCategorical'\n self.steadyStatePb = None # variable containing the steady state probabilities of the Markov Model\n self.transition = None # transition matrix of a continuous time Markov Model", "def description(self):\n return \"Edit <b>apertures</b> interactively\"", "def __init__(self, alias, desc, gender, inroom=False, visible=True,\n convo=False, merchant=False\n ):\n Object.__init__(self, alias, desc, inroom=False, visible=True)\n self.gender = gender\n self.visible = visible\n self.in_room = inroom\n self.has_convo = convo\n self.is_merchant = merchant\n self.inv = {}\n self.equipment = {\n 'head': None,\n 'acc': None,\n 'body': None,\n 'hands': None,\n 'feet': None,\n 'weapon': None,\n 'ring': None,\n 'neck': None\n }\n self.skills = {}\n \"\"\"Create baseline level for skills.\"\"\"\n for key in SKILLS_MASTER_DICT:\n self.skills[key] = 15\n self.skills_counter = {}\n \"\"\"Create counter for succesful skill uses.\"\"\"\n for key in SKILLS_MASTER_DICT:\n self.skills_counter[key] = 0", "def layer_properties(freq_vec, material):\n # name of the material\n material_name = material[0]\n # thickness of the material (reshape with freq shape, in a tuple, to\n # allow the sum with the tuple of material properties)\n thickness = (np.array( [material[1]]*len(freq_vec) ), )\n # check if we have to pass extra arguments for non homogenous material\n if material_name == 'meta':\n param = material[2:]\n else:\n param = ()\n # read/compute material properties\n prop = mat.properties(material_name, freq_vec, *param)\n\n return thickness + prop", "def __init__(self, **kwargs):\n\n polymer_type = \"PE\"\n helice = Helice()\n num_monomers = 30\n tacticity = \"\"\n chiriality = \"\"\n head_tail_defect_ratio = 0\n 
configs = 30\n infinite = False\n\n for key in kwargs:\n if key == \"polymer_type\":\n polymer_type = kwargs[\"polymer_type\"]\n elif key == \"helice\":\n helice = kwargs[\"helice\"]\n elif key == \"num_monomers\":\n num_monomers = kwargs[\"num_monomers\"]\n if is_integer_num(num_monomers):\n if num_monomers < 1:\n raise ValueError(\n \"Number of monomers should be equal or larger than 1\"\n )\n else:\n raise ValueError(\"Number of monomers should be an integer\")\n elif key == \"tacticity\":\n tacticity = kwargs[\"tacticity\"]\n elif key == \"chiriality\":\n chiriality = kwargs[\"chiriality\"]\n elif key == \"head_tail_defect_ratio\":\n head_tail_defect_ratio = kwargs[\"head_tail_defect_ratio\"]\n elif key == \"configs\":\n configs = kwargs[\"configs\"]\n elif key == \"infinite\":\n infinite = kwargs[\"infinite\"]\n else:\n raise KeyError(\n \"Unknown input %s for Chain class\\n Please see help for more information\"\n % key\n )\n\n if polymer_type not in polymer_types:\n raise ValueError(\n polymer_type\n + \" do not exist in our library, please consider using custom feature\"\n )\n self.polymer_type = polymer_types[polymer_type]\n\n if self.polymer_type.helicity:\n self.custom = 0\n else:\n self.custom = 1\n\n if self.custom:\n print(\"Warning: Custom type, only read helice motifs and turns info\")\n self.helice = helice\n\n if not 0 <= (head_tail_defect_ratio) <= 1:\n raise ValueError(\n \"Defect ratio of head to head and tail to tail connections is\",\n head_tail_defect_ratio,\n \"and should be in the range of [0,1]\",\n )\n self.head_tail_defect_ratio = head_tail_defect_ratio\n\n self.unit_num_monomers = 1\n if \"num_monomers\" not in kwargs:\n if infinite:\n num_monomers = 2\n else:\n num_monomers = 1\n\n self.num_monomers = num_monomers\n\n self.tacticity = tacticity\n if self.tacticity:\n if self.tacticity == \"N/A\":\n self.tacticity = \"\"\n else:\n print(\"Warning: Custom type does not have tacticity\")\n self.tacticity = \"\"\n\n self.chiriality = chiriality\n if self.chiriality:\n if self.chiriality == \"N/A\":\n self.chiriality = \"\"\n else:\n print(\"Warning: Custom type does not have chiriality\")\n self.chiriality = \"\"\n\n self.infinite = infinite\n\n else:\n monomer_backbone_atoms = len(self.polymer_type.backbone_atoms)\n\n if helice.atoms % monomer_backbone_atoms:\n raise Exception(\n \"Number of backbone atoms in a motif must be multiple of number of monomer backbone atoms %d\\n\"\n % monomer_backbone_atoms\n )\n if tacticity == \"syndiotactic\":\n multiple = int(monomer_backbone_atoms * 2 / helice.atoms)\n if (multiple * helice.atoms) % (monomer_backbone_atoms * 2):\n raise Exception(\n \"Number of backbone atoms in a motif for syndiotactic configuration must be multiple of twice of \\\n the number of monomer backbone atoms %d\\n\"\n % monomer_backbone_atoms\n * 2\n )\n elif multiple != 1:\n print(\n \"Number of backbone atoms in a motif for syndiotactic configuration should be multiple of twice \\\n of the number of monomer backbone atoms %d\\n\"\n % (monomer_backbone_atoms * 2)\n )\n print(\n \"Trying Helice_%d_%d_%d...\"\n % (\n helice.atoms * multiple,\n helice.motifs,\n helice.turns * multiple,\n )\n )\n helice = Helice(\n helice.atoms * multiple, helice.motifs, helice.turns * multiple\n )\n # else:\n # if monomer_backbone_atoms != helice.atoms:\n # raise ValueError(\"Number of backbone atoms in a motif must be %d\" % helice.atoms)\n helice_backbone_atoms = helice.atoms * helice.motifs\n self.helice = helice\n\n if not 0 <= (head_tail_defect_ratio) <= 1:\n 
raise ValueError(\n \"Defect ratio of head to head and tail to tail connections is\",\n head_tail_defect_ratio,\n \"and should be in the range of [0,1]\",\n )\n self.head_tail_defect_ratio = head_tail_defect_ratio\n\n self.unit_num_monomers = int(helice_backbone_atoms / monomer_backbone_atoms)\n if \"num_monomers\" not in kwargs:\n if infinite:\n if tacticity == \"atactic\" or head_tail_defect_ratio:\n num_monomers = 10 * self.unit_num_monomers\n elif helice_backbone_atoms > 2:\n num_monomers = self.unit_num_monomers\n else:\n num_monomers = 2\n\n if num_monomers < self.unit_num_monomers:\n raise ValueError(\n \"Number of monomers should be equal or larger than %d in order to generate Helice_%s chain.\\nCurrent \\\n number of monomers is %d\"\n % (self.unit_num_monomers, helice, num_monomers)\n )\n\n if infinite:\n if num_monomers % self.unit_num_monomers:\n raise ValueError(\n \"Number of monomers should be multiple of %d in order to generate infinite periodic Helice_%s \\\n chain.\\nCurrent number of monomers is %d\"\n % (self.unit_num_monomers, helice, num_monomers)\n )\n elif num_monomers * monomer_backbone_atoms < 3:\n raise ValueError(\n \"Number of backbone atoms should be more than 2 in order to create infinite periodic \\\n chain.\\nCurrent number of backbone atoms along the periodic chain is %d\\nPlease increate \\\n number of monomers.\"\n % (num_monomers * monomer_backbone_atoms)\n )\n self.num_monomers = num_monomers + 2 if infinite else num_monomers\n\n self.tacticity = tacticity\n if self.tacticity:\n if self.tacticity == \"N/A\":\n self.tacticity = \"\"\n elif self.tacticity not in [\"isotactic\", \"atactic\", \"syndiotactic\"]:\n raise TypeError(\n \"Unknown tacticity, please specify one of the following: isotactic, atactic and syndiotactic\"\n )\n elif not self.polymer_type.side_atom:\n raise ValueError(\"Please specify side_atom\")\n\n self.chiriality = chiriality\n if str(self.helice) in [\"2_1_1\", \"4_1_2\"]:\n self.torsion_seq = [180, 180, 180, 180]\n if self.chiriality:\n self.chiriality = \"\"\n print(\"Zig-zag conformation does not have chiriality\")\n elif str(self.helice) in [\"2_2_1\", \"4_2_2\"]:\n if self.chiriality == \"left\":\n self.torsion_seq = [300, 300, 300, 300]\n elif self.chiriality == \"right\":\n self.torsion_seq = [60, 60, 60, 60]\n else:\n raise ValueError(\"Please specify chiriality: left or right\")\n elif str(self.helice) in [\"2_3_1\", \"4_3_2\"]:\n if self.chiriality == \"left\":\n self.torsion_seq = [180, 300, 180, 300]\n elif self.chiriality == \"right\":\n self.torsion_seq = [60, 180, 60, 180]\n else:\n raise ValueError(\"Please specify chiriality: left or right\")\n elif str(self.helice) == \"4_1_1\":\n self.torsion_seq = [60, 180, 300, 180]\n if self.chiriality:\n self.chiriality = \"\"\n print(\"Helice_4_1_1 conformation does not have chiriality\")\n elif str(self.helice) == \"4_2_1\":\n if self.chiriality == \"left\":\n self.torsion_seq = [180, 180, 300, 300]\n elif self.chiriality == \"right\":\n self.torsion_seq = [60, 60, 180, 180]\n else:\n raise ValueError(\"Please specify chiriality: left or right\")\n elif str(self.helice) == \"4_3_1\":\n if self.chiriality == \"left\":\n if self.helice.sub_type:\n self.torsion_seq = [180, 300, 300, 300]\n else:\n self.torsion_seq = [180, 180, 180, 300]\n elif self.chiriality == \"right\":\n if self.helice.sub_type:\n self.torsion_seq = [60, 60, 60, 180]\n else:\n self.torsion_seq = [60, 180, 180, 180]\n else:\n raise ValueError(\"Please specify chiriality: left or right\")\n else:\n raise 
Exception(\"Helice_%s is currently not supported\" % self.helice)\n\n self.configs = configs\n self.infinite = infinite\n # self.pattern = 0\n self.monomers = []\n self.weights = {}", "def __init__(self):\r\n\r\n super(Bypass, self).__init__()\r\n\r\n # Initialize public scalar attributes.\r\n self.specification = 0 # MIL-C-25 or MIL-C-12889.\r\n self.spec_sheet = 0 #\r\n if self.hazard_rate_type < 3: # MIL-HDBK-217\r\n self.reference_temperature = 358.0\r\n\r\n# def set_attributes(self, values):\r\n \"\"\"\r\n Method to set the Capacitor data model attributes.\r\n\r\n :param tuple values: tuple of values to assign to the instance\r\n attributes.\r\n :return: (_code, _msg); the error code and error message.\r\n :rtype: tuple\r\n \"\"\"", "def show_materials(self):\n print('\\nThe materials with known dielectric properties are:\\n')\n pprint.pprint(mats.Electrical.props)\n# pprint.pprint(mats.Electrical.DIELECTRIC)\n print('\\nThe materials with known loss tangents are:\\n')\n pprint.pprint(mats.Electrical.props)\n# pprint.pprint(mats.Electrical.LOSS_TAN)\n return", "def __init__(self):\n am.AbstractMeasurement.__init__(self)\n self.face_mesh = mp_face_mesh.FaceMesh(\n min_detection_confidence=0.5, min_tracking_confidence=0.5)\n self.drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)", "def __init__(self):\n self.name = \"Osyczka\"\n objectives = [ob_os_1, ob_os_2]\n constraints = [con_os_1, con_os_2, con_os_3, con_os_4, con_os_5, con_os_6]\n decisions = [Decision(0, 10), Decision(0, 10), Decision(1, 5), Decision(0, 6), Decision(1, 5), Decision(0, 10)]\n Model.__init__(self, objectives, constraints, decisions)", "def advanced_properties(self):\n return self._advanced_properties", "def hk_armor(self):\n self.name = \"Holy Knight's Armor\"\n self.rarity = \"Common\"\n self.pdef_value = 40\n self.mdef_value = 10\n self.increase_crit = 0\n self.desc = \"Armor of the Holy Guard, you feel the light flowing.\"", "def attributes(self):", "def show_info(self):\n # attr[0] attr[1]\n attrs = [(self.TYP.value, 'nam'),\n ('Skill', 'skl')]\n # voeg ook alle stats en skills in deze lijst toe.\n for stat in Minimals:\n attrs.append((stat.value, stat.name))\n attrs.append(('Spell Battery', 'cur_bat'))\n for stat in StatType:\n attrs.append((stat.value, stat.name))\n for skill in SkillType:\n attrs.append((skill.value, skill.name))\n\n # nu alle mogelijkheden geladen zijn, ga dan aan de slag met diegene die van toepassing zijn\n attr_list = []\n\n import enum\n for attr in attrs:\n value_of_attr = self.get_value_of(attr[1])\n # uitzondering, 'wht' altijd gewoon weergeven\n if attr[0] == StatType.wht.value:\n # deze uitzondering geldt niet voor weapons en shields.\n if not isinstance(self.get_value_of('skl'), enum.Enum): # niet wanneer 'skl' een waarde heeft\n attr_list.append((attr[0], str(value_of_attr)))\n elif value_of_attr:\n if isinstance(value_of_attr, enum.Enum): # uitzondering alleen voor 'skl'\n value_of_attr = value_of_attr.value\n elif attr[0] == StatType.hit.value: # uitzondering alleen voor 'hit'\n value_of_attr = str(value_of_attr)+\"%\"\n attr_list.append((attr[0], str(value_of_attr)))\n\n return attr_list", "def detail(self):\n return self.uniform(\"detail\",\n self.img_scale * .05,\n self.img_scale * .2)", "def __init__(self, make, model, year):\n self.make = make\n self.model = model\n self.year = year\n self.odometer_reading = 0 #Setting a default value for an Attribute", "def __init__(self, model, zc, rw, influence = None, head_change = -1, strength = 1,\r\n 
drawdown_specified = False, variables = [], priors = []):\r\n \r\n import numpy as np\r\n \r\n self.model = model\r\n model.elementlist.append(self)\r\n \r\n self.variables = variables\r\n self.priors = priors\r\n \r\n if influence is None:\r\n # If no influence radius is specified, set it to twice the model radius\r\n self.influence = 2*self.model.domain_radius\r\n else:\r\n # Otherwise, set it to the user-defined value\r\n self.influence = influence\r\n \r\n # The well's strength defines its effect on the flow field; this is\r\n # overwritten later on to achieve the desired head_change which depends\r\n # on the aquifer parameters\r\n self.strength = strength\r\n \r\n # This is the well's position in terms of complex coordinates\r\n self.zc = zc\r\n if not np.isscalar(self.zc):\r\n self.zc = self.zc[0] + 1j*self.zc[1]\r\n \r\n # The well radius is specified in canonical units\r\n self.rw = rw\r\n \r\n # Check if drawdown specified\r\n self.drawdown_specified = drawdown_specified\r\n \r\n if self.drawdown_specified:\r\n \r\n # Get the head change variable\r\n self.head_change = head_change\r\n \r\n # Adjust the strength so that the desired drawdown is achieved\r\n self.set_potential_target()\r\n \r\n # Check if the prior matches the number of parameters\r\n if len(self.priors) != len(self.variables):\r\n raise Exception('Number of priors must match number of unknown variables. Number of priors: '+str(self.priors)+' / Number of unknown variables: '+str(len(self.variables)))\r\n \r\n # Go through all elements\r\n if len(self.variables) > 0:\r\n # There are some model variables specified\r\n for idx,var in enumerate(self.variables):\r\n self.model.num_params += 1\r\n exec(\"self.model.params += [self.%s]\" % var)\r\n self.model.priors += [self.priors[idx]]\r\n self.model.variables += [var]\r\n if 'name' in list(self.priors[idx].keys()):\r\n self.model.param_names += [self.priors[idx]['name']] \r\n else: \r\n self.model.param_names += ['unknown']", "def artAttrCtx(*args, accopacity: bool=False, activeListChangedProc: Union[AnyStr, bool]=\"\",\n afterStrokeCmd: Union[AnyStr, bool]=\"\", alphaclamp: Union[AnyStr, bool]=\"none\",\n alphaclamplower: Union[float, bool]=0.0, alphaclampupper: Union[float, bool]=1.0,\n attrSelected: Union[AnyStr, bool]=\"\", beforeStrokeCmd: Union[AnyStr, bool]=\"\",\n brushalignment: bool=True, brushfeedback: bool=True, clamp: Union[AnyStr,\n bool]=\"none\", clamplower: Union[float, bool]=0.0, clampupper: Union[float,\n bool]=1.0, clear: bool=True, colorAlphaValue: Union[float, bool]=0.0,\n colorRGBAValue: Union[List[float, float, float, float], bool]=None,\n colorRGBValue: Union[List[float, float, float], bool]=None, colorRamp:\n Union[AnyStr, bool]=\"\", colorfeedback: bool=False, colorfeedbackOverride:\n bool=False, colorrangelower: Union[float, bool]=0.0, colorrangeupper:\n Union[float, bool]=1.0, dataTypeIndex: Union[int, bool]=0, disablelighting:\n bool=False, dragSlider: AnyStr=\"\", duringStrokeCmd: Union[AnyStr, bool]=\"\",\n dynclonemode: bool=True, exists: bool=True, expandfilename: bool=True,\n exportaspectratio: Union[float, bool]=0.0, exportfilemode: Union[AnyStr,\n bool]=\"luminance/rgb\", exportfilesave: AnyStr=\"\", exportfilesizex: Union[int,\n bool]=0, exportfilesizey: Union[int, bool]=0, exportfiletype: Union[AnyStr,\n bool]=\"\", filterNodes: bool=True, history: bool=True, image1: Union[AnyStr,\n bool]=\"\", image2: Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\",\n importfileload: AnyStr=\"\", importfilemode: Union[AnyStr, 
bool]=\"alpha\",\n importreassign: bool=False, interactiveUpdate: bool=True, lastRecorderCmd:\n Union[AnyStr, bool]=\"\", lastStampName: Union[AnyStr, bool]=\"\", lowerradius:\n Union[float, bool]=0.0, makeStroke: Union[int, List[int], bool]=0, mappressure:\n Union[AnyStr, bool]=\"none\", maxvalue: Union[float, bool]=1.0, minvalue:\n Union[float, bool]=0.0, name: AnyStr=\"\", objattrArray: Union[AnyStr, bool]=\"\",\n opacity: Union[float, bool]=1.0, outline: bool=True, outwhilepaint: bool=False,\n paintNodeArray: Union[AnyStr, bool]=\"\", paintattrselected: AnyStr=\"\", paintmode:\n Union[AnyStr, bool]=\"screen\", paintoperationtype: Union[AnyStr, bool]=\"Paint\",\n pickColor: bool=True, pickValue: bool=True, playbackCursor: Union[List[float,\n float], List[List[float, float]], bool]=None, playbackPressure: Union[float,\n List[float], bool]=0.0, preserveclonesource: bool=True, profileShapeFile:\n Union[AnyStr, bool]=\"\", projective: bool=False, radius: Union[float, bool]=1.0,\n rampMaxColor: Union[List[float, float, float], bool]=None, rampMinColor:\n Union[List[float, float, float], bool]=None, record: bool=True, reflection:\n bool=False, reflectionaboutorigin: bool=True, reflectionaxis: Union[AnyStr,\n bool]=\"x\", screenRadius: Union[float, bool]=0.0, selectclonesource: bool=True,\n selectedattroper: Union[AnyStr, bool]=\"absolute\", showactive: bool=True,\n stampDepth: Union[float, bool]=0.0, stampProfile: Union[AnyStr, bool]=\"\",\n stampSpacing: Union[float, bool]=1.0, strokesmooth: Union[AnyStr, bool]=\"\",\n surfaceConformedBrushVertices: bool=True, tablet: bool=True, tangentOutline:\n bool=True, toolOffProc: Union[AnyStr, bool]=\"\", toolOnProc: Union[AnyStr,\n bool]=\"\", useColorRamp: bool=True, useMaxMinColor: bool=True, usepressure:\n bool=False, value: Union[float, bool]=0.0, whichTool: Union[AnyStr, bool]=\"\",\n worldRadius: Union[float, bool]=0.0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def get_description():\n desc = {\"description\": __doc__, \"data\": True}\n desc[\"arguments\"] = [\n dict(\n type=\"station\",\n name=\"station\",\n default=\"IATDSM\",\n label=\"Select Station:\",\n network=\"IACLIMATE\",\n ),\n dict(\n type=\"select\",\n name=\"var\",\n default=\"spi\",\n options=PDICT,\n label=\"Select which metric to plot:\",\n ),\n dict(\n type=\"select\",\n name=\"c\",\n default=\"ncei91\",\n options=PDICT2,\n label=\"Which climatology to use for averages:\",\n ),\n dict(\n type=\"int\",\n name=\"days\",\n default=90,\n label=\"Over how many trailing days to compute the metric?\",\n ),\n ]\n return desc", "def __init__(self, ai, color, bling):\n Base.__init__(self,ai, color, bling)\n self.name = \"Max\"\n self.desc = \"Strong tanks, weak artillery.\"\n self.level1 = 50\n self.level2 = 100\n self.WeaponBonus = 1.2", "def getShortDesc():\n\treturn \"Animator mode\"", "def __init__(self, person):\n self.person = person # The person to which this body belongs\n # Prepare all the attributes that we'll be setting momentarily; these will\n # be instantiated as Feature attributes (that extend Float)\n self.age_of_physical_peak = None\n self.lefty = False # Left-handed\n self.righty = False # Right-handed\n self.left_handed = None # 1.0 if lefty, else 0.0 (represented as a float so that we can cast to Feature)\n self.right_handed = None\n self.hustle = None\n self.height = None\n self.weight = None\n self.adult_height = None\n self.bmi = None # Body mass index\n self.coordination = None\n self.coordination_propensity = None\n 
self.reflexes_propensity = None\n self.agility_propensity = None\n self.jumping_propensity = None\n self.footspeed_propensity = None\n self.reflexes = None\n self.agility = None\n self.vertical = None # Maximum jumping height in inches\n self.vertical_reach = None # Max height (in feet) a person can reach while standing with arm stretched upward\n self.full_speed_seconds_per_foot = None\n self.full_speed_feet_per_second = None\n self.speed_home_to_first = None\n # If you have parents, inherit physical attributes\n if False: # TODO delete after inheritance implemented\n # if self.person.mother:\n self._init_inherit_physical_attributes()\n # Otherwise, generate them from scratch\n else:\n self._init_generate_physical_attributes()", "def __init__(self, color, metal, finish, weight):\n self.color = color\n self.metal = metal\n self.finish = finish\n self.weight = weight\n # All params passed", "def show_properties(self):\n print(\"L:\", self.L)\n print(\"d:\", self.d)\n print(\"D:\", self.D)\n print(\"dtype:\", self.dtype)\n print(\"R[-1]:\", self.R[-1])\n print(\"F[-1]:\", self.F[-1])\n print(\"Cummulated norm C:\", self.normC)", "def __init__(self):\n self.state_dim = 12\n self.measurement_dim = 6", "def __defaults__(self): \n self.tag = 'Constant-property atmosphere'\n self.composition = Data()\n self.composition.gas = 1.0", "def armor(self):\n capacity = self._getAttribute(Attribute.armorCapacity)\n em = self._getAttribute(Attribute.armorEM)\n explosive = self._getAttribute(Attribute.armorExplosive)\n kinetic = self._getAttribute(Attribute.armorKinetic)\n thermal = self._getAttribute(Attribute.armorThermal)\n\n em = 1.0 - em\n explosive = 1.0 - explosive\n kinetic = 1.0 - kinetic\n thermal = 1.0 - thermal\n\n return {\n \"capacity\": capacity,\n \"resists\": {\n \"em\": em,\n \"explosive\": explosive,\n \"kinetic\": kinetic,\n \"thermal\": thermal\n }\n }", "def __init__(self, traits={}, person=None):\n PowerDict.__init__(self, traits)\n Capability.__init__(self, person)", "def getDesc():\n\treturn \"Create an animation showing the dataset with the Animator\"", "def advanced_features(self):\n return self._advanced_features", "def __init__(self, jsondict=None, strict=True):\n \n self.characteristicCodeableConcept = None\n \"\"\" Specific characteristic that is relevant to the administration\n guideline.\n Type `CodeableConcept` (represented as `dict` in JSON). \"\"\"\n \n self.characteristicQuantity = None\n \"\"\" Specific characteristic that is relevant to the administration\n guideline.\n Type `Quantity` (represented as `dict` in JSON). \"\"\"\n \n self.value = None\n \"\"\" The specific characteristic.\n List of `str` items. 
\"\"\"\n \n super(MedicationKnowledgeAdministrationGuidelinesPatientCharacteristics, self).__init__(jsondict=jsondict, strict=strict)", "def __init__(self, elt):\n self.name = elt.get(\"name\")\n self.expansion = elt.find(\"expansion\").text\n self.count = int(elt.find(\"count\").text)\n# print(_(\"We put \") + str(self.count)\\\n# + _(\" card\") + (\"s\" if self.count > 1 else \"\") \\\n# +\" '\" + self.name + _(\"' on the bag.\"))\n \n self.location = None\n self.abilities = []\n for _spec in elt.findall(\"specifications\"):\n self.movemement = _spec.get(\"movemement\")\n self.symbole = SYMBOLS.get(_spec.get(\"dimension\"), \"-\")\n self.color = MOVEMENT.get(self.movemement, BOLD_GREY)\n \n self.description = None\n if elt.find(\"description\") is not None:\n self.description = str(elt.find(\"description\").text)\n for _ability in elt.findall(\"abilities\"):\n name = _ability.get(\"name\")\n if _ability.get(\"value\"):\n name += \" [\" + str(_ability.get(\"value\")) + \"]\"\n self.abilities.append(name)\n\n try:\n self.awareness = int(elt.find(\"awareness\").text)\n except:\n self.awareness = \"-\"\n self.horror_rating, self.horror_damage = \"-\", \"-\"\n self.combat_rating, self.combat_damage = \"-\", \"-\"\n self.toughness = \"-\"\n for _stat in elt.findall(\"combat_stat\"):\n self.toughness = int(_stat.find(\"toughness\").text)\n if _stat.find(\"horror\") is not None:\n if _stat.find(\"horror\").get(\"rating\"):\n self.horror_rating = int(_stat.find(\"horror\").get(\"rating\"))\n if _stat.find(\"horror\").get(\"damage\"):\n self.horror_damage = int(_stat.find(\"horror\").get(\"damage\"))\n \n if _stat.find(\"combat\") is not None:\n if _stat.find(\"combat\").get(\"rating\"):\n self.combat_rating = int(_stat.find(\"combat\").get(\"rating\"))\n if _stat.find(\"combat\").get(\"damage\"):\n self.combat_damage = int(_stat.find(\"combat\").get(\"damage\"))", "def define_material(self):\n\n # Check which class should be called.\n const_eqn = self.config['material']['const_eqn']\n if isclass(const_eqn):\n mat_class = self.config['material']['const_eqn']\n elif const_eqn == 'lin_elastic':\n mat_class = materials.solid_materials.LinearIsoMaterial\n elif const_eqn == 'neo_hookean':\n mat_class = materials.solid_materials.NeoHookeMaterial\n elif const_eqn == 'demiray':\n mat_class = materials.solid_materials.DemirayMaterial\n elif const_eqn == 'fung':\n mat_class = materials.solid_materials.FungMaterial\n elif const_eqn == 'guccione':\n mat_class = materials.solid_materials.GuccioneMaterial\n elif const_eqn == 'holzapfel_ogden':\n mat_class = materials.solid_materials.HolzapfelOgdenMaterial\n elif const_eqn == 'newtonian' or const_eqn == 'stokes':\n mat_class = materials.fluids.NewtonianFluid\n else:\n raise NotImplementedError(\"Shouldn't be in here...\")\n\n # Create an instance of the material class and store\n # as member data.\n try:\n inverse = self.config['formulation']['inverse']\n except KeyError:\n inverse = False\n self._material = mat_class(inverse=inverse,\n **self.config['material'])\n\n return None", "def description(self):\n desc = self.title\n ops = []\n for attribute in self.attributes.all():\n value = attribute.value\n if isinstance(value, list):\n ops.append(\n \"%s = '%s'\" % (attribute.type, (\", \".join([str(v) for v in value])))\n )\n else:\n ops.append(\"%s = '%s'\" % (attribute.type, value))\n if ops:\n desc = \"%s (%s)\" % (desc, \", \".join(ops))\n return desc", "def create_descr(self, attr_name):", "def __init__(self):\n self.size_joint_feature = None", "def 
test():\n\n # get the measure trait class\n from p2.traits.Measure import Measure as measure\n\n\n # a client\n class Component:\n \"\"\"\n Simple class with a measure\n \"\"\"\n\n # declare a measure\n attr = measure()\n\n\n # get the attribute; careful not to trigger the descriptor behavior\n attr = Component.__dict__[\"attr\"]\n # verify it's a measure\n assert isinstance(attr, measure)\n # verify it has the right category name\n assert attr.category == \"measure\"\n # and that the trait predicates have the right values\n assert attr.isBehavior == False\n assert attr.isDerivation == False\n assert attr.isMeasure == True\n assert attr.isProperty == False\n assert attr.isFacility == False\n # all done\n return attr", "def __init__(self, eta, mu, n_feature):\r\n self.eta = 0.09\r\n self.weight = [0.0] * n_feature\r\n self.temp = [0.0] * n_feature\r\n self.mu = 0.0\r\n self.size= n_feature", "def __init__(self):\n self._tyrannosaurus = []\n self._triceratops = []", "def __init__(self):\n self.rho=[]\n self.te=[]\n self.ti=[]\n self.ne=[]\n self.ni=[]\n self.ni1=[]\n self.ni2=[]\n self.ni3=[]\n self.vtor=[]\n self.zeff=[]\n\n self.nion=1\n self.Z=[]\n self.A=[]\n self.coll_mode=[]", "def __init__(self):\n self.label = \"Grand WOFE\"\n self.description = \"From list of Evidence layers generate weights tables and output rasters from Calculate Respons and Logistic Regression.\"\n self.canRunInBackground = False\n self.category = \"Weights of Evidence\"", "def __init__(self):\n super().__init__()\n self.mapping = {}\n self.values = set()\n self.type = 'Categorical'\n self.dimensionality = 1\n self.distType = 'Discrete'\n self.isFloat = False", "def init_meta(self):\n self.meta = {}\n # Required (core)\n self.meta['ra'] = dict(ext=0, card='RA', required_ftypes=['science', 'standard'])\n self.meta['dec'] = dict(ext=0, card='DEC', required_ftypes=['science', 'standard'])\n self.meta['target'] = dict(ext=0, card='OBJECT')\n self.meta['decker'] = dict(ext=0, card='DECKNAME')\n self.meta['binning'] = dict(card=None, compound=True)\n self.meta['mjd'] = dict(ext=0, card='MJD')\n # This may depend on the old/new detector\n self.meta['exptime'] = dict(ext=0, card='ELAPTIME')\n self.meta['airmass'] = dict(ext=0, card='AIRMASS')\n #self.meta['dispname'] = dict(ext=0, card='ECHNAME')\n # Extras for config and frametyping\n self.meta['hatch'] = dict(ext=0, card='HATOPEN')\n self.meta['dispname'] = dict(ext=0, card='XDISPERS')\n self.meta['filter1'] = dict(ext=0, card='FIL1NAME')\n self.meta['echangle'] = dict(ext=0, card='ECHANGL', rtol=1e-3)\n self.meta['xdangle'] = dict(ext=0, card='XDANGL', rtol=1e-3)\n# self.meta['idname'] = dict(ext=0, card='IMAGETYP')\n # NOTE: This is the native keyword. 
IMAGETYP is from KOA.\n self.meta['idname'] = dict(ext=0, card='OBSTYPE')\n self.meta['frameno'] = dict(ext=0, card='FRAMENO')\n self.meta['instrument'] = dict(ext=0, card='INSTRUME')", "def __init__(self):\n self.topology = None\n self.learningRate = None\n self.momentum = None\n self.name = None\n self.size = None\n #self._hiddenActiv_fun_key = None\n #self._outActiv_fun_key = None\n #self.output_activation = None\n #self.hidden_activation = None", "def get_description():\n desc = {\"description\": __doc__}\n desc[\"data\"] = True\n desc[\"arguments\"] = [\n dict(\n type=\"select\",\n options=PDICT3,\n default=\"temps\",\n name=\"v\",\n label=\"Which Variable(s) to Plot\",\n ),\n dict(\n type=\"station\",\n name=\"station1\",\n default=\"IATDSM\",\n label=\"Select First Station:\",\n network=\"IACLIMATE\",\n ),\n dict(\n type=\"select\",\n name=\"c1\",\n label=\"Climatology Source for First Station:\",\n default=\"1951\",\n options=PDICT,\n ),\n dict(\n type=\"station\",\n name=\"station2\",\n default=\"IATDSM\",\n optional=True,\n label=\"Select Second Station (Optional):\",\n network=\"IACLIMATE\",\n ),\n dict(\n type=\"select\",\n name=\"c2\",\n label=\"Climatology Source for Second Station:\",\n default=\"1951\",\n options=PDICT,\n ),\n dict(\n type=\"select\",\n name=\"s\",\n label=\"For difference plot, should smoother be applied:\",\n default=\"0\",\n options=PDICT2,\n ),\n dict(\n type=\"year\",\n min=1880,\n name=\"sy1\",\n default=1991,\n label=\"Inclusive Start Year for First Station Period of Years:\",\n ),\n dict(\n type=\"year\",\n min=1880,\n name=\"ey1\",\n default=2020,\n label=\"Inclusive End Year for First Station Period of Years:\",\n ),\n dict(\n type=\"year\",\n min=1880,\n name=\"sy2\",\n default=1981,\n label=\"Inclusive Start Year for Second Station Period of Years:\",\n ),\n dict(\n type=\"year\",\n min=1880,\n name=\"ey2\",\n default=2010,\n label=\"Inclusive End Year for Second Station Period of Years:\",\n ),\n ]\n return desc", "def __init__(self, scene_type=\"macbeth\", wave=None, name=\"Scene\",\n il=None, fov=1.0, dist=1.0, **kwargs):\n # check inputs\n if il is None:\n il = Illuminant()\n assert isinstance(il, Illuminant), \"il should be Illuminant instance\"\n\n # interpolate for wavelength samples if specified\n if wave is not None:\n il.wave = wave\n\n # Initialize instance attribute to default values\n self.name = name # name of the object\n self.photons = np.array([]) # scene photons\n self.fov = fov # horizontal field of view of the scene in degree\n self.dist = dist # viewing distance in meters\n self.illuminant = il # illuminant\n\n # switch by scene_type\n scene_type = str(scene_type).lower().replace(\" \", \"\")\n if scene_type == \"macbeth\": # macbeth color checker\n if \"patch_size\" in kwargs:\n patch_size = kwargs[\"patch_size\"]\n else:\n patch_size = 16\n if np.isscalar(patch_size):\n patch_size = [patch_size, patch_size]\n self.name = \"Macbeth Color Checker\"\n # load surface reflectance\n surface = np.reshape(spectra_read(\"macbethChart.mat\", self.wave).T, (4, 6, self.wave.size), order=\"F\")\n # compute photons\n self.photons = np.zeros((4*patch_size[0], 6*patch_size[1], self.wave.size))\n for ii in range(self.wave.size):\n self.photons[:, :, ii] = np.kron(surface[:, :, ii], np.ones((patch_size[0], patch_size[1])))\n # multiply by illuminant\n self.photons *= il.photons\n\n elif scene_type == \"noise\": # white noise pattern\n # get scene size\n if \"scene_size\" in kwargs:\n scene_size = kwargs[\"scene_sz\"]\n else:\n scene_size = 
np.array([128, 128])\n self.name = \"White noise\"\n # generate noise pattern and compute photons\n noise_img = np.random.rand(scene_size[0], scene_size[1])\n self.photons = noise_img[:, :, None] * il.photons\n else:\n raise(ValueError, 'Unsupported scene type')", "def __init__(self, units):\n super(BahdanauAttention, self).__init__()\n self.W1 = tf.keras.layers.Dense(units)\n self.W2 = tf.keras.layers.Dense(units)\n self.V = tf.keras.layers.Dense(1)", "def paramDetails(cls):\n return {\n 'dim': (10, 20, 2, 20),\n 'nIter': (1, 10, 2, 5),\n 'lamb': (.1, 1., .1, .05),\n 'alph': (30, 50, 5, 40)\n }", "def get_model_with_properties():\n \n m = ConcreteModel()\n\n # ------------------------------------------------------------------\n # Data\n # ------------------------------------------------------------------\n\n m.np = 25 # Number of possible tays\n m.c = 4 # Number of components\n m.lc = 1 # Light component\n m.hc = 4 # Heavy component\n\n #### Constant parameters\n m.Rgas = 8.314 # Ideal gas constant in J/mol K\n m.Tref = 298.15 # Reference temperature in K\n\n #### Product specifications\n m.xspec_lc = 0.99 # Final liquid composition for methanol (1)\n m.xspec_hc = 0.99 # Fnal liquid composition for butanol (4)\n m.xspec_inter2 = 0.99 # Final liquid composition for ethanol (2)\n m.xspec_inter3 = 0.99 # Final liquid composition for propanol (3)\n m.Ddes = 50 # Final flowrate in distillate in mol/s\n m.Bdes = 50 # Final flowrate in bottoms in mol/s\n m.Sdes = 50 # Final flowrate in side product streams in mol/s\n\n # #### Known initial values\n m.Fi = m.Ddes + m.Bdes + 2 * m.Sdes # Side feed flowrate in mol/s\n m.Vi = 400 # Initial value for vapor flowrate in mol/s\n m.Li = 400 # Initial value for liquid flowrate in mol/s\n\n m.Tf = 358 # Side feed temperature in K\n\n m.Preb = 1.2 # Reboiler pressure in bar\n m.Pbot = 1.12 # Bottom-most tray pressure in bar\n m.Ptop = 1.08 # Top-most tray pressure in bar\n m.Pcon = 1.05 # Condenser pressure in bar\n m.Pf = 1.02\n\n m.rr0 = 0.893 # Internal reflux ratio initial value\n m.bu0 = 0.871 # Internal reflux ratio initial value\n\n\n #### Scaling factors\n m.Hscale = 1e3 \n m.Qscale = 1e-3 \n\n \n #### Constants for the calculation of liquid heat capacity\n m.cpc = {} # Constant 1 for liquid heat capacity \n m.cpc2 = {} # Constant 2 for liquid heat capacity \n m.cpc[1] = m.Rgas \n m.cpc[2] = 1\n m.cpc2['A', 1] = 1 / 100\n m.cpc2['B', 1] = 1 / 1e4\n m.cpc2['A', 2] = 1\n m.cpc2['B', 2] = 1\n\n\n # ------------------------------------------------------------------\n # Physical Properties\n #\n # Notation:\n # MW ........................ molecular weight in g/gmol\n # TB ........................ boiling point temperature in K\n # TC ........................ critical temperature in K\n # PC ........................ critical pressure in bar\n # w ........................ acentric factor\n # lden ...................... liquid density g/m3,\n # dHvap ..................... heat of vaporization in J/mol.\n # vpA, vpB, vpC, and vpD .... vapor pressure constants\n # cpA, cpB, cpC, and cpD .... heat capacity constants J/mol:\n # 1 for liq and 2 for vapor phase\n #\n # Reference A: R.C. Reid, J.M. Prausnitz and B.E. 
Poling,\n # \"The Properties of gases and liquids\", 1987 and 2004 Eds.\n #\n # ------------------------------------------------------------------\n\n m.prop = {} # Properties of components:\n cpL = {} # Ruczika-D method for liquid heat capacity calculation\n # (Reference A, page 6.20)\n sumA = {}\n sumB = {}\n sumC = {}\n cpL['a', 'C(H3)(C)'] = 4.19845\n cpL['b', 'C(H3)(C)'] = -0.312709\n cpL['c', 'C(H3)(C)'] = 0.178609\n cpL['a', 'C(H2)(C2)'] = 2.7345\n cpL['b', 'C(H2)(C2)'] = 0.122732\n cpL['c', 'C(H2)(C2)'] = -0.123482\n cpL['a', 'C(H2)(C)(O)'] = 0.517007\n cpL['b', 'C(H2)(C)(O)'] = 1.26631\n cpL['c', 'C(H2)(C)(O)'] = -0.0939713\n cpL['a', 'O(H)(C)'] = 16.1555\n cpL['b', 'O(H)(C)'] = -11.938\n cpL['c', 'O(H)(C)'] = 2.85117\n cpL['a', 'C(H3)(O)'] = 3.70344\n cpL['b', 'C(H3)(O)'] = -1.12884\n cpL['c', 'C(H3)(O)'] = 0.51239\n sumA[1] = (cpL['a', 'C(H3)(O)']\n + cpL['a', 'O(H)(C)']) \n sumB[1] = (cpL['b', 'C(H3)(O)']\n + cpL['b', 'O(H)(C)'])\n sumC[1] = (cpL['c', 'C(H3)(O)']\n + cpL['c', 'O(H)(C)'])\n sumA[2] = (cpL['a', 'C(H3)(C)']\n + cpL['a', 'C(H2)(C)(O)']\n + cpL['a', 'O(H)(C)'])\n sumB[2] = (cpL['b', 'C(H3)(C)']\n + cpL['b', 'C(H2)(C)(O)']\n + cpL['b', 'O(H)(C)'])\n sumC[2] = (cpL['c', 'C(H3)(C)']\n + cpL['c', 'C(H2)(C)(O)']\n + cpL['c', 'O(H)(C)'])\n sumA[3] = (cpL['a', 'C(H3)(C)']\n + cpL['a', 'C(H2)(C2)']\n + cpL['a', 'C(H2)(C)(O)']\n + cpL['a', 'O(H)(C)'])\n sumB[3] = (cpL['b', 'C(H3)(C)']\n + cpL['b', 'C(H2)(C2)']\n + cpL['b', 'C(H2)(C)(O)']\n + cpL['b', 'O(H)(C)'])\n sumC[3] = (cpL['c', 'C(H3)(C)']\n + cpL['c', 'C(H2)(C2)']\n + cpL['c', 'C(H2)(C)(O)']\n + cpL['c', 'O(H)(C)'])\n sumA[4] = (cpL['a', 'C(H3)(C)']\n + 2 * cpL['a', 'C(H2)(C2)']\n + cpL['a', 'C(H2)(C)(O)']\n + cpL['a', 'O(H)(C)'])\n sumB[4] = (cpL['b', 'C(H3)(C)']\n + 2 * cpL['b', 'C(H2)(C2)']\n + cpL['b', 'C(H2)(C)(O)']\n + cpL['b', 'O(H)(C)'])\n sumC[4] = (cpL['c', 'C(H3)(C)']\n + 2 * cpL['c', 'C(H2)(C2)']\n + cpL['c', 'C(H2)(C)(O)']\n + cpL['c', 'O(H)(C)'])\n\n ## Methanol: component 1\n m.prop[1, 'MW'] = 32.042\n m.prop[1, 'TB'] = 337.7\n m.prop[1, 'TC'] = 512.6\n m.prop[1, 'PC'] = 80.9\n m.prop[1, 'w'] = 0.556\n m.prop[1, 'lden'] = 792e3\n m.prop[1, 'dHvap'] = 38.376e3\n m.prop[1, 'vpA'] = -8.54796\n m.prop[1, 'vpB'] = 0.76982\n m.prop[1, 'vpC'] = -3.10850\n m.prop[1, 'vpD'] = 1.54481\n m.prop[1, 'cpA', 1] = sumA[1]\n m.prop[1, 'cpB', 1] = sumB[1]\n m.prop[1, 'cpC', 1] = sumC[1]\n m.prop[1, 'cpD', 1] = 0\n m.prop[1, 'cpA', 2] = 2.115e1\n m.prop[1, 'cpB', 2] = 7.092e-2\n m.prop[1, 'cpC', 2] = 2.587e-5\n m.prop[1, 'cpD', 2] = -2.852e-8\n\n\n ## Ethanol: component 2\n m.prop[2, 'MW'] = 46.069\n m.prop[2, 'TB'] = 351.4\n m.prop[2, 'TC'] = 513.9\n m.prop[2, 'PC'] = 61.4\n m.prop[2, 'w'] = 0.644\n m.prop[2, 'lden'] = 789.3e3\n m.prop[2, 'dHvap'] = 42.698e3\n m.prop[2, 'vpA'] = -8.51838\n m.prop[2, 'vpB'] = 0.34163\n m.prop[2, 'vpC'] = -5.73683\n m.prop[2, 'vpD'] = 8.32581\n m.prop[2, 'cpA', 1] = sumA[2]\n m.prop[2, 'cpB', 1] = sumB[2]\n m.prop[2, 'cpC', 1] = sumC[2]\n m.prop[2, 'cpD', 1] = 0\n m.prop[2, 'cpA', 2] = 9.014\n m.prop[2, 'cpB', 2] = 2.141e-1\n m.prop[2, 'cpC', 2] = -8.390e-5\n m.prop[2, 'cpD', 2] = 1.373e-9\n\n\n ## Propanol: component 3\n m.prop[3, 'MW'] = 60.096\n m.prop[3, 'TB'] = 370.3\n m.prop[3, 'TC'] = 536.8\n m.prop[3, 'PC'] = 51.7\n m.prop[3, 'w'] = 0.623\n m.prop[3, 'lden'] = 804e3\n m.prop[3, 'dHvap'] = 47.763e3\n m.prop[3, 'vpA'] = -8.05594\n m.prop[3, 'vpB'] = 4.25183e-2\n m.prop[3, 'vpC'] = -7.51296\n m.prop[3, 'vpD'] = 6.89004\n m.prop[3, 'cpA', 1] = sumA[3]\n m.prop[3, 'cpB', 1] = sumB[3]\n 
m.prop[3, 'cpC', 1] = sumC[3]\n m.prop[3, 'cpD', 1] = 0\n m.prop[3, 'cpA', 2] = 2.47\n m.prop[3, 'cpB', 2] = 3.325e-1\n m.prop[3, 'cpC', 2] = -1.855e-4\n m.prop[3, 'cpD', 2] = 4.296e-8\n\n\n ## Butanol: component 4\n m.prop[4, 'MW'] = 74.123\n m.prop[4, 'TB'] = 390.9\n m.prop[4, 'TC'] = 563.1\n m.prop[4, 'PC'] = 44.2\n m.prop[4, 'w'] = 0.593\n m.prop[4, 'lden'] = 810e3\n m.prop[4, 'dHvap'] = 52.607e3\n m.prop[4, 'vpA'] = -8.00756\n m.prop[4, 'vpB'] = 0.53783\n m.prop[4, 'vpC'] = -9.34240\n m.prop[4, 'vpD'] = 6.68692\n m.prop[4, 'cpA', 1] = sumA[4]\n m.prop[4, 'cpB', 1] = sumB[4]\n m.prop[4, 'cpC', 1] = sumC[4]\n m.prop[4, 'cpD', 1] = 0\n m.prop[4, 'cpA', 2] = 3.266\n m.prop[4, 'cpB', 2] = 4.18e-1\n m.prop[4, 'cpC', 2] = -2.242e-4\n m.prop[4, 'cpD', 2] = 4.685e-8\n\n\n return m", "def __init__(self):\r\n self.label = \"ProcessAirQuality\"\r\n self.alias = \"ProcessAirQuality\"\r\n\r\n # List of tool classes associated with this toolbox\r\n self.tools = [AirQuality]", "def __init__(self,det,data, **kwargs):\n self._data = data\n self._name = det\n self._module_attrs = []\n self._book_attrs = []\n if det in data._device_sets:\n if 'desc' in self._det:\n self.desc = self._det['desc']\n else:\n self.desc = self._name\n \n else:\n self.desc = det\n \n self._user_funcs = {}", "def __init__(self, **kwargs):\n Interaction.__init__(self, **kwargs)\n self._demands = None # the resource demanded by this interaction\n self._penalty = None # how to penalize for not meeting demand NOT IMPLEMENTED", "def __init__(self, model = None, cso = None, fast_classification = True, paper = None):\n self.cso = cso #Stores the CSO Ontology\n self.paper = paper #Paper to analyse\n self.model = model #contains the cached model\n self.min_similarity = 0.90 #Initialises the min_similarity\n self.fast_classification = fast_classification # if will use the full model or not\n self.explanation = dict()", "def initializeParameters(self):\r\n\t\tself.input_raster.enabled = True\r\n\t\tself.approach.enabled = True\r\n\t\tself.predefined_pattern.enabled = False\r\n\t\tself.predefined_pattern.value = 'Mexican Hat wavelet'\r\n\t\tself.pattern_workspace.enabled = False\r\n\t\tself.point_matrix_size.enabled = False\r\n\t\tself.point_matrix_size.value = 3\r\n\t\tself.point_vectors.enabled = False\r\n\t\tself.mapping_field.enabled = False\r\n\t\tself.move_to_max.enabled = False\r\n\t\tself.move_to_max_distance.enabled = False\r\n\t\tself.move_to_max_distance.value = 3\r\n\t\tself.mh_iteration.enabled = False\r\n\t\tself.mh_dil_val.enabled = False\r\n\t\tself.mh_dil_val.value = 1\r\n\t\tself.mh_dil_start.value = 0.01\r\n\t\tself.mh_dil_stop.value = 1\r\n\t\tself.mh_dil_step.value = 0.1\r\n\t\tself.mh_dil_start.enabled = False\r\n\t\tself.mh_dil_stop.enabled = False\r\n\t\tself.mh_dil_step.enabled = False\r\n\t\tself.transform.enabled = False\r\n\t\tself.size_of_the_cell.enabled = False\r\n\t\tself.size_of_the_cell.value = 1\r\n\t\tself.output_sim_matrix.enabled = False\r\n\t\tself.output_table.enabled = False\r\n\t\tself.output_raster_workspace.enabled = False", "def getElementProperties():", "def artFluidAttrCtx(*args, accopacity: bool=False, activeListChangedProc: Union[AnyStr,\n bool]=\"\", afterStrokeCmd: Union[AnyStr, bool]=\"\", alphaclamp: Union[AnyStr,\n bool]=\"none\", alphaclamplower: Union[float, bool]=0.0, alphaclampupper:\n Union[float, bool]=1.0, attrSelected: Union[AnyStr, bool]=\"\", autoSave:\n Union[AnyStr, bool]=\"\", beforeStrokeCmd: Union[AnyStr, bool]=\"\",\n brushalignment: bool=True, brushfeedback: bool=True, 
clamp: Union[AnyStr,\n bool]=\"none\", clamplower: Union[float, bool]=0.0, clampupper: Union[float,\n bool]=1.0, clear: bool=True, colorAlphaValue: Union[float, bool]=0.0,\n colorRGBAValue: Union[List[float, float, float, float], bool]=None,\n colorRGBValue: Union[List[float, float, float], bool]=None, colorRamp:\n Union[AnyStr, bool]=\"\", colorfeedback: bool=False, colorfeedbackOverride:\n bool=False, colorrangelower: Union[float, bool]=0.0, colorrangeupper:\n Union[float, bool]=1.0, currentPaintableFluid: Union[AnyStr, bool]=\"\",\n dataTypeIndex: Union[int, bool]=0, delaySelectionChanged: bool=True,\n disablelighting: bool=False, displayAsRender: bool=True, displayVelocity:\n bool=True, doAutoSave: bool=True, dragSlider: AnyStr=\"\", duringStrokeCmd:\n Union[AnyStr, bool]=\"\", dynclonemode: bool=True, exists: bool=True,\n expandfilename: bool=True, exportaspectratio: Union[float, bool]=0.0,\n exportfilemode: Union[AnyStr, bool]=\"luminance/rgb\", exportfilesave:\n AnyStr=\"\", exportfilesizex: Union[int, bool]=0, exportfilesizey: Union[int,\n bool]=0, exportfiletype: Union[AnyStr, bool]=\"\", filterNodes: bool=True,\n history: bool=True, image1: Union[AnyStr, bool]=\"\", image2: Union[AnyStr,\n bool]=\"\", image3: Union[AnyStr, bool]=\"\", importfileload: AnyStr=\"\",\n importfilemode: Union[AnyStr, bool]=\"alpha\", importreassign: bool=False,\n interactiveUpdate: bool=True, lastRecorderCmd: Union[AnyStr, bool]=\"\",\n lastStampName: Union[AnyStr, bool]=\"\", lowerradius: Union[float, bool]=0.0,\n makeStroke: Union[int, List[int], bool]=0, mappressure: Union[AnyStr,\n bool]=\"none\", maxvalue: Union[float, bool]=1.0, minvalue: Union[float,\n bool]=0.0, name: AnyStr=\"\", objattrArray: Union[AnyStr, bool]=\"\", opacity:\n Union[float, bool]=1.0, outline: bool=True, outwhilepaint: bool=False,\n paintNodeArray: Union[AnyStr, bool]=\"\", paintattrselected: AnyStr=\"\",\n paintmode: Union[AnyStr, bool]=\"screen\", paintoperationtype: Union[AnyStr,\n bool]=\"Paint\", pickColor: bool=True, pickValue: bool=True, playbackCursor:\n Union[List[float, float], List[List[float, float]], bool]=None,\n playbackPressure: Union[float, List[float], bool]=0.0, preserveclonesource:\n bool=True, profileShapeFile: Union[AnyStr, bool]=\"\", projective: bool=False,\n property: Union[AnyStr, bool]=\"\", radius: Union[float, bool]=1.0,\n rampMaxColor: Union[List[float, float, float], bool]=None, rampMinColor:\n Union[List[float, float, float], bool]=None, record: bool=True, reflection:\n bool=False, reflectionaboutorigin: bool=True, reflectionaxis: Union[AnyStr,\n bool]=\"x\", rgbValue: Union[List[float, float, float], bool]=None,\n screenRadius: Union[float, bool]=0.0, selectclonesource: bool=True,\n selectedattroper: Union[AnyStr, bool]=\"absolute\", showactive: bool=True,\n stampDepth: Union[float, bool]=0.0, stampProfile: Union[AnyStr, bool]=\"\",\n stampSpacing: Union[float, bool]=1.0, strokesmooth: Union[AnyStr, bool]=\"\",\n surfaceConformedBrushVertices: bool=True, tablet: bool=True,\n tangentOutline: bool=True, toolOffProc: Union[AnyStr, bool]=\"\", toolOnProc:\n Union[AnyStr, bool]=\"\", useColorRamp: bool=True, useMaxMinColor: bool=True,\n useStrokeDirection: bool=True, usepressure: bool=False, value: Union[float,\n bool]=0.0, velocity: Union[List[float, float, float], bool]=None,\n whichTool: Union[AnyStr, bool]=\"\", worldRadius: Union[float, bool]=0.0,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[None, Any]:\n pass", "def __init__(self, p_control = 0, p_treatment = 0, n_control = 0, 
n_treatment = 0, power = None, alpha = 0.05):\n self.p_control = p_control\n self.p_treatment = p_treatment\n\n self.n_control = n_control\n self.n_treatment = n_treatment\n\n self.var_control = 1 * p_control * (1 - p_control)\n self.var_treatment = 1 * p_treatment * (1 - p_treatment)\n\n self.norm_null = None\n self.norm_alt = None\n\n self.binom_null = None\n self.binom_alt = None\n\n self.binom_control = None\n self.binom_treatment = None\n\n self.confidence_control = None\n self.confidence_treatment = None\n\n if n_control > 0 and n_treatment > 0 and p_control > 0 and p_treatment > 0:\n control = self.p_control * self.n_control\n treatment = self.p_treatment * self.n_treatment\n sample = self.n_control + self.n_treatment\n\n self.p_sample = (control + treatment) / sample\n else:\n self.p_sample = None\n\n if power == 1:\n print('Sample size approaches infinity as power approaches 1, so 1 is an invalid power vlaue. Changing power to 0.99.')\n self.power = 0.99\n elif power == 0:\n print('Sample size is undefined at power of 0, so 0 is an invalid power value. Changing power to 0.01.')\n self.power = 0.01\n else:\n self.power = power\n\n self.alpha = alpha\n self.p_value = None", "def __init__(self):\n self.X = None\n self.Y = None\n self.features = None\n self.max = self.min = None\n self._look_up = None\n self.attr_weight = None", "def __init__(self, encut, magmom, ldaul, Uparam, Jparam, name=\"DFTCL_settings\"):\n\n cl_settings = {\"ISPIN\": 2, \"MAGMOM\": magmom, \"SAXIS\": None, \"LSORBIT\": None, \"LNONCOLLINEAR\": None}\n dftu_settings = {\"LDAU\": \".TRUE.\", \"LDAUU\": Uparam, \"LDATYPE\": 2, \"LDAUL\": ldaul, \"LDAUJ\": Jparam , \"LMAXMIMX\": 4}\n InputParameters.__init__(self, name=name, magnetic_settings=cl_settings, hubbard_settings=dftu_settings)\n self.update_electronic_settings(\"encut\", encut)", "def polyChipOff(*args, attraction: Union[float, bool]=0.0, caching: bool=True,\n constructionHistory: bool=True, duplicate: bool=True, gain: Union[float,\n List[float], bool]=1.0, gravity: Union[List[float, float, float], bool]=None,\n gravityX: Union[float, bool]=0.0, gravityY: Union[float, bool]=0.0, gravityZ:\n Union[float, bool]=0.0, keepFacesTogether: bool=True, keepFacetTogether:\n bool=True, localCenter: Union[int, bool]=0, localDirection: Union[List[float,\n float, float], bool]=None, localDirectionX: Union[float, bool]=0.0,\n localDirectionY: Union[float, bool]=0.0, localDirectionZ: Union[float,\n bool]=0.0, localRotate: Union[List[float, float, float], bool]=None,\n localRotateX: Union[float, bool]=0.0, localRotateY: Union[float, bool]=0.0,\n localRotateZ: Union[float, bool]=0.0, localScale: Union[List[float, float,\n float], bool]=None, localScaleX: Union[float, bool]=0.0, localScaleY:\n Union[float, bool]=0.0, localScaleZ: Union[float, bool]=0.0, localTranslate:\n Union[List[float, float, float], bool]=None, localTranslateX: Union[float,\n bool]=0.0, localTranslateY: Union[float, bool]=0.0, localTranslateZ:\n Union[float, bool]=0.0, magnX: Union[float, bool]=0.0, magnY: Union[float,\n bool]=0.0, magnZ: Union[float, bool]=0.0, magnet: Union[List[float, float,\n float], bool]=None, name: AnyStr=\"\", nodeState: Union[int, bool]=0, offset:\n Union[float, bool]=0.0, pivot: Union[List[float, float, float], bool]=None,\n pivotX: Union[float, bool]=0.0, pivotY: Union[float, bool]=0.0, pivotZ:\n Union[float, bool]=0.0, random: Union[float, bool]=0.0, scale: Union[List[float,\n float, float], bool]=None, scaleX: Union[float, bool]=0.0, scaleY: Union[float,\n bool]=0.0, 
scaleZ: Union[float, bool]=0.0, translate: Union[List[float, float,\n float], bool]=None, translateX: Union[float, bool]=0.0, translateY: Union[float,\n bool]=0.0, translateZ: Union[float, bool]=0.0, weight: Union[float, bool]=0.0,\n worldSpace: bool=True, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass", "def properties(self):\n txt = '{} has rank {} and the following properties:'\n print(txt.format(self, self.rank()))\n s = \"\\t- {} {}\"\n print(s.format('irreducible: ', self.is_irreducible()))\n print(s.format('mutation finite: ', self.is_mutation_finite()))\n print(s.format('simply-laced: ', self.is_simply_laced()))\n print(s.format('skew-symmetric: ', self.is_skew_symmetric()))\n print(s.format('finite: ', self.is_finite()))\n if self.is_irreducible():\n print(s.format('affine: ', self.is_affine()))\n print(s.format('elliptic: ', self.is_elliptic()))", "def __init__(self, c, w, t, s):\n self.color = c\n self.weight = w\n self.type = t\n self.size = s", "def __init__(self):\n super().__init__()\n self._supported[\"mixed_states\"] = True\n self._short_name = \"gaussian\"", "def explain(self):\n return self.description + \" ({:.4f} eV/K/atom x {} K x {} atoms)\".format(\n self._adj_per_deg, self.temp, self.n_atoms\n )", "def setup(self, name, chem, conc, Kw, D, x_coord, y_coord, dx, dy, dz): \r\n self.name = name\r\n \r\n self.chem = chem # a list of objectives of the class Chemical\r\n if chem is None:\r\n self.nSpecies = 0\r\n elif type(chem) is not list :\r\n self.nSpecies = 1\r\n else :\r\n self.nSpecies = len(chem) # Number of chemcial species\r\n self.conc = conc # The dependent variable (named 'conc') of interest\r\n self.Kw = Kw\r\n self.D = D\r\n \r\n self.x_coord = x_coord\r\n self.y_coord = y_coord\r\n self.dx = dx\r\n self.dy = dy\r\n self.dz = dz", "def printClassifier(self):\n classifier_info = \"\"\n for att in range(cons.env.format_data.numb_attributes):\n att_info = cons.env.format_data.attribute_info[att]\n if att in self.specified_attributes: #If the attribute was specified in the rule\n i = self.specified_attributes.index(att)\n #-------------------------------------------------------\n # CONTINUOUS ATTRIBUTE\n #-------------------------------------------------------\n if att_info[0]:\n classifier_info += str(self.condition[i][0])+';'+str(self.condition[i][1]) + \"\\t\"\n #-------------------------------------------------------\n # DISCRETE ATTRIBUTE\n #-------------------------------------------------------\n else:\n classifier_info += str(self.condition[i]) + \"\\t\"\n else: # Attribute is wild.\n classifier_info += '#' + \"\\t\"\n #-------------------------------------------------------------------------------\n specificity = len(self.condition) / float(cons.env.format_data.numb_attributes)\n\n if cons.env.format_data.discrete_action:\n classifier_info += str(self.action)+\"\\t\"\n else:\n classifier_info += str(self.action[0])+';'+str(self.action[1])+\"\\t\"\n #------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n classifier_info += '{:.1f}'.format(self.prediction)+\"\\t\"+'{:.2f}'.format(self.error)+\"\\t\"+'{:.2f}'.format(self.fitness)+\"\\t\"+str(self.numerosity)+\"\\t\"+str(self.ga_count)+\"\\t\"\n classifier_info += '{:.1f}'.format(self.mean_actionset_sz)+\"\\t\\t\"+str(self.ga_timestamp)+\"\\t\\t\"+str(self.init_timestamp)+\"\\t\\t\"+'{:.2f}'.format(specificity)+\"\\t\\t\"\n 
classifier_info += '{:.1f}'.format(self.delete_vote)+\"\\t\\t\"+str(self.action_cnt)+\"\\n\"\n\n #------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n return classifier_info", "def __init__(self, info):\n self.val = info[\"prediction\"] * info[\"learning_rate\"]\n self.train_size = info[\"ematrix\"].label.shape[0]\n self.avg_target = np.mean(info[\"ematrix\"].label, axis=0)[0]", "def __init__(self,cosmology, mass_function, halo_physics, kh_vector, mass_bins, volume, kh_min=0, pt_type = 'EFT', pade_resum = True, smooth_density = True, IR_resum = True, npoints = 1000, verb=False):\n\n # Write attributes, if they're of the correct type\n if isinstance(cosmology, Cosmology):\n self.cosmology = cosmology\n else:\n raise TypeError('cosmology input must be an instance of the Cosmology class!')\n if isinstance(mass_function, MassFunction):\n self.mass_function = mass_function\n else:\n raise TypeError('mass_function input must be an instance of the MassFunction class!')\n if isinstance(halo_physics, HaloPhysics):\n self.halo_physics = halo_physics\n else:\n raise TypeError('halo_physics input must be an instance of the HaloPhysics class!')\n\n # Write useful attributes\n self.kh_vector = kh_vector\n self.kh_min = kh_min\n self.mass_bins = mass_bins\n self.N_bins = len(mass_bins)-1\n self.N_k = len(self.kh_vector)\n self.volume = volume\n self.verb = verb\n self.pt_type = pt_type\n self.pade_resum = pade_resum\n self.smooth_density = smooth_density\n self.IR_resum = IR_resum\n self.npoints = npoints\n\n # Generate a power spectrum class with this k-vector\n self.halo_model = HaloModel(cosmology, mass_function, halo_physics, kh_vector, kh_min,verb=self.verb)\n\n # Copy in the MassIntegrals class\n self.mass_integrals = self.halo_model.mass_integrals\n\n if self.cosmology.use_neutrinos:\n if self.verb:\n print(\"Note: massive neutrinos are not implemented in full, so we assume CDM+baryon power spectra here.\")\n print(\"(This will creates only a (subdominant) percent-level error for typical neutrino masses.)\")\n\n # Run some checks\n assert self.mass_bins[0]>=np.power(10.,self.mass_integrals.min_logM_h), 'Minimum bin must be above MassIntegral limit!'\n assert self.mass_bins[-1]<=np.power(10.,self.mass_integrals.max_logM_h), 'Maximum bin must be below MassIntegral limit!'\n\n # Compute linear power for the k-vector\n self.linear_power = self.cosmology.compute_linear_power(self.kh_vector,self.kh_min).copy()", "def metallicity(method, emsystem):\n if method == 'PG16':\n # Requires Hbeta, [OII], [OIII], [NII], [SII]\n R2 = (emsystem.get_emline('[OII] 3726').attrib['flux'] +\n emsystem.get_emline('[OII] 3729').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n R3 = (emsystem.get_emline('[OIII] 4959').attrib['flux'] +\n emsystem.get_emline('[OIII] 5007').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n N2 = (emsystem.get_emline('[NII] 6548').attrib['flux'] +\n emsystem.get_emline('[NII] 6584').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n S2 = (emsystem.get_emline('[SII] 6716').attrib['flux'] +\n emsystem.get_emline('[SII] 6731').attrib['flux']) / emsystem.get_emline('Hbeta').attrib['flux']\n # Proceed\n if np.log10(N2) < -0.6:\n r_val = 7.932 + 0.944*np.log10(R3/R2) + 0.695*np.log10(N2) + \\\n ((0.97 - 0.291*np.log10(R3/R2)) - 0.019*np.log10(N2))*np.log10(R2)\n\n s_val = 8.072 + 0.789*np.log10(R3/S2) + 
0.726*np.log10(N2) + \\\n (1.069 - 0.170*np.log10(R3/S2) +0.022*np.log10(N2))*np.log10(S2)\n else:\n r_val = 8.589 + 0.022*np.log10(R3/R2) + 0.399*np.log10(N2) + \\\n (-0.137 + 0.164*np.log10(R3/R2) + 0.589*np.log10(N2))*np.log10(R2)\n\n s_val = 8.424 + 0.030*np.log10(R3/S2) + 0.751*np.log10(N2) + \\\n (-0.349 + 0.182*np.log10(R3/S2) +0.508*np.log10(N2))*np.log10(S2)\n return r_val.decompose().value, s_val.decompose().value", "def __init__(self):\n\n # self.threshold = 3.\n self.gamma_min = 3\n self.gamma_max = 12\n self.n_samples = 40\n # self.do_plots = False\n # self.do_albedo = True\n # self.verbose = True\n\n self.nbands = 7\n self.bu = np.array([0.004, 0.015, 0.003, 0.004, 0.013, 0.010, 0.006])\n\n # Determine 250 or 500 meters product\n # self.resolution = 500\n\n # self.pixelWidth = 500\n # self.pixelHeight = 500", "def doParametersOfInterest(self):\n\n self.modelBuilder.doVar(\"eAfb[0.6,-0.75,0.75]\");\n self.modelBuilder.doVar(\"eA0[0.05, -1.0, 1.0]\");\n self.modelBuilder.doVar(\"rAfb[1.0,-5.0, 5.0]\");\n self.modelBuilder.doVar(\"rA0[1.0, -5.0, 5.0]\");\n self.modelBuilder.doSet(\"POI\",\"rAfb,rA0\")\n self.modelBuilder.factory_('expr::mAfb(\"@0*@1\",eAfb,rAfb)')\n self.modelBuilder.factory_('expr::mA0(\"(@0*@1)\",eA0,rA0)')\n\n \n self.modelBuilder.factory_('expr::eAlph(\"2.0*@0/(2.0-@0)\",eA0)')\n self.modelBuilder.factory_('expr::eNorm(\"3.0/4.0/(2.0+@0)\",eAlph)')\n self.modelBuilder.factory_('expr::eRAlph(\"@0*@1\",eAlph,eNorm)')\n self.modelBuilder.factory_('expr::eRpl(\"(@0+@1)\",eNorm,eAfb)')\n self.modelBuilder.factory_('expr::eRmn(\"(@0-@1)\",eNorm,eAfb)')\n\n self.modelBuilder.factory_('expr::mAlph(\"2.0*@0/(2.0-@0)\",mA0)')\n self.modelBuilder.factory_('expr::mNorm(\"3.0/4.0/(2.0+@0)\",mAlph)')\n self.modelBuilder.factory_('expr::mRAlph(\"@0*@1\",mAlph,mNorm)')\n self.modelBuilder.factory_('expr::mRpl(\"(@0+@1)\",mNorm,mAfb)')\n self.modelBuilder.factory_('expr::mRmn(\"(@0-@1)\",mNorm,mAfb)')", "def __init__(self, coordinates, emissivity = 0.4): \n\t\tsuper().__init__(coordinates)\n\t\tself.emissivity = emissivity\n\t\tself.type = 'thermometer'", "def view_props(self):\n camera = getattr(self.ui, \"camera\")\n _camera_props = ['camera.%s' % k for k in camera._controls.attrs.keys()]\n surface = getattr(self.ui, \"surface\")\n _subject = list(surface._folders.attrs.keys())[0]\n _surface = getattr(surface, _subject)\n _surface_props = ['surface.{subject}.%s'%k for k in _surface._controls.attrs.keys()]\n _curvature_props = ['surface.{subject}.curvature.brightness',\n 'surface.{subject}.curvature.contrast',\n 'surface.{subject}.curvature.smoothness']\n return _camera_props + _surface_props + _curvature_props", "def __init__(self, *args, **kwds):\n if args or kwds:\n super(RobotDescriptionAGV, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.header is None:\n self.header = std_msgs.msg.Header()\n if self.vehicle_id is None:\n self.vehicle_id = opil_v2.msg.Id()\n if self.left_size is None:\n self.left_size = 0.\n if self.right_size is None:\n self.right_size = 0.\n if self.front_size is None:\n self.front_size = 0.\n if self.rear_size is None:\n self.rear_size = 0.\n if self.min_height is None:\n self.min_height = 0.\n if self.max_height is None:\n self.max_height = 0.\n if self.payload is None:\n self.payload = 0.\n if self.max_pos_x_vel is None:\n self.max_pos_x_vel = 0.\n if self.max_neg_x_vel is None:\n self.max_neg_x_vel = 0.\n if self.max_pos_x_acc is None:\n self.max_pos_x_acc = 0.\n if 
self.max_neg_x_acc is None:\n self.max_neg_x_acc = 0.\n if self.max_pos_y_vel is None:\n self.max_pos_y_vel = 0.\n if self.max_neg_y_vel is None:\n self.max_neg_y_vel = 0.\n if self.max_pos_y_acc is None:\n self.max_pos_y_acc = 0.\n if self.max_neg_y_acc is None:\n self.max_neg_y_acc = 0.\n if self.max_pos_ang_vel is None:\n self.max_pos_ang_vel = 0.\n if self.max_neg_ang_vel is None:\n self.max_neg_ang_vel = 0.\n if self.velocity_control_sensitivity is None:\n self.velocity_control_sensitivity = 0.\n if self.min_turning_radius is None:\n self.min_turning_radius = 0.\n if self.batt_capacity is None:\n self.batt_capacity = 0.\n if self.batt_max_voltage is None:\n self.batt_max_voltage = 0.\n if self.vehicle_type is None:\n self.vehicle_type = ''\n if self.vendor is None:\n self.vendor = ''\n if self.action_capability is None:\n self.action_capability = []\n else:\n self.header = std_msgs.msg.Header()\n self.vehicle_id = opil_v2.msg.Id()\n self.left_size = 0.\n self.right_size = 0.\n self.front_size = 0.\n self.rear_size = 0.\n self.min_height = 0.\n self.max_height = 0.\n self.payload = 0.\n self.max_pos_x_vel = 0.\n self.max_neg_x_vel = 0.\n self.max_pos_x_acc = 0.\n self.max_neg_x_acc = 0.\n self.max_pos_y_vel = 0.\n self.max_neg_y_vel = 0.\n self.max_pos_y_acc = 0.\n self.max_neg_y_acc = 0.\n self.max_pos_ang_vel = 0.\n self.max_neg_ang_vel = 0.\n self.velocity_control_sensitivity = 0.\n self.min_turning_radius = 0.\n self.batt_capacity = 0.\n self.batt_max_voltage = 0.\n self.vehicle_type = ''\n self.vendor = ''\n self.action_capability = []" ]
[ "0.6058424", "0.5873271", "0.5822662", "0.57114613", "0.5669548", "0.5591406", "0.55108035", "0.5495015", "0.54344803", "0.54193187", "0.5389301", "0.5373029", "0.5358729", "0.5333054", "0.5332131", "0.5322646", "0.53215945", "0.5312435", "0.52957547", "0.52953637", "0.5294324", "0.5284092", "0.5255884", "0.52524304", "0.52445966", "0.52068096", "0.5203285", "0.5189514", "0.5186104", "0.51832235", "0.5182095", "0.51820767", "0.51765573", "0.5174449", "0.51723635", "0.51722103", "0.51677114", "0.5160849", "0.5145461", "0.5124554", "0.5124181", "0.5117854", "0.5111978", "0.5111713", "0.510733", "0.51056135", "0.51051044", "0.5093712", "0.5088862", "0.5087442", "0.50865865", "0.5085255", "0.5079771", "0.5078717", "0.50760937", "0.50760627", "0.5071391", "0.5071174", "0.5069027", "0.50662315", "0.50633097", "0.5062421", "0.50617796", "0.50593966", "0.50576985", "0.5055233", "0.505332", "0.5049451", "0.50465405", "0.5036289", "0.5022576", "0.50197124", "0.50180626", "0.50112176", "0.5006883", "0.50067884", "0.5003418", "0.49994045", "0.4988822", "0.49875137", "0.49831608", "0.49804157", "0.49801207", "0.4973679", "0.4972471", "0.49677855", "0.49614447", "0.49582198", "0.49552518", "0.49540517", "0.49493155", "0.49441433", "0.49436063", "0.49424356", "0.4936082", "0.49298292", "0.4925046", "0.4924628", "0.49245", "0.49238098", "0.49191856" ]
0.0
-1
what value was measured and when
def __init__(self, reference_value=None, measured_value=None): self.reference_value = reference_value self.measured_value = measured_value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calc_stat_values(self):", "def measure(self):\n pass", "def observation_value(self):\n pass", "def state(self):\n return self._measure", "def t_measure_estimate(self):\n ho = self.humidity_oversampling\n to = self.temperature_oversampling\n po = self.pressure_oversampling\n typ = 1. + 2.*to + (2.*po + 0.5)*bool(po) + (2.*ho +0.5)*bool(ho)\n mx = 1.25 + 2.3*to + (2.3*po + 0.575)*bool(po) + (2.3*ho +0.575)*bool(ho)\n return typ, mx", "def value(self) -> float:", "def getMeasures():", "def measure(self):\n return self._measure", "def measured(data: Union[Observation, LabelledObservation]) -> float:\n return data[1]", "def value(self) -> float:\n ...", "def value(self) -> float:\n ...", "def value(self) -> float:\n ...", "def getTelemetryValue(self) -> float:\n\t\treturn super().getTelemetryValue()", "def advancedStats():", "def stats(self):", "def state(self):\n appliance = self._coordinator.data[\"appliances\"][self._appliance_id]\n smart_meter = appliance[\"smart_meter\"]\n echonetlite_properties = smart_meter[\"echonetlite_properties\"]\n measured_instantaneous = next(\n value[\"val\"] for value in echonetlite_properties if value[\"epc\"] == 231\n )\n _LOGGER.debug(\"Current state: %sW\", measured_instantaneous)\n return measured_instantaneous", "def last_fmeasure(self):\n return self.get_fvalue(self.last_position())", "def compute_statistics(self):", "def measurements(self) -> NONEARRAY:\n pass", "def val_monitor(self):\n return f\"{self.ACRONYM}@val_fbeta\", \"max\"", "def data_rate(self):\n return self._data_rate", "def duration(self):\r\n return self.t2 - self.t1", "def get_t_value(self):\n return float(self.data[2]) / 10", "def measure_number(self):\n return self._measure_number", "def do_data(self, a):\n print(\"mV: \" + str(ise.mV))", "def measure(self, imgage, previous=None):", "def time(self):\r\n raise NotImplementedError", "def state(self):\n result = self.probe.get_data(SENSOR_TYPES[self.sensor][2])\n round_to = SENSOR_TYPES[self.sensor][3].get(\"round\")\n if round_to is not None:\n result = round(result, round_to)\n return result", "def tvalues(self):\n return self.params / self.bse", "def tvalues(self):\n return self.params / self.bse", "def get_metric(self, data_row: pd.Series) -> float:", "def get_measured_current(self):\n status = self.get_status_response()\n current = status[16] + (status[17] * 0x100) + (status[18] * 0x10000) + (status[19] * 0x1000000)\n current = float(current)\n current /= (1000.0 * 1000.0)\n return current\n #end get_measured_current", "def getDiffusivity(self): # real signature unknown; restored from __doc__\n pass", "def getMeasure(unique_name):", "def getMeasure(unique_name):", "def value(self):\n return 0", "def compute_metrics(self):\n pass", "def value(self):", "def Value(self) -> _n_0_t_14:", "def _get_detection_time_multiplier(self):\n return self.__detection_time_multiplier", "def moments(self):", "def stats(self):\n pass", "def get_measurement(self):\n return self._convert_to_depth(self._avg_sample())", "def result(self):\n return (\"HitRate@\" + str(self.length) + \": \"), (self.hit / self.test)", "def estimate(values, target):\n\n # next time\n # diff(values)\n\n\n return 1.", "def get_pvalue_thd(self):\n terminals_values = []\n for terminal in self.feature_tree.get_terminals():\n temp = self.get_mannwitneyu_pvalue(terminal)\n terminals_values.append(temp)\n if temp == 1:\n print('non siginificant')\n while 0 in terminals_values:\n terminals_values.remove(0)\n self.pvalue_thd = min(self.pvalue_thd,np.mean(terminals_values))\n 
#print('pvalue_thd',self.pvalue_thd)", "def get_performance(self):\n return self.sim.tps", "def getValue(self):\n return self.__diastolic", "def get_value(self):\r\n return 0", "def estimate(self) -> None:\n pass", "def describe_collect(self):\n logger.info(\"describe_collect()\")\n d = dict(\n source = \"elapsed time, s\",\n dtype = \"number\",\n shape = (1,)\n )\n return {\n self.name: {\n \"x\": d\n }\n }", "def test_get_measure_parameters(self):\n pass", "def get_estimate(self):\n if not self.has_samplers():\n self.draw_samplers()\n \n v = np.percentile(self.samplers, [16, 50, 84])\n return v[1], v[2]-v[1], v[1]-v[0]", "def static_metrics(self) -> dict[str, float | int]:\n return self.performance[\"meta\"]", "def get_th_values(self):\n return (\n float(self.data[2]) / 10,\n int(self.data[3]),\n float(self.data[4]) / 10,\n )", "def log_values_and_advantages(self, state):\n advantage = self.model.get_advantage(to_variable(to_cuda(state, self.gpu_device)))\n self.writer.add_histogram(tag='predictions/advantages',\n values=advantage.view(-1),\n global_step=self.total_steps,\n bins='auto')\n value = self.model.get_value(to_variable(to_cuda(state, self.gpu_device)))\n self.writer.add_histogram(tag='predictions/values',\n values=value.view(-1),\n global_step=self.total_steps,\n bins='auto')", "def averageTime(self):\n \n pass", "def get_number_of_measurement(self):\n num_of_meas = 0\n for time in self.mdvtc.keys():\n num_of_meas = num_of_meas + self.mdvtc[time].get_number_of_measurement()\n #\n return num_of_meas", "def info(self):\n out = f\"sec: {self.em_sec()}\\nmin: {self.em_min()}\"\n out += f\"\\nhora: {self.em_hora()}\\ndia: {self.em_dia()}\"\n return out", "def printLatestMeasurement(self): \n data = self.tristimulus[len(self.tristimulus)-1]\n x = data[0]\n y = data[1]\n L = data[2]\n print\"(x,y) = ({0:.4f}, {1:.4f}), L = {2:.4f} cd/m2 ({3:.4f} fL)\".format( x, y, L, 0.291863508*L)", "def getTimes():", "def getTimes():", "def getTimes():", "def value_stats(values):\n stats = describe(values)\n mean = stats.mean\n std = np.sqrt(stats.variance)\n t_stat = t.ppf(1 - 0.025, len(values) - 1)\n dev = t_stat * (std / np.sqrt(len(values)))\n trim_mean_v = trim_mean(values, 0.25)\n upper_val = mean + dev\n lower_val = mean - dev\n\n return mean, trim_mean_v, std, upper_val, lower_val", "def getTime(self):\n return self.step / (self.max_step + int(self.include))", "def getMeasurement(self):\n if(ADConverterSettings.useRealAD):\n print \"Real AD not activated\"\n #return self.adcdac.read_adc_voltage(1)\n else:\n return self.__readMeasurementFromFile()", "def default_metric_value(self) -> float:", "def getETA():", "def getETA():", "def calculateDataRate(self):\n pass", "def get_integration_times( self ):\n if self.inst is not None:\n low = 0.01/ self.inst.line_freq\n else:\n low = 0.01/ 50\n \n high = 1\n \n return ( low, high )", "def summary(self):\n return self.pfm", "def value(self):\n nd1 = super().nd1()\n nd2 = super().nd2()\n _nd1 = 1 - nd1\n _nd2 = 1 - nd2\n f1 = _nd1 * self.s\n f2 = _nd2 * self.x * math.exp(-self.rf * self.t)\n return f2 - f1", "def get(self):\n if self.num_inst == 0:\n return (self.name, float('nan'))\n else:\n return (self.name, self.sum_metric / self.num_inst)", "def var(self):\n\n return time_stat(self, stat=\"var\")", "def value(self):\n if len(self.fscore_history) == 0:\n return 0\n else:\n return np.mean(self.fscore_history)", "def explain(self):\n return self.description + f\" ({self.value:.3f} eV)\"", "def get_timings(self):\n 
exp=lib.is_Exposure_d8(self.hcam,7)*1E-3\n frame_rate=lib.is_SetFrameRate(self.hcam,0x8000)\n return self.AcqTimes(exp,1./frame_rate)", "def _measurement_update(self):\n pass", "def event_stats(self):\n pass", "def stat_values(self):\n return self._stat_values", "def getStati(self):\n raise \"not implemented\"", "def time(self):\n raise NotImplementedError()", "def get_meas_time_current(instrument):\n return float(instrument.query('SENSE:CURR:DC:APER?') )", "def value(self): \r\n c = self.nd1() * self.s * math.exp(-self.div * self.t)\r\n c -= self.nd2() * self.x * math.exp(-self.rf * self.t)\r\n \r\n return c", "def measure(self, recommender):", "def calculate_output(self):", "def calculate(self):", "def unit_of_measurement(self):\n return SENSOR_TYPES[self.type][1]", "def calculate_dataset_metrics(self):\n pass", "def Temp(t):\n return 20 # Need to link to data", "def unit_of_measurement(self):\n return self._metadata[1]", "def _baseline_value(self):\n t = self['primary']\n return np.median(t.data[:int(10e-3/t.dt)])", "def get_stats(self):\n return {'param_noise_stddev': self.current_stddev}", "def raw_measure(self) -> List[int]:\n # name, command, signals, delay\n return self._run_profile((\"raw_measure\", [0x20, 0x50], 2, 0.025))", "def get_measured_outputs_values(self):\n obsOut = numpy.zeros(self.get_num_measured_outputs())\n i = 0\n for o in self.outputs:\n if o.is_measured_output():\n obsOut[i] = o.read_value_in_fmu(self.fmu)\n i += 1\n return obsOut", "def sm_measure_current(self,num_readings=1):\n self.sm.set_measurement_function(\"CURRENT\")\n self.sm.format_readings(\"CURRENT\")\n ret = average(self.sm.take_measurement(num_readings))\n self.sm_restore_display\n return ret", "def measurements(self):\n return self._measurements", "def eta_details(self):\n\t\t# Experimentation gives you 72pts to a random science every production\n\t\t# Stupid brute force implementation for now\n\t\trequired = self.required\n\t\trate = self.player.science\n\t\tdef combine(base, add, add_time, chance):\n\t\t\t# add given add into base with +add_time tick and modified by chance\n\t\t\tfor time, p in add.items():\n\t\t\t\ttime += add_time\n\t\t\t\tp *= chance\n\t\t\t\tbase[time] = base.get(time, 0) + p\n\t\tdef _eta_details(value, time_to_prod=self.galaxy.production_rate):\n\t\t\tnaive_eta = max(0, int(math.ceil((required - value)/rate)))\n\t\t\tif naive_eta <= time_to_prod: return {naive_eta: 1}\n\t\t\tbase = {}\n\t\t\twithout_extra = _eta_details(value + rate*time_to_prod)\n\t\t\twith_extra = _eta_details(value + rate*time_to_prod + 72)\n\t\t\tcombine(base, without_extra, time_to_prod, 6/7.)\n\t\t\tcombine(base, with_extra, time_to_prod, 1/7.)\n\t\t\treturn base\n\t\treturn _eta_details(self.current, self.galaxy.production_rate - self.galaxy.production_counter)", "def value(self):\n if self.buffer:\n return np.mean([ep['l'] for ep in self.buffer])\n else:\n return 0", "def measure_current(self):\n self._raise_not_implemented()" ]
[ "0.66767734", "0.6539252", "0.64366204", "0.6429938", "0.64134157", "0.63915974", "0.63749766", "0.6278977", "0.6275547", "0.61454076", "0.61454076", "0.61454076", "0.61224276", "0.6064027", "0.6003514", "0.59992427", "0.59715676", "0.5941743", "0.58559453", "0.58508056", "0.5837854", "0.5827338", "0.58193344", "0.58162546", "0.5812454", "0.5801404", "0.579807", "0.5775057", "0.57730824", "0.57730824", "0.577132", "0.5755176", "0.5750228", "0.5739937", "0.5739937", "0.57234883", "0.57230294", "0.57208127", "0.5716852", "0.570466", "0.5694739", "0.56918633", "0.56854594", "0.56843776", "0.56769663", "0.56757945", "0.56731963", "0.5671919", "0.5653738", "0.5653405", "0.56529737", "0.5649411", "0.5647539", "0.5624665", "0.5624565", "0.56239945", "0.56191987", "0.5615522", "0.56128997", "0.5606744", "0.5594736", "0.5594736", "0.5594736", "0.55918723", "0.5590143", "0.5572029", "0.5569518", "0.55685884", "0.55685884", "0.5567788", "0.55644536", "0.55636936", "0.5560402", "0.5559307", "0.55536634", "0.55478853", "0.5544635", "0.55435365", "0.5543438", "0.5538677", "0.5536168", "0.5534787", "0.55346084", "0.5533202", "0.5533166", "0.55177826", "0.55130905", "0.55104846", "0.550636", "0.55025417", "0.5492244", "0.5490831", "0.54902554", "0.54885966", "0.5488514", "0.54853463", "0.5483807", "0.54795814", "0.5473702", "0.5472056", "0.54654235" ]
0.0
-1
Object for storing measurements
def __init__(self, value=None, date=None): self.value = value self.date = date
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def measurements(self) -> NONEARRAY:\n pass", "def measurements(self):\n return self._measurements", "def measurements(self):\n return self.config['measurements']", "def getMeasures():", "def getMeasurements(self):\n return self._Measurements", "def _measure():\n return {\n 'type' : 'class',\n 'name' : 'measure',\n 'base' : None,\n 'is_abstract' : False,\n 'doc' : None,\n 'properties' : [\n ('description', 'str', '0.1', None),\n ('identification', 'str', '0.1', None),\n ('name', 'str', '0.1', None),\n ],\n 'decodings' : [\n ('description', 'child::cim:measureDescription'),\n ('identification', 'child::cim:measureIdentification/gmd:code/gco:CharacterString'),\n ('name', 'child::cim:nameOfMeasure'),\n\n # Hacks due to DKRZ misimplementation.\n ('description', 'parent::cim:report/child::gmd:measureDescription/gco:CharacterString'),\n ('name', 'parent::cim:report/child::gmd:nameOfMeasure/gco:CharacterString'),\n ]\n }", "def getAllMeasurement(self): \n return self.measurement", "def get_measurements(self):\n metrics = {}\n for key in self.fields.keys():\n metrics[key] = []\n # What's in output:\n # proc_pid date virt res shrd cpu mem power gpus_power\n while not self.queue.empty():\n data = self.queue.get().strip().split()\n for field in self.fields:\n tp = self.fields[field]['type']\n idx = self.fields[field]['index']\n count = self.fields[field]['count']\n if count == -1:\n metrics[field].append(ResourceMonitor.str_to_type(data[idx], tp))\n elif count == 0:\n metrics[field].append([ResourceMonitor.str_to_type(data[idx], tp)])\n else:\n metrics[field].append([\n ResourceMonitor.str_to_type(data[index], tp) for index in xrange(idx, idx+count)\n ])\n return metrics", "def measurements(self):\n # get available measurement types for this node\n measurement_types = self.measurement_types()\n\n # retrieve measurement for each type\n return list(self.measurement(t) for t in measurement_types)", "def _measurement_update(self):\n pass", "def measure(self, lastMeasure=None, m=None):\n if m is None:\n m = {}\n m['_time'] = time.time()\n if lastMeasure is not None:\n m['_stepDuration'] = time.time() - lastMeasure['_time']\n else:\n m['_stepDuration'] = time.time() - self._start_t\n self._msr(m)\n return m", "def measure(self):\n pass", "def measurements(self) -> NONEARRAY:\n\n return self._measurements", "def getMeasure(unique_name):", "def getMeasure(unique_name):", "def measurements(self) -> List[Measurement]:\n return self._measurements", "def get_measurement_data(self) -> MeasurementData:\n result = MeasurementDataStructure()\n Utils.check(VL53L1X_C_LIBRARY.VL53L1_GetRangingMeasurementData(self.dev, byref(result)))\n return MeasurementData(result)", "def __init__(self, measure):\n self.measure = measure # Dictionary of the measurement steps\n self.devices = {} # Dictionary holding all the devices\n self.output_devices = [] # List of devices with output capabilities\n self.daqs = {} # Dictionary that holds for each daq the inputs and outputs.\n self.rotation_stages = [] # If there are rotation stages present, they will show up in this list.\n # This short block is going to become useful in the future, when interfacing with a GUI\n for d in self.measure:\n setattr(self, d, self.measure[d])", "def __init__(self):\n self.sensor_value = dict()", "def _config_measurements(self, spec, period):\r\n logging.info(\"Config measurement for spec {0}\".format(spec))\r\n \r\n eq = self._get_equipment()\r\n\r\n measurements=[[],[],[]]\r\n \r\n mplane_param2value={}\r\n for k in spec.parameter_names():\r\n v = 
spec.get_parameter_value(k)\r\n if isinstance(v,float):\r\n v = \"{:.0f}\".format(v)\r\n else:\r\n v = str(v)\r\n mplane_param2value[k] = v\r\n \r\n for meas_type in sorted(self._meas[\"types\"].keys()):\r\n (meas,add2)=self._add_or_update_measurement(eq,meas_type,mplane_param2value,period)\r\n measurements[add2].append(meas)\r\n \r\n return measurements", "def get_measurements_by_time(self):\n data_path = os.path.abspath(\n os.path.join(\n os.path.dirname(os.path.realpath(__file__)),\n \"..\",\n \"data/NVB_rescale_dataset.p\",\n )\n )\n self.log_print([\"Getting experimental data from {}\".format(data_path)])\n self.measurements = pickle.load(open(data_path, \"rb\"))\n return self.measurements", "def __init__(self,\n measure_name_fmix: str,\n measure_name_emis: str,\n database: str):\n super().__init__()\n self._measurements[self.KEY_FMIX] = Measurement(name=measure_name_fmix,\n unit=self.UNIT_FMIX,\n database=database)\n self._measurements[self.KEY_EMIS] = Measurement(name=measure_name_emis,\n unit=self.UNIT_EMIS,\n database=database)", "def unit_of_measurement(self):\n return SENSOR_TYPES[self.type][1]", "def measurement(metric: Metric, **kwargs) -> Measurement:\n measurement = Measurement(metric, **kwargs)\n measurement.update_measurement()\n return measurement", "def measure(self):\n return self._measure", "def __init__(self,units=None):\n self.__units = units", "def unit_of_measurement(self):\n return SENSOR_TYPES[self._type][1]", "def getMeasurement(self):\n if(ADConverterSettings.useRealAD):\n print \"Real AD not activated\"\n #return self.adcdac.read_adc_voltage(1)\n else:\n return self.__readMeasurementFromFile()", "def measure_dict():\n out = base_dict()\n out['mro']['current'] = ['Measure']\n out['name']['current'] = 'Measure'\n ao(out, 'nSamples', 'Integer', 1, readLevel=3)\n ao(out, 'id', 'String', 'Conversion source ID', readLevel=3)\n ao(out, 'uid', 'String', 'Unique ID', readLevel=5)\n ao(out, 'date', 'Date', '00:00:00 01/01/2000', name='Test date')\n ao(out, 'zerotime', 'Float', name='Acquisition starting time', readLevel=4)\n ao(out, 'elapsed', 'Float', name='Test duration', unit='second')\n ao(out, 'operator', 'String', name='Operator')\n return out", "def unit_of_measurement(self):\n return self._metadata[1]", "def unit_of_measurement(self):\n return SENSOR_TYPES[self._type][2]", "def unit_of_measurement(self):\n return SENSOR_TYPES[self._type][2]", "def test_measurment(self):\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"km\"), 6.214)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"m\"), 10.936)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"cm\"), 0.328)\r\n self.assertEqual(Converter.MeasurmentWorldtoUS(10, \"mm\"), 0.394)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"mi\"), 16.093)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"yd\"), 9.144)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"ft\"), 304.8)\r\n self.assertEqual(Converter.MeasurmentUStoWorld(10, \"in\"), 254)", "def get(self):\n return self._measurementController.getMeasurements(), 200", "def unit_of_measurement(self):\n return None", "def unit_of_measurement(self):\n return SENSOR_TYPES[self.sensor][1]", "def unit_of_measurement(self):\n return SENSOR_TYPES[self._type][1] if self._type in SENSOR_TYPES else None", "def store_info(self):\r\n _debug('Protocol: store_info' ) \r\n \r\n #Times\r\n if self.measure_type == '3PL':\r\n self.t_probe_p_s .append(self.t_probe) \r\n self.t_probe_m_s .append(self.t_probe) \r\n if self.measure_type == 
'4PL':\r\n self.t_probe_p_s .append(self.tp) \r\n self.t_probe_m_s .append(self.tm) \r\n \r\n self.t_pulseSequences_s.append(self.t_pulseSequences)\r\n self.t_process_s .append(self.t_process)\r\n #Total, accumulated, times\r\n self.t_tot_pulseSequences_s.append(self.t_tot_pulseSequences) \r\n self.t_tot_process_s .append(self.t_tot_process) \r\n #Rates\r\n self.Gp_guess_s .append(self.Gp_guess) #Mean of gamma+ \r\n self.Gm_guess_s .append(self.Gm_guess) #Mean of gamma- \r\n self.eGp_guess_s .append(self.eGp_guess) #Uncertainty of gamma+\r\n self.eGm_guess_s .append(self.eGm_guess) #Uncertainty of gamma- \r\n self.cov_GpGm_s .append(self.cov_GpGm) #Covariance of gamma- & gamma- \r\n #Other\r\n self.nb_iteration_s.append(self.iter)\r\n self.R_tot_s .append(self.R_tot)", "def unit_of_measurement(self):\n return SENSOR_TYPES[self.variable][1]", "def unit_of_measurement(self):\n return SENSOR_TYPES[self.variable][1]", "def __init__(self, measurement, tags, fields, time_stamp):\n self.measurement = measurement\n self.tags = tags\n self.fields = fields\n self.time = time_stamp", "def test_unit_of_measurement(self):\n for name in self.sensor_dict:\n sensor = self.sensor_dict[name][\"sensor\"]\n assert self.sensor_dict[name][\"units\"] == sensor.unit_of_measurement", "def __init__(self):\n self.vals = []\n self.mins = []", "def measurements(self):\n # TODO: add in empty measurements for assays that have none?\n return self._measure_queryset", "def __init__(self, values: dict):\n self.x = NonNegativeFloat\n self.y = NonNegativeFloat\n self.z = NonNegativeFloat\n self.mass = NonNegativeFloat\n self.time = NonNegativeFloat", "def unit_of_measurement(self):\r\n return self._sensor_cfg[1]", "def unit_of_measurement(self):\r\n return self._sensor_cfg[1]", "def unit_of_measurement(self) -> str:\n return \"objects\"", "def unit_of_measurement(self):\n return self.sensor_type[\"unit\"]", "def getMeasures(unique_name=None):", "def units(self):\n pass", "def set_metrics(self):", "def unit_of_measurement(self):\n return SENSOR_TYPES[self._sensor][0]", "def _add_or_update_measurement(self,eq,meas_type,mplane_param2value,period):\r\n meas = self._pvsr.create_pvsr_object(\"Measurement\")\r\n meas.ParentId = eq.Id\r\n meas.Type = meas_type\r\n if \"index_mplane_name\" in self._meas:\r\n if self._meas[\"index_mplane_name\"] not in mplane_param2value:\r\n raise ValueError(\"Missing {0} value\".format(self._meas[\"index_mplane_name\"]))\r\n meas.Index = mplane_param2value[self._meas[\"index_mplane_name\"]]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n measA = self._pvsr.listMeasurements(meas)\r\n if len(measA) == 0:\r\n if \"index_mplane_name\" not in self._meas:\r\n meas.Index = self._meas[\"name\"]\r\n measA = self._pvsr.listMeasurements(meas)\r\n \r\n add2 = None\r\n \r\n if len(measA) == 0:\r\n #add\r\n if self._verb==mplane.model.VERB_QUERY:\r\n if \"index_mplane_name\" in self._meas:\r\n raise ValueError(\"The measurement does not exists: Index={0}\".format(meas.Index))\r\n else:\r\n raise ValueError(\"The measurement does not exists: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n if \"index_mplane_name\" in self._meas:\r\n if eq.CollectorType == 'c':\r\n meas.DescriptionToShow = mplane_param2value[self._meas[\"index_mplane_name\"]] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n else:\r\n meas.DescriptionToShow = self._meas[\"name\"] + \" \" + self._pvsr_meas_types[meas_type][\"Name\"]\r\n \r\n if \"uda_constants\" in 
self._meas:\r\n for uda,value in self._meas[\"uda_constants\"].items():\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = value\r\n meas.Parameter.append(param)\r\n\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n elif self._uda_name2uda[uda].Required == \"Yes\":\r\n raise ValueError(\"Missing required parameter: {0}\".format(mplane_param))\r\n \r\n logging.info(\"Creating measurement, eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n meas.Switched = \"No\"\r\n meas.RetainRawData = 365\r\n meas.IntervalInSec = period\r\n \r\n add2 = 1\r\n meas = self._pvsr.addMeasurement(meas)\r\n else:\r\n #update\r\n meas = measA[0]\r\n logging.info(\"Measurement already exists: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n \r\n need_mod = False\r\n meas_param_name2value = {}\r\n if hasattr(meas,\"Parameter\"):\r\n for i in range(len(meas.Parameter)):\r\n meas_param_name2value[meas.Parameter[i].Name]=meas.Parameter[i].Value\r\n\r\n if \"check_udas\" in self._meas and self._meas[\"check_udas\"] == False:\r\n pass\r\n else:\r\n for mplane_param,uda in self._mplane2uda.items():\r\n if mplane_param in mplane_param2value and mplane_param2value[mplane_param] != \"\":\r\n if uda not in meas_param_name2value or meas_param_name2value[uda] != mplane_param2value[mplane_param]:\r\n if uda not in meas_param_name2value:\r\n logging.warn(\"Parameter mismatch: {0}: NULL != {1}\".format(uda,mplane_param2value[mplane_param]))\r\n else:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != {2}\".format(uda,meas_param_name2value[uda],mplane_param2value[mplane_param]))\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n del meas.Parameter[index2remove]\r\n need_mod = True\r\n param=self._pvsr.create_pvsr_object(\"Parameter\")\r\n param.Name = uda\r\n param.Value = mplane_param2value[mplane_param]\r\n meas.Parameter.append(param)\r\n else:\r\n if uda in meas_param_name2value:\r\n index2remove=None\r\n for i in range(len(meas.Parameter)):\r\n if meas.Parameter[i].Name == uda:\r\n index2remove = i\r\n break\r\n if index2remove is not None:\r\n logging.warn(\"Parameter mismatch: {0}: {1} != NULL\".format(uda,meas_param_name2value[uda]))\r\n need_mod = True\r\n del meas.Parameter[index2remove]\r\n \r\n if meas.IntervalInSec != period:\r\n need_mod = True\r\n meas.IntervalInSec = period\r\n logging.warn(\"Parameter mismatch: IntervalInSec: {0} != {1}\".format(meas.IntervalInSec,period))\r\n \r\n if need_mod:\r\n if self._verb==mplane.model.VERB_QUERY:\r\n raise ValueError(\"The measurement parameters do not match: Name={0}\".format(meas.DescriptionToShow))\r\n \r\n logging.warn(\"Modifying measurement: eq: {0}, type: {1}, index: {2}, name: {3}\".format(eq.Name,meas.Type,meas.Index,meas.DescriptionToShow))\r\n meas = self._pvsr.modMeasurement(meas)\r\n add2 = 2\r\n else:\r\n add2 = 0\r\n \r\n return (meas,add2)", "def __init__(self) -> None:\n self.metrics = {}\n self.current = None\n self.run = None", "def test_generate_a_measurement_point(self):\n\n item_name, sensor_name = [\"itemA\", \"sensorA\"]\n\n result = 
main.generate_a_measurement_point(\n item_name,\n sensor_name=sensor_name,\n sensor_output_file_dir=\"tests/datafiles/\",\n output_filename=\"sensor_out_valid.txt\")\n assert result[\"measurement\"] == item_name\n assert result[\"tags\"][\"sensor\"] == sensor_name\n assert result[\"fields\"][\"temp\"] == self.expected_temp_val", "def __init__(self):\n super().__init__()\n self.metric = 'FMEASR'", "def __init__(self, markers):\n self.markers = markers\n self.last_time = None # Used to keep track of time between measurements \n self.Q_t = np.eye(2)\n self.R_t = np.eye(3)\n # YOUR CODE HERE", "def getPhysicalSamples(self, **kwargs):\n # initialise chans, startSample and endSample with the whole dataset\n options = self.parseGetDataKeywords(kwargs)\n # get data\n timeData = self.getUnscaledSamples(\n chans=options[\"chans\"],\n startSample=options[\"startSample\"],\n endSample=options[\"endSample\"],\n )\n # Scalars are applied in getUnscaledSamples to convert to mV - this is for ease of calculation and because each data file in the run might have a separate scaling\n # all that is left is to divide by the dipole length in km and remove the average\n for chan in options[\"chans\"]:\n if chan == \"Ex\":\n # multiply by 1000/self.getChanDx same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDx(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance {} km to give mV/km\".format(\n chan, self.getChanDx(chan) / 1000.0\n )\n )\n if chan == \"Ey\":\n # multiply by 1000/self.getChanDy same as dividing by dist in km\n timeData[chan] = 1000 * timeData[chan] / self.getChanDy(chan)\n timeData.addComment(\n \"Dividing channel {} by electrode distance {} km to give mV/km\".format(\n chan, self.getChanDy(chan) / 1000.0\n )\n )\n\n # if remove zeros - False by default\n if options[\"remzeros\"]:\n timeData[chan] = removeZerosChan(timeData[chan])\n # if remove nans - False by default\n if options[\"remnans\"]:\n timeData[chan] = removeNansChan(timeData[chan])\n # remove the average from the data - True by default\n if options[\"remaverage\"]:\n timeData[chan] = timeData[chan] - np.average(\n timeData[chan]\n )\n\n # add comments\n timeData.addComment(\n \"Remove zeros: {}, remove nans: {}, remove average: {}\".format(\n options[\"remzeros\"], options[\"remnans\"], options[\"remaverage\"]\n )\n )\n return timeData", "def get_measurements(self, param):\n return tuple(self.__buffer[param])", "def __measurement_mode(self):\n self.__measurement_modes = {\"DISCRETE\": 120, \"BATCH\": 1200, \"VIDEO\": 1200, \"STREAMING\": 1200}\n try:\n max_len = self.__measurement_modes[self.measurement_mode]\n except KeyError:\n raise KeyError(\"Invalid measurement mode given\")\n\n self.num_chunks = int(self.video_length / self.chunk_length)\n self.max_chunks = int(max_len / self.chunk_length)", "def unit_of_measurement(self):\n return self.var_units", "def get_measurements(self, pipeline, object_name, category):\n result = self.get_object_measurements(pipeline, object_name, category,\n {self.object_name.value: [] })\n return result", "def __init__(self, start, step, size, unit='SECOND'):\n self.unit = unit\n self.start = start\n self.step = step\n self.size = size", "def unit_of_measurement(self):\n return SENSOR_TYPES.get(self._sensor_type)[1]", "def unit_of_measurement(self):\n return SENSOR_TYPES.get(self._sensor_type)[1]", "def measurements():\n measurements_for_displaying = db.session.query(Measurement).all()\n return render_template('measurement/measurements.html', 
measurements=measurements_for_displaying)", "def unit_of_measurement(self) -> Any:\n return TEMP_CELSIUS", "def get_measure_par(input_object):\r\n input_object.measurement_strategy = ui.measurement_strategy.currentIndex()\r\n input_object.len_total = ui.total_length.text()\r\n input_object.frequency = ui.frequency.text()\r\n input_object.num_of_mea = ui.num_of_mea.text()\r\n input_object.len_step = ui.length_step.text()\r\n input_object.time_step = ui.time_step.text()\r\n input_object.temperature = ui.temperature.text()\r\n input_object.humidity = ui.humidity.text()\r\n input_object.na_average_factor = ui.na_average_facotr.value()\r\n input_object.multi_measure = ui.multi_measure.value()\r\n if ui.NA_state.text().strip() != '':\r\n input_object.na_state = ui.NA_state.text().strip()\r\n else:\r\n input_object.na_state = None", "def unit_of_measurement(self):\n return DEVICE_MAP[self._sensor_type][DEVICE_MAP_INDEX.index('UNIT_OF_MEASURE_INDEX')]", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def _fill_meas_result(self,meas,from_time,to_time,meas_data):\r\n input=self._pvsr.create_pvsr_object(\"GetMeasuredValuesInput\")\r\n input.ObjType = \"Measurement\"\r\n input.ObjId = meas.Id\r\n input.From = datetime.datetime.fromtimestamp(from_time)\r\n input.To = datetime.datetime.fromtimestamp(to_time)\r\n logging.info(\"Get values, eq: {0}, type: {1}, index: {2}, name: {3}, {4} -> {5}\".format(self._meas[\"equipment\"],meas.Type,meas.Index,meas.DescriptionToShow,input.From,input.To))\r\n meas_res=self._pvsr.getMeasuredValues(input)\r\n \r\n index2mplane_name={}\r\n multiply = None\r\n if \"first\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[0]=self._meas[\"types\"][meas.Type][\"first\"]\r\n if \"second\" in self._meas[\"types\"][meas.Type]:\r\n index2mplane_name[1]=self._meas[\"types\"][meas.Type][\"second\"]\r\n if \"multiply\" in self._meas[\"types\"][meas.Type]:\r\n multiply=int(self._meas[\"types\"][meas.Type][\"multiply\"])\r\n\r\n if hasattr(meas_res,\"D\"):\r\n for d in meas_res.D:\r\n if d.T not in meas_data:\r\n meas_data[d.T]={}\r\n for index,mplane_name in index2mplane_name.items():\r\n if index < len(d.V):\r\n if multiply is not None:\r\n d.V[index]*=multiply\r\n meas_data[d.T][mplane_name]=d.V[index]\r\n else:\r\n meas_data[d.T][mplane_name]=None", "def __gen_datapoint(self, measurement: str, label: str, value: float) -> dict:\n datapoint = {\n \"measurement\": measurement,\n \"tags\": {\n \"Label\": label,\n \"NodeId\": self.node_id\n },\n \"time\": self.timestamp,\n \"fields\": {\n \"Value\": value\n }\n }\n return datapoint", "def unit_of_measurement(self):\n if self._xfinity_data.unit is not None:\n return self._xfinity_data.unit", "def test_get_measure_parameters(self):\n pass", "def get_units(self,):\n self.UNITS = {'pressure':'Pa',}\n return", "def measure(name, observation):\n if name not in measurements:\n measurements[name] = _StatisticSet()\n measurements[name].observe(observation)", "def __init__(self, instresult, number):\n self.instresult = instresult\n self.number = number\n self.measures = {}", "def unit_of_measurement(self):\n return 'shows'", "def test_mock_datasource_meters(self):\n account1 = self.test_data.accounts[0]\n meter = account1.meters[0]\n self.assertIsInstance(meter, Meter)\n self.assertEqual(meter.PK, 4)\n self.assertEqual(meter.Tariff, \"test_tariff\")\n self.assertEqual(meter.ServiceType, \"test_service_type\")\n self.assertEqual(meter.PODid, \"test_podid\")\n self.assertEqual(meter.MeterNumber, 
\"test_meter_number_1\")\n self.assertEqual(meter.IntervalStart, date(2016, 1, 1))\n self.assertEqual(meter.IntervalEnd, date(2016, 2, 1))\n self.assertEqual(len(meter.charges), 1)\n self.assertEqual(len(meter.usages), 1)", "def getMeter(self):\n return self._Meter", "def unit_of_measurement(self):\n return self.values.primary.units", "def _calculate_custom_data(self):\n if self.limit is not None:\n self.data['pct'] = self.usage * 100.0 / self.limit\n if self.units == 'hours':\n self.time = timedelta(hours=self.usage)\n self.data['name'] = self.id", "def __init__(self,initial_meter_reading, initial_date):\r\n\t\tself.initial_meter_reading = initial_meter_reading\r\n\t\tself.initial_date = initial_date\r\n\t\tself.total_units_consumed = 0\r\n\t\tself.total_amount_spent = 0", "def write_measurement(self, name: str, measurement: dict):\n self._measurements.append(Measurement(name, measurement))", "def __init__(self, name):\n self._name = name # name of this statistics\n self._n = 0 # number of data points\n self._mean = 0 # sample mean\n self._stDev = 0 # sample standard deviation\n self._max = -sys.float_info.max # maximum\n self._min = sys.float_info.max # minimum", "def __init__(self, time, numerator, denominator):\n self.time = time\n self.numerator = numerator\n self.denominator = denominator", "def _get_measurements_with_derived_metrics(self, measurements):\n\n now = time.time()\n\n def metrics_available(*names):\n return all(name in self._event_names and name in measurements\n and name in self._prev_measurements for name in names)\n\n def delta(*names):\n return [measurements[name] - self._prev_measurements[name] for name in names]\n\n # if specific pairs are available calculate derived metrics\n if self._prev_measurements is not None:\n time_delta = now - self._prev_ts\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CYCLES):\n inst_delta, cycles_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CYCLES)\n if cycles_delta > 0:\n measurements[DerivedMetricName.IPC] = float(inst_delta) / cycles_delta\n\n if time_delta > 0:\n measurements[DerivedMetricName.IPS] = float(inst_delta) / time_delta\n\n if metrics_available(MetricName.INSTRUCTIONS, MetricName.CACHE_MISSES):\n inst_delta, cache_misses_delta = delta(MetricName.INSTRUCTIONS,\n MetricName.CACHE_MISSES)\n if inst_delta > 0:\n measurements[DerivedMetricName.CACHE_MISSES_PER_KILO_INSTRUCTIONS] = \\\n float(cache_misses_delta) * 1000 / inst_delta\n\n if metrics_available(MetricName.CACHE_REFERENCES, MetricName.CACHE_MISSES):\n cache_ref_delta, cache_misses_delta = delta(MetricName.CACHE_REFERENCES,\n MetricName.CACHE_MISSES)\n if cache_ref_delta > 0:\n cache_hits_count = cache_ref_delta - cache_misses_delta\n measurements[DerivedMetricName.CACHE_HIT_RATIO] = (\n float(cache_hits_count) / cache_ref_delta)\n\n self._prev_measurements = measurements\n self._prev_ts = now\n\n return measurements", "def unit_of_measurement(self):\n return self._units", "def getTemperatureMeasurements(self):\n # self.board.readline()\n self.stop = False\n times = []\n temps = [[], [], []]\n \n # A synchronisation string containing the characters tx is sent before each set of measurements,\n # we ensure correct reading of the measurements by waiting for this string\n while str(self.board.readline()).strip('b\\'\\\\rn') != 'tx':\n pass\n \n while not self.stop:\n # A synchronisation string containing the characters tx is sent before each set of measurements\n tx = self.board.readline()\n if str(tx).strip('b\\'\\\\rn') == 'tx':\n rawData1 = 
self.board.readline()\n rawData2 = self.board.readline()\n rawData3 = self.board.readline()\n rawData4 = self.board.readline()\n \n \n timeStamp = str(rawData1).strip('b\\'\\\\rn')\n temp1 = str(rawData2).strip('b\\'\\\\rn')\n temp2 = str(rawData3).strip('b\\'\\\\rn')\n temp3 = str(rawData4).strip('b\\'\\\\rn')\n try:\n times.append(float(timeStamp) / 1000)\n temps[0].append(float(temp1) / 128)\n temps[1].append(float(temp2) / 128)\n temps[2].append(float(temp3) / 128)\n # print(f'\\rtime: {float(timeStamp) / 1000:.2f} s, Temperature measured on sensor 1: {float(temp1) / 128:.2f} °C,'\n # f'sensor 2: {float(temp2) / 128:.2f} °C, sensor 3: {float(temp3) / 128:.2f} °C', sep='', end='', flush=True)\n except:\n print(rawData1, rawData2, rawData3, rawData4)\n \n \n if self.stop:\n print('\\nMeasurement finished...')\n \n self.data_stack[self.fetch_kinds[0]] = times\n self.data_stack[self.fetch_kinds[1]] = temps[0]\n self.data_stack[self.fetch_kinds[2]] = temps[1]\n self.data_stack[self.fetch_kinds[3]] = temps[2]\n \n if (len(self.data_stack['Sensor 1 Temp']) != len(times) or len(self.data_stack['Sensor 2 Temp']) != len(times) or len(self.data_stack['Sensor 3 Temp']) != len(times)):\n print(\"Warning: There may be some missing values!\")", "def __init__(self, time=0, uni=None, dmx=None):\n self.time = time\n self.dmx = {}\n if uni and dmx:\n self.dmx[uni] = dmx", "def load_measurements(self):\n # Clear measurementsListWidget\n self.measurementsListWidget.clear()\n\n measurements = self.mgr.obj.measurements\n for i, measurement in enumerate(measurements):\n item = QListWidgetItem('Measurement {:d}: {:s}'.format(i, measurement))\n self.measurementsListWidget.addItem(item)", "def _make_meta(self):\n available_meas_times = list()\n available_intervals = list()\n drill_by = list()\n related = list()\n last_data_set_instance = dict()\n\n if self._data['report_save_historical_instances_ind'] == 'Y':\n # last measurement instance\n res = self._db.Query(\"\"\"SELECT *\n FROM report_data_set_instance\n WHERE\n `element_id`=%s\n AND `segment_value_id` = %s\n ORDER BY measurement_time DESC\n LIMIT 0, 1\"\"\",(self._id, self._segment_value_id))\n if res:\n last_data_set_instance = self._db.record[0]\n last_data_set_instance['measurement_time'] = self._formatter.format_date(last_data_set_instance['measurement_time'])\n\n # available measurement instances\n res = self._db.Query(\"\"\"SELECT *\n FROM report_data_set_instance\n WHERE\n `element_id`=%s\n AND `segment_value_id` = %s\n ORDER BY measurement_time DESC\"\"\",(self._id, self._segment_value_id))\n if res:\n for data_set_instance in self._db.record:\n data_set_instance['measurement_time'] = self._formatter.format_date(data_set_instance['measurement_time'])\n available_meas_times.append(data_set_instance)\n \n\n # get drill by. 
not for this version\n\n # available measurement intervals\n if self._data['report_primary_shared_dimension_id'] is None:\n self._data['report_primary_shared_dimension_id'] = 0\n\n self._db.Query(\"\"\"\n SELECT measurement_interval.*,\n dashboard_element.element_id\n FROM dashboard_element\n LEFT JOIN measurement_interval\n ON measurement_interval.measurement_interval_id = dashboard_element.measurement_interval_id\n WHERE\n (dashboard_element.`element_id`<>%s\n AND dashboard_element.measurement_interval_id <> %s\n AND dashboard_element.shared_measure_id = %s\n AND dashboard_element.`type` = 'internal report'\n AND ifnull(dashboard_element.report_used_for_drill_to_ind,'N') = %s\n AND ifnull(dashboard_element.report_primary_shared_dimension_id,0) = %s\n AND ifnull(dashboard_element.segment_id,0) = %s)\n OR\n dashboard_element.`element_id`=%s\n AND 3=4\n \n GROUP BY measurement_interval.measurement_interval_id\n ORDER BY\n measurement_interval.display_sequence,\n dashboard_element.name ASC\n \"\"\",\n (self._id,\n self._data['measurement_interval_id'],\n self._data['shared_measure_id'],\n self._data['report_used_for_drill_to_ind'],\n self._data['report_primary_shared_dimension_id'],\n self._data['segment_id'],\n self._id))\n\n\n for interval in self._db.record:\n interval['report_data_set_instance_id'] = 0\n available_intervals.append(interval)\n\n # see related\n self._db.Query(\"\"\"SELECT e.*\n FROM dashboard_element_topic det, dashboard_element e\n WHERE e.element_id = det.dashboard_element_id\n AND dashboard_element_id <> %s\n AND e.enabled_ind = 'Y'\n AND topic_id IN (select topic_id from dashboard_element_topic where dashboard_element_id = %s)\n UNION SELECT e.*\n FROM dashboard_element e, metric_drill_to_report m\n WHERE m.metric_element_id = e.element_id\n AND m.report_element_id = %s\n AND e.enabled_ind = 'Y'\n AND ifnull(e.segment_id,0) = %s\n \"\"\", (self._id, self._id, self._id, self._data['segment_id']))\n \n\n for related_element in self._db.record:\n if not related_element['segment_id']:\n related_element['segment_id'] = 0\n if related_element['segment_id'] == self._data['segment_id']:\n related_element['segment_value_id'] = self._segment_value_id\n else:\n related_element['segment_value_id'] = 0\n related.append(related_element)\n\n # elements displayed on the page\n before_dataset = list()\n after_dataset = list()\n \n charts_before_dataset = list()\n charts_after_dataset = list()\n \n \n # dataset table\n dataset_el = OrderedDict()\n dataset_el['element_id'] = ''\n dataset_el['element_type'] = 'dataset'\n dataset_el['element_name'] = ''\n dataset_el['element_desc'] = ''\n dataset_el['placement'] = ''\n dataset_el['sequence'] = 0\n dataset_el['show_ind'] = self._data['show_data_set_table_in_report_ind']\n \n \n # charts\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_chart \n WHERE \n `element_id`= %s\n AND \n (ISNULL(report_data_set_pivot_id)\n OR report_data_set_pivot_id = 0) \n ORDER BY display_sequence ASC\"\"\", (self._id, ))\n for chart in self._db.record:\n chart_el = OrderedDict()\n chart_el['element_id'] = chart['report_data_set_chart_id']\n chart_el['element_type'] = 'chart'\n chart_el['pivot_id'] = 0\n if chart['report_data_set_pivot_id']:\n chart_el['pivot_id'] = chart['report_data_set_pivot_id']\n chart_el['element_name'] = chart['name']\n chart_el['element_desc'] = chart['description']\n chart_el['placement'] = chart['chart_placement']\n chart_el['sequence'] = chart['display_sequence']\n chart_el['show_ind'] = chart['enabled_ind']\n if 
chart_el['placement'] == 'before table': \n charts_before_dataset.append(chart_el)\n else:\n charts_after_dataset.append(chart_el)\n \n # pivots\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_pivot\n WHERE\n `element_id`= %s\n ORDER BY display_sequence ASC\"\"\", (self._id, ))\n for pivot in self._db.record:\n before_pivot = list()\n after_pivot = list()\n #pivot_element = list()\n \n pivot_el = OrderedDict()\n pivot_el['element_id'] = pivot['report_data_set_pivot_id']\n pivot_el['element_type'] = 'pivot'\n pivot_el['element_name'] = pivot['name']\n pivot_el['element_desc'] = ''\n pivot_el['placement'] = pivot['pivot_table_report_placement']\n pivot_el['sequence'] = pivot['display_sequence']\n pivot_el['show_ind'] = pivot['enabled_ind']\n \n # charts\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_chart \n WHERE \n `element_id`= %s\n AND report_data_set_pivot_id = %s \n ORDER BY display_sequence ASC\"\"\",\n (self._id, pivot_el['element_id']))\n for chart in self._db.record:\n chart_el = OrderedDict()\n chart_el['element_id'] = chart['report_data_set_chart_id']\n chart_el['element_type'] = 'chart'\n chart_el['pivot_id'] = 0\n if chart['report_data_set_pivot_id']:\n chart_el['pivot_id'] = chart['report_data_set_pivot_id']\n chart_el['element_name'] = chart['name']\n chart_el['element_desc'] = chart['description']\n chart_el['placement'] = chart['chart_placement']\n chart_el['sequence'] = chart['display_sequence']\n chart_el['show_ind'] = chart['enabled_ind']\n if chart_el['placement'] == 'before table': \n before_pivot.append(chart_el)\n else:\n after_pivot.append(chart_el)\n pivot_element = before_pivot + [pivot_el] + after_pivot \n \n if pivot_el['placement'] == 'before data set':\n before_dataset += pivot_element\n else:\n after_dataset += pivot_element\n elements = charts_before_dataset + before_dataset + [dataset_el] + after_dataset + charts_after_dataset\n \n \n self._jfile.make_current_meta(last_data_set_instance,\n available_meas_times,\n available_intervals,\n drill_by,\n related,\n elements,\n self._segment_values)", "def __init__(\n self,\n name: str,\n source: str,\n value: np.ndarray,\n time: np.ndarray,\n units: str,\n ):\n self.name = name\n self.value = value\n self.time = time\n self.units = units\n super().__init__(source)", "def unit_of_measurement(self):\n return SENSOR_UNITS.get(self._sensor_name, None)", "def unit_of_measurement(self):\n return SENSOR_UNITS.get(self._sensor_name, None)", "def output(self):\n return {\n \"time\": self.time,\n \"dmx\": self.dmx\n }", "def generate_message(self):\n meter = Meter.objects.get_or_create(name=\"4530303237303030303130313334353136\")[0]\n\n measurement = Measurement()\n measurement.meter = meter\n measurement.power_usage_current = random.randint(300,400) / 1000\n measurement.power_usage_total_low = 0\n measurement.power_usage_total_normal = 0\n measurement.power_supply_current = random.randint(300,400) / 1000\n measurement.power_supply_total_low = 0\n measurement.power_supply_total_normal = 0\n measurement.timestamp = datetime.now(pytz.utc)\n\n if(measurement.power_usage_current < measurement.power_supply_current):\n measurement.power_usage_current = 0\n else :\n measurement.power_supply_current = 0\n\n return measurement", "def unit_array(self, values):\n self._data_array.values = values\n self._units = self._data_array.attrs['units'] = str(values.units)", "def __init__(self, dtype, unit='', max_value=None, min_value=None):\n self.dtype = dtype\n self.unit = unit\n self.max_value = max_value\n self.min_value = 
min_value", "def unit_of_measurement(self):\n if self._coordinator.data:\n return self._coordinator.data['unit']\n return None" ]
[ "0.7203409", "0.6870602", "0.68244916", "0.6779195", "0.6685213", "0.6621161", "0.65731907", "0.6567633", "0.656311", "0.65522236", "0.653955", "0.65327895", "0.6467614", "0.6393451", "0.6393451", "0.63583064", "0.6340271", "0.6315247", "0.6288799", "0.62670773", "0.62450945", "0.6218818", "0.62089044", "0.620807", "0.6189735", "0.6164295", "0.61640894", "0.6157975", "0.61479825", "0.6142624", "0.61417407", "0.61417407", "0.61299837", "0.6122011", "0.61166304", "0.60632706", "0.60600334", "0.6053416", "0.60406744", "0.60406744", "0.6032676", "0.60321057", "0.6031913", "0.60194653", "0.6003096", "0.59972566", "0.59972566", "0.5986039", "0.5979557", "0.59725606", "0.59580797", "0.59388584", "0.59350497", "0.5933552", "0.5930869", "0.5927983", "0.59269106", "0.59124106", "0.5908689", "0.59038013", "0.590123", "0.58976585", "0.5888349", "0.5884425", "0.58841795", "0.58841795", "0.58840835", "0.5881451", "0.58792204", "0.5870239", "0.5865213", "0.5864379", "0.5860766", "0.58543533", "0.5838489", "0.58370495", "0.583641", "0.58358574", "0.5834393", "0.5834072", "0.5828339", "0.5826923", "0.5825328", "0.58249724", "0.58084667", "0.58025587", "0.5802386", "0.57833785", "0.57830113", "0.5782527", "0.577522", "0.5752157", "0.5748742", "0.57471776", "0.57444584", "0.57444584", "0.5743986", "0.5734866", "0.57295233", "0.5728327", "0.5724697" ]
0.0
-1
Object for holding electrode configuration details
def __init__(self, material=None, length=None, entrance_length=None): self.material = material self.length = length self.entrance_length = entrance_length
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def configuration():", "def config(self):\n raise NotImplementedError", "def _est_config(self):\n return self._est_method.config", "def config(self):\n pass", "def config(self):\n pass", "def config(self):\n return None", "def get_config(self):\n return {\"name\": self.name, \"tunable\": self.tunable}", "def get_config(self):\n config = {\n \"units\": self.units,\n \"activation\": activations.serialize(self.activation),\n \"recurrent_activation\": activations.serialize(\n self.recurrent_activation\n ),\n \"attention_activation\": activations.serialize(\n self.attention_activation\n ),\n \"use_bias\": self.use_bias,\n \"kernel_initializer\": initializers.serialize(self.kernel_initializer),\n \"recurrent_initializer\": initializers.serialize(\n self.recurrent_initializer\n ),\n \"bias_initializer\": initializers.serialize(self.bias_initializer),\n \"attention_initializer\": initializers.serialize(\n self.attention_initializer\n ),\n \"use_chrono_initialization\": self.unit_forget_bias,\n \"kernel_regularizer\": regularizers.serialize(self.kernel_regularizer),\n \"recurrent_regularizer\": regularizers.serialize(\n self.recurrent_regularizer\n ),\n \"bias_regularizer\": regularizers.serialize(self.bias_regularizer),\n \"activity_regularizer\": regularizers.serialize(\n self.activity_regularizer\n ),\n \"attention_regularizer\": regularizers.serialize(\n self.attention_regularizer\n ),\n \"kernel_constraint\": constraints.serialize(self.kernel_constraint),\n \"recurrent_constraint\": constraints.serialize(\n self.recurrent_constraint\n ),\n \"bias_constraint\": constraints.serialize(self.bias_constraint),\n \"attention_constraint\": constraints.serialize(\n self.attention_constraint\n ),\n \"dropout\": self.dropout,\n \"recurrent_dropout\": self.recurrent_dropout,\n \"return_attention\": self.return_attention,\n }\n base_config = super().get_config()\n del base_config[\"cell\"]\n return dict(list(base_config.items()) + list(config.items()))", "def _get_MindtPy_ECP_config():\n CONFIG = ConfigBlock('MindtPy-GOA')\n\n _add_common_configs(CONFIG)\n _add_ecp_configs(CONFIG)\n _add_oa_cuts_configs(CONFIG)\n _add_subsolver_configs(CONFIG)\n _add_tolerance_configs(CONFIG)\n _add_bound_configs(CONFIG)\n return CONFIG", "def _config(self):\r\n return (\r\n self.destructive,\r\n self.output_type,\r\n self.seed,\r\n )", "def config():", "def config():", "def __init__(self):\n self.config = {}", "def _configure(self):\n dconfig = DConfiguration(self._le2mserv.gestionnaire_graphique.screen)\n if dconfig.exec_():\n pms.TEMPS_PARTIE, pms.TREATMENT, pms.GRILLES = dconfig.get_config()\n self._le2mserv.gestionnaire_graphique.infoserv(\n [trans_TC(u\"Part time: {}\").format(pms.TEMPS_PARTIE),\n trans_TC(u\"Treatment: {}\").format(pms.get_treatment(pms.TREATMENT)),\n trans_TC(u\"Grids: {}\").format(len(pms.GRILLES))])", "def get_config(self):\n return {}", "def config(self):\n return {}", "def config(self) -> Dict[str, Any]:", "def save_config(self):\n\n h_config = configparser.ConfigParser()\n\n h_config[\"general\"] = {}\n if not self.configuration.interval:\n self.configuration.interval = __interval__\n h_config[\"general\"][\"interval\"] = str(self.configuration.interval)\n if not self.configuration.wifi_clients:\n self.configuration.wifi_clients = __wifi_clients_example__\n h_config[\"general\"][\"wifi_clients\"] = \",\".join(self.configuration.wifi_clients)\n if not self.configuration.schedules_names:\n self.configuration.schedules_names = __schedules_names_example__\n 
h_config[\"general\"][\"schedules_name\"] = \",\".join(self.configuration.schedules_names)\n\n h_config[\"unifi\"] = {}\n if not self.configuration.unifi_host:\n self.configuration.unifi_host = __unifi_controller_host__\n h_config[\"unifi\"][\"host\"] = self.configuration.unifi_host\n if not self.configuration.unifi_port:\n self.configuration.unifi_port = __unifi_controller_port__\n h_config[\"unifi\"][\"port\"] = str(self.configuration.unifi_port)\n if not self.configuration.unifi_username:\n self.configuration.unifi_username = __unifi_controller_user__\n h_config[\"unifi\"][\"username\"] = self.configuration.unifi_username\n if not self.configuration.unifi_password:\n self.configuration.unifi_password = __unifi_controller_pwd__\n h_config[\"unifi\"][\"password\"] = self.configuration.unifi_password\n\n h_config[\"hue\"] = {}\n if not self.configuration.hue_host:\n self.configuration.hue_host = __hue_hub_host__\n h_config[\"hue\"][\"host\"] = self.configuration.hue_host\n if not self.configuration.hue_port:\n self.configuration.hue_port = __hue_hub_port__\n h_config[\"hue\"][\"port\"] = str(self.configuration.hue_port)\n if not self.configuration.hue_key:\n self.configuration.hue_key = __hue_key__\n h_config[\"hue\"][\"key\"] = self.configuration.hue_key\n\n h_config[\"zmq\"] = {}\n if not self.configuration.pub_host:\n self.configuration.pub_host = __zmq_default_publishing_host__\n h_config[\"zmq\"][\"host\"] = self.configuration.pub_host\n if not self.configuration.pub_port:\n self.configuration.pub_port = __zmq_default_publishing_port__\n h_config[\"zmq\"][\"port\"] = str(self.configuration.pub_port)\n if \"no_pub\" in self.configuration:\n h_config[\"zmq\"][\"disabled\"] = str(int(self.configuration.no_pub))\n\n h_config[\"logging\"] = {}\n if self.configuration.syslog_host:\n h_config[\"logging\"][\"syslog_host\"] = self.configuration.syslog_host\n if self.configuration.syslog_port:\n h_config[\"logging\"][\"syslog_port\"] = str(self.configuration.syslog_port)\n if self.configuration.log_file:\n h_config[\"logging\"][\"log_file\"] = str(self.configuration.log_file)\n\n with self.config_file.open(mode='w') as configfile:\n h_config.write(configfile)\n logging.info(\"Configuration saved to {}\".format(str(self.config_file)))", "def get_details(self):\n return self.__config_data", "def config(self):\r\n return self._config", "def get_config(self):\n config = super(Sc2Policy, self).get_config()\n config['eps'] = self.eps\n config['testing'] = self.testing\n return config", "def get_config(self):\n return super().get_config()", "def get_config(self):\n config = {\n 'membership_transform': self.membership_transform,\n 'predictions_transform': self.predictions_transform,\n 'membership_kernel': self.membership_kernel,\n 'predictions_kernel': self.predictions_kernel,\n 'name': self.name,\n }\n config = {k: v for k, v in config.items() if v is not None}\n return self._serialize_config(config)", "def __config_attributes(self):\n self.__name = self.__data[self.__code][\"airportName\"]\n self.__country = Country(name=self.__data[self.__code][\"countryName\"],\n code=self.__data[self.__code][\"countryCode\"])\n try:\n self.__city = self.__data[self.__code][\"city\"]\n except Exception:\n self.__city = ''", "def config(self):\n return self._config", "def get_config(self):\n return {'name': self.name, 'dtype': self.dtype}", "def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)", "def get_config(self):\n return self.cat_feats_cfg", "def 
get_config(self):\n config = {\n 'multichannel': self._multichannel,\n 'complex_part': self._complex_part\n }\n base_config = super().get_config()\n return {**base_config, **config}", "def subconfig(self) -> \"Config\":\n config = Config()\n config.func = self.func\n config.targets = self.targets.copy()\n config.blacklisted_targets = self.blacklisted_targets.copy()\n config.variables = self.variables.copy()\n config.unit_iterator = self.unit_iterator.copy()\n config.ran_units = self.ran_units.copy()\n config.active_units = self.active_units.copy()\n config.entry_point = self.unit_iterator.lst[-1]\n config.signals = self.signals.copy()\n return config", "def get_config(self):\n config = super(EpsGreedyQPolicy, self).get_config()\n config['eps'] = self.eps\n return config", "def get_config(self):\n return {'reduction': self.reduction, 'name': self.name}", "def config():\n return {\n \"COMPONENT_NAME\": \"testing-deleter\",\n \"DEST_SITE\": \"NERSC\",\n \"DISK_BASE_PATH\": \"/path/to/rucio/rse/root\",\n \"HEARTBEAT_PATCH_RETRIES\": \"3\",\n \"HEARTBEAT_PATCH_TIMEOUT_SECONDS\": \"30\",\n \"HEARTBEAT_SLEEP_DURATION_SECONDS\": \"60\",\n \"INPUT_STATUS\": \"detached\",\n \"LTA_REST_TOKEN\": \"fake-lta-rest-token\",\n \"LTA_REST_URL\": \"http://RmMNHdPhHpH2ZxfaFAC9d2jiIbf5pZiHDqy43rFLQiM.com/\",\n \"OUTPUT_STATUS\": \"source-deleted\",\n \"RUN_ONCE_AND_DIE\": \"False\",\n \"SOURCE_SITE\": \"WIPAC\",\n \"WORK_RETRIES\": \"3\",\n \"WORK_SLEEP_DURATION_SECONDS\": \"60\",\n \"WORK_TIMEOUT_SECONDS\": \"30\",\n }", "def config(self) -> NamedTuple:", "def config(self):\n return self._cfg", "def getConfig(self):\n pass", "def __init__(self):\n parameters_list = []\n self.config_dict = self.open_config(parameters_list)\n\n # Define defaults\n self.disc_gt = 0.0\n self.disc_out = 0.0", "def get_full_config(self):\n\n raise Exception(\"Child classes must override get_full_config()\")", "def get_default():\n # default_config = configparser.ConfigParser(allow_no_value=True)\n #\n # default_config.add_section(\"General\")\n # general = default_config[\"General\"]\n # general[\"PermanentLogPath\"] = r\"/home/pi/automationwebserver.log\"\n # general[\"TempLogPath\"] = r\"/var/ramdrive/test.txt\"\n #\n # default_config.add_section(\"ArduinoLink\")\n # arduino = default_config[\"ArduinoLink\"]\n # arduino[\"ArdIPAddress\"] = \"192.168.2.35\"\n # arduino[\"ArdTerminalPort\"] = \"53201\"\n # arduino[\"ArdDatastreamPort\"] = \"53202\"\n # arduino[\"RPiIPAddress\"] = \"192.168.2.34\"\n # arduino[\"RpiTerminalPort\"] = \"53201\"\n # arduino[\"RpiDatastreamPort\"] = \"53202\"\n #\n # default_config.add_section(\"Databases\")\n # databases = default_config[\"Databases\"]\n # databases[\"HostAddress\"] = \"localhost\"\n # databases[\"HostPort\"] = \"3306\"\n # default_config['REALTIME'] = {'databasename': 'testname', 'user': 'testuser',\n # 'password': 'testpassword', 'max_rows': '10'}\n # default_config['HISTORY'] = {'databasename': 'testname', 'user': 'testuser',\n # 'password': 'testpassword'}\n #\n # default_config.add_section(\"DataTransfer\")\n # default_config.set(\"DataTransfer\", r\"# see https://docs.python.org/3.6/library/struct.html#struct.unpack\", None)\n # datatransfer = default_config[\"DataTransfer\"]\n # datatransfer[\"ProtocolVersion\"] = 'a'\n # default_config[\"SensorReadings\"] = {\"tablename\": \"PoolHeaterSensorValues\",\n # \"unpackformat\": \"<Hff?fffffffffff\",\n # \"fieldnames\":\n # \"sim_flags solar_intensity cumulative_insolation\"\\\n # \" surge_tank_ok pump_runtime\"\\\n # \" 
hx_hot_inlet_inst hx_hot_inlet_smooth\"\\\n # \" hx_hot_outlet_inst hx_hot_outlet_smooth\"\\\n # \" hx_cold_inlet_inst hx_cold_inlet_smooth\"\\\n # \" hx_cold_outlet_inst hx_cold_outlet_smooth\"\\\n # \" temp_ambient_inst temp_ambient_smooth\"\n # }\n # default_config[\"Status\"] = {\"tablename\": \"PoolHeaterStatus\",\n # \"unpackformat\": \"<B?BB?BBBBBB\",\n # \"fieldnames\":\n # \"assert_failure_code realtime_clock_status\"\\\n # \" logfile_status ethernet_status\"\\\n # \" solar_intensity_reading_invalid\"\\\n # \" pump_state\"\\\n # \" hx_hot_inlet_status hx_hot_outlet_status\"\\\n # \" hx_cold_inlet_status hx_cold_outlet_status\"\\\n # \" ambient_status\"\n # }\n return default_config", "def config(self):\n return self.__config", "def config(self):\n return self.__config", "def get_config(self):\n return {'value': self.value}", "def electrode_config(ele_lims, ele_res, true_csd, csd_x, csd_y, csd_z):\n #Potentials\n ele_x_lims = ele_y_lims = ele_z_lims = ele_lims\n ele_x, ele_y, ele_z = generate_electrodes(ele_x_lims, ele_y_lims, ele_z_lims, ele_res)\n if parallel_available:\n pots = calculate_potential_3D_parallel(true_csd, \n ele_x, ele_y, ele_z, \n csd_x, csd_y, csd_z)\n else:\n pots = calculate_potential_3D(true_csd, \n ele_x, ele_y, ele_z, \n csd_x, csd_y, csd_z) \n ele_pos = np.vstack((ele_x, ele_y, ele_z)).T #Electrode configs\n num_ele = ele_pos.shape[0]\n print 'Number of electrodes:', num_ele\n return ele_pos, pots", "def config(self) -> \"AutomationConfig\":", "def config(self) -> \"AutomationConfig\":", "def gather_configuration(self, config):\n config['log']['logging_level'] = self.logDisplay.get_logging_level()\n\n # MIDI\n config['midi']['winch_midi_input'] = self.winchMidiInputCombo.current_item()\n config['midi']['midi_output'] = self.midiOutputCombo.current_item()\n\n # OSC\n addr, port = self.oscListenerConfig.get_OSC_port()\n config['osc']['listener_addr'] = addr\n config['osc']['listener_port'] = str(port)\n addr, port = self.oscSenderConfig.get_OSC_port()\n config['osc']['sender_addr'] = addr\n config['osc']['sender_port'] = str(port)\n\n # DMX\n config['dmx']['dmx_output_serial_port'] = self.dmxSelect.current_item()\n\n # winches\n for i, winchSelect in enumerate(self.winchSelects):\n key = \"winch_%d_output_serial_port\" % (i+1)\n config['winches'][key] = winchSelect.current_item()\n\n return", "def get_data_config(self):\n conf_map = {}\n\n if self.alien_alg.currentIndex() == 1:\n conf_map['alien_alg'] = '\"block_aliens\"'\n if len(self.aliens.text()) > 0:\n conf_map['aliens'] = str(self.aliens.text()).replace('\\n', '')\n if self.alien_alg.currentIndex() == 2:\n conf_map['alien_alg'] = '\"alien_file\"'\n if len(self.alien_file.text()) > 0:\n conf_map['alien_file'] = '\"' + str(self.alien_file.text()) + '\"'\n elif self.alien_alg.currentIndex() == 3:\n conf_map['alien_alg'] = '\"AutoAlien1\"'\n if len(self.AA1_size_threshold.text()) > 0:\n conf_map['AA1_size_threshold'] = str(self.AA1_size_threshold.text())\n if len(self.AA1_asym_threshold.text()) > 0:\n conf_map['AA1_asym_threshold'] = str(self.AA1_asym_threshold.text())\n if len(self.AA1_min_pts.text()) > 0:\n conf_map['AA1_min_pts'] = str(self.AA1_min_pts.text())\n if len(self.AA1_eps.text()) > 0:\n conf_map['AA1_eps'] = str(self.AA1_eps.text())\n if len(self.AA1_amp_threshold.text()) > 0:\n conf_map['AA1_amp_threshold'] = str(self.AA1_amp_threshold.text())\n if self.AA1_save_arrs.isChecked():\n conf_map['AA1_save_arrs'] = \"True\"\n if len(self.AA1_expandcleanedsigma.text()) > 0:\n 
conf_map['AA1_expandcleanedsigma'] = str(self.AA1_expandcleanedsigma.text())\n\n if len(self.amp_intensity.text()) > 0:\n conf_map['amp_threshold'] = str(self.amp_intensity.text())\n if len(self.binning.text()) > 0:\n conf_map['binning'] = str(self.binning.text()).replace('\\n', '')\n if len(self.center_shift.text()) > 0:\n conf_map['center_shift'] = str(self.center_shift.text()).replace('\\n', '')\n if len(self.adjust_dimensions.text()) > 0:\n conf_map['adjust_dimensions'] = str(self.adjust_dimensions.text()).replace('\\n', '')\n\n return conf_map", "def _set_instance_config(self):\n\t\t\n\t\tif \"PARAMETERS_NAME\" in self.config.keys():\n\t\t\tlogger.info(\"You specified your own PARAMETERS_NAME, I will use it.\")\n\t\telse:\n\t\t\tself.config[\"PARAMETERS_NAME\"] = self._get_params_filepath()\n\t\t\n\t\tif \"FILTER_NAME\" in self.config.keys():\n\t\t\tlogger.info(\"You specified your own FILTER_NAME, I will use it.\")\n\t\telse:\n\t\t\tself.config[\"FILTER_NAME\"] = self._get_conv_filepath()\n\t\t\n\t\t\n\t\tif \"CATALOG_NAME\" in self.config.keys():\n\t\t\tlogger.warning(\"You specified your own CATALOG_NAME, but I will *NOT* use it !\")\n\t\t\tdel self.config[\"CATALOG_NAME\"]\n\n\t\tif \"PSF_NAME\" in self.config.keys():\n\t\t\tlogger.info(\"You specified your own PSF_NAME, I will use it.\")\n\t\telse:\n\t\t\tself.config[\"PSF_NAME\"] = self._get_psf_filepath()", "def get_config():\n name = 'dynamic_pricing'\n num_products = 5\n scale = 1\n noise_var = 10\n p_max = 1\n\n agents = collections.OrderedDict(\n [('bsPricing',\n functools.partial(BootstrapDynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n\n environments = collections.OrderedDict(\n [('env',\n functools.partial(DynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n experiments = collections.OrderedDict(\n [(name, ExperimentNoAction)]\n )\n n_steps = 80\n n_seeds = 2000\n config = Config(name, agents, environments, experiments, n_steps, n_seeds)\n return config", "def get_config(self):\n if self.faucet is not None:\n return self.faucet.get_config()\n return None", "def create_config(self):\n\n #FIXME: Try to do it over loops ie. 
self[attr].set_config()\n for attr, value in self.__dict__.items():\n if attr == \"connection\":\n self.connection.set_config(self.cfg)\n if attr == \"ipv4\":\n self.ipv4.set_config(self.cfg)\n if attr == \"ipv6\":\n self.ipv6.set_config(self.cfg)\n if attr == \"_802_3_ethernet\" and not value == \"none\":\n self._802_3_ethernet.set_config(self.cfg)\n if attr == \"_802_11_wireless\" and not value == \"none\":\n self._802_11_wireless.set_config(self.cfg)\n if attr == \"_802_11_wireless_security\" and not value == \"none\":\n self._802_11_wireless_security.set_config(self.cfg)", "def getUserConfig(self):\n\n # Load Autobidder stats\n userconfig_json = open('./data/config.json')\n json1_str = userconfig_json.read()\n configops = json.loads(json1_str)[0]\n\n config_choices = []\n for key, value in configops.items():\n config_choices.append(value)\n\n conserve_bids = config_choices[0]\n sleep_time = config_choices[1]\n botspeed = config_choices[2]\n bidexpiration_ceiling = config_choices[3]\n buyceiling = config_choices[4]\n sellceiling = config_choices[5]\n\n sleep_time = int(sleep_time)\n botspeed = float(botspeed)\n conserve_bids = int(conserve_bids)\n bidexpiration_ceiling = int(bidexpiration_ceiling)\n buyceiling = float(buyceiling/100)\n sellceiling = float(sellceiling/100)\n\n if (buyceiling > 1):\n log_event(self.queue, \"buy ceiling greater than 1: \" +\n str(buyceiling))\n log_event(self.queue, \"setting it to .85: \")\n buyceiling = 0.85\n\n if (sellceiling > 1):\n log_event(self.queue, \"sell ceiling greater than 1: \" +\n str(sellceiling))\n log_event(self.queue, \"setting it to .95 \")\n sellceiling = 0.95\n\n self.conserve_bids = conserve_bids\n self.sleep_time = sleep_time\n self.botspeed = botspeed\n self.bidexpiration_ceiling = bidexpiration_ceiling\n self.buyceiling = buyceiling\n self.sellceiling = sellceiling\n\n # Return values but this really shouldn't be used - only used on initialization\n return conserve_bids, sleep_time, botspeed, bidexpiration_ceiling, buyceiling, sellceiling", "def config(self):\n if not hasattr(self,\"_config\") or self._config is None:\n self._config = {}\n \n return self._config", "def config(self) -> Any:\n return self._config", "def _init_config_(self):\n self._config= {}", "def get_config(self):\n config = {\n 'F_': self.F_,\n 'attn_heads': self.attn_heads,\n 'attn_heads_reduction': self.attn_heads_reduction,\n 'edge_type_reduction': self.edge_type_reduction,\n 'attention_type': self.attention_type,\n 'attn_dropout': self.attn_dropout,\n 'feature_dropout': self.feature_dropout,\n 'activation': self.activation,\n 'use_value_bias': self.use_value_bias,\n 'use_key_bias': self.use_key_bias,\n 'kernel_initializer': self.kernel_initializer,\n 'bias_initializer': self.bias_initializer,\n 'attn_kernel_initializer': self.attn_kernel_initializer,\n 'attn_bias_initalizer': self.attn_bias_initializer,\n 'kernel_regularizer': self.kernel_regularizer,\n 'bias_regularizer': self.bias_regularizer,\n 'attn_kernel_regularizer': self.attn_kernel_regularizer,\n 'attn_bias_regularizer': self.attn_bias_regularizer,\n 'activity_regularizer': self.activity_regularizer,\n 'kernel_constraint': self.kernel_constraint,\n 'bias_constraint': self.bias_constraint,\n 'attn_kernel_constraint': self.attn_kernel_constraint,\n 'attn_bias_constraint': self.attn_bias_constraint\n }\n base_config = super(BatchShawMultigraphAttention, self).get_config()\n return dict(list(base_config.items())) + list(config.items())", "def config(self):\n return self._config", "def config(self):\n 
return self._config", "def config(self):\n return self._config", "def config(self):\n return self[CONFIG_KEY]", "def get_config_template(self) -> cconfig.Config:", "def copy_config_to_properties(self, config):\n ## EPICS\n self.epics_root = config.get('epics_root')\n\n ## Directories\n self.smurf_cmd_dir = config.get('smurf_cmd_dir')\n self.tune_dir = config.get('tune_dir')\n self.status_dir = config.get('status_dir')\n self.default_data_dir = config.get('default_data_dir')\n\n ## Useful constants\n constant_cfg = config.get('constant')\n self.pA_per_phi0 = constant_cfg.get('pA_per_phi0')\n\n ## Timing\n timing_cfg = config.get('timing')\n self.timing_reference = timing_cfg['timing_reference']\n\n ## Cold amplifier biases\n amp_cfg = config.get('amplifier')\n\n # 4K HEMT\n self.hemt_Vg = amp_cfg['hemt_Vg']\n self.hemt_bit_to_V = amp_cfg['bit_to_V_hemt']\n self.hemt_Vd_series_resistor = amp_cfg['hemt_Vd_series_resistor']\n self.hemt_Id_offset = amp_cfg['hemt_Id_offset']\n self.hemt_gate_min_voltage = amp_cfg['hemt_gate_min_voltage']\n self.hemt_gate_max_voltage = amp_cfg['hemt_gate_max_voltage']\n\n # 50K HEMT\n self.fiftyk_Vg = amp_cfg['LNA_Vg']\n self.fiftyk_dac_num = amp_cfg['dac_num_50k']\n self.fiftyk_bit_to_V = amp_cfg['bit_to_V_50k']\n self.fiftyk_amp_Vd_series_resistor = amp_cfg['50K_amp_Vd_series_resistor']\n self.fiftyk_Id_offset = amp_cfg['50k_Id_offset']\n ## Tune parameters\n tune_band_cfg = config.get('tune_band')\n self.default_tune = tune_band_cfg['default_tune']\n self.gradient_descent_gain = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_gain'].items()}\n self.gradient_descent_averages = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_averages'].items()}\n self.gradient_descent_converge_hz = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_converge_hz'].items()}\n self.gradient_descent_step_hz = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_step_hz'].items()}\n self.gradient_descent_momentum = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_momentum'].items()}\n self.gradient_descent_beta = {\n int(band):v for (band,v) in\n tune_band_cfg['gradient_descent_beta'].items()}\n self.feedback_start_frac = {\n int(band):v for (band,v) in\n tune_band_cfg['feedback_start_frac'].items()}\n self.feedback_end_frac = {\n int(band):v for (band,v) in\n tune_band_cfg['feedback_end_frac'].items()}\n self.eta_scan_del_f = {\n int(band):v for (band,v) in\n tune_band_cfg['eta_scan_del_f'].items()}\n self.eta_scan_averages = {\n int(band):v for (band,v) in\n tune_band_cfg['eta_scan_averages'].items()}\n self.delta_freq = {\n int(band):v for (band,v) in\n tune_band_cfg['delta_freq'].items()}\n # Tracking algo\n self.lms_freq_hz = {\n int(band):v for (band,v) in\n tune_band_cfg['lms_freq'].items()}\n\n ## Reading/writing data\n self.fs = config.get('fs')\n\n ## In fridge\n self.R_sh = config.get('R_sh')\n\n ## Which bands are have their configurations specified in the\n ## pysmurf configuration file?\n smurf_init_config = config.get('init')\n bands = smurf_init_config['bands']\n\n ## Carrier\n self.dsp_enable = smurf_init_config['dspEnable']\n self.ultrascale_temperature_limit_degC = config.get('ultrascale_temperature_limit_degC')\n self.data_out_mux = {\n band:smurf_init_config[f'band_{band}']['data_out_mux']\n for band in bands}\n\n ## AMC\n # Which bands are present in the pysmurf configuration file?\n self.bands = bands\n self.amplitude_scale = {\n 
band:smurf_init_config[f'band_{band}']['amplitude_scale']\n for band in bands}\n self.iq_swap_in = {\n band:smurf_init_config[f'band_{band}']['iq_swap_in']\n for band in bands}\n self.iq_swap_out = {\n band:smurf_init_config[f'band_{band}']['iq_swap_out']\n for band in bands}\n self.ref_phase_delay = {\n band:smurf_init_config[f'band_{band}']['refPhaseDelay']\n for band in bands}\n self.ref_phase_delay_fine = {\n band:smurf_init_config[f'band_{band}']['refPhaseDelayFine']\n for band in bands}\n self.band_delay_us = {\n band:smurf_init_config[f'band_{band}']['bandDelayUs']\n for band in bands}\n self.att_uc = {\n band:smurf_init_config[f'band_{band}']['att_uc']\n for band in bands}\n self.att_dc = {\n band:smurf_init_config[f'band_{band}']['att_dc']\n for band in bands}\n self.trigger_reset_delay= {\n band:smurf_init_config[f'band_{band}']['trigRstDly']\n for band in bands}\n\n # Mapping from attenuator numbers to bands\n att_cfg = config.get('attenuator')\n att_cfg_keys = att_cfg.keys()\n attenuator = {}\n attenuator['band'] = np.zeros(len(att_cfg_keys),dtype=int)\n attenuator['att'] = np.zeros(len(att_cfg_keys),dtype=int)\n for i, k in enumerate(att_cfg_keys):\n attenuator['band'][i] = att_cfg[k]\n attenuator['att'][i] = k[-1]\n self.attenuator = attenuator\n\n ## RTM\n flux_ramp_cfg = config.get('flux_ramp')\n self.num_flux_ramp_counter_bits = flux_ramp_cfg['num_flux_ramp_counter_bits']\n self.reset_rate_khz = tune_band_cfg.get('reset_rate_khz')\n self.fraction_full_scale = tune_band_cfg.get('fraction_full_scale')\n\n ## Cryocard\n self.bias_line_resistance = config.get('bias_line_resistance')\n self.high_low_current_ratio = config.get('high_low_current_ratio')\n self.high_current_mode_bool = config.get('high_current_mode_bool')\n # Mapping from peripheral interface controller (PIC) to bias group\n pic_cfg = config.get('pic_to_bias_group')\n pic_cfg_keys = pic_cfg.keys()\n pic_to_bias_group = np.zeros((len(pic_cfg_keys), 2), dtype=int)\n for i, k in enumerate(pic_cfg_keys):\n val = pic_cfg[k]\n pic_to_bias_group[i] = [k, val]\n self.pic_to_bias_group = pic_to_bias_group\n\n ## Tracking algo\n # lmsGain ; this one's a little odd ; it's defined in each of\n # the band_# configuration file blocks, while the other main\n # tracking algorithm parameter, lms_freq_hz, is defined in the\n # tune_band configuration file block...\n self.lms_gain = {\n band:smurf_init_config[f'band_{band}']['lmsGain']\n for band in bands}\n self.lms_delay = {\n band:smurf_init_config[f'band_{band}']['lmsDelay']\n for band in bands}\n self.feedback_enable = {\n band:smurf_init_config[f'band_{band}']['feedbackEnable']\n for band in bands}\n self.feedback_gain = {\n band:smurf_init_config[f'band_{band}']['feedbackGain']\n for band in bands}\n self.feedback_limit_khz = {\n band:smurf_init_config[f'band_{band}']['feedbackLimitkHz']\n for band in bands}\n self.feedback_polarity = {\n band:smurf_init_config[f'band_{band}']['feedbackPolarity']\n for band in bands}\n\n ## Mappings\n # Bias groups available\n self.all_groups = config.get('all_bias_groups')\n\n # Number of bias groups and bias group to RTM DAC pair\n # mapping\n bias_group_cfg = config.get('bias_group_to_pair')\n bias_group_keys = bias_group_cfg.keys()\n\n # Number of bias groups\n self.n_bias_groups = len(bias_group_cfg)\n\n # Bias group to RTM DAC pair mapping\n bias_group_to_pair = np.zeros((len(bias_group_keys), 3), dtype=int)\n for i, k in enumerate(bias_group_keys):\n val = bias_group_cfg[k]\n bias_group_to_pair[i] = np.append([k], val)\n 
self.bias_group_to_pair = bias_group_to_pair\n\n # Bad resonator mask\n bad_mask_config = config.get('bad_mask')\n bad_mask_keys = bad_mask_config.keys()\n bad_mask = np.zeros((len(bad_mask_keys), 2))\n for i, k in enumerate(bad_mask_keys):\n bad_mask[i] = bad_mask_config[k]\n self.bad_mask = bad_mask", "def _get_config_data(self, cr, uid):\n\n model_conf = self.pool.get('customer.support.settings')\n args = [('selected', '=', True)] \n ids = model_conf.search(cr, uid, args)\n config = model_conf.browse(cr, uid, ids[0])\n\n return {\n 'tor_api_key': config.tor_api_key,\n 'tor_domain': config.tor_domain,\n 'company': config.company\n }", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config" ]
[ "0.70692134", "0.6935257", "0.6863148", "0.68504906", "0.68504906", "0.68297553", "0.6823189", "0.6751167", "0.6721291", "0.6666818", "0.65857863", "0.65857863", "0.6573653", "0.6544272", "0.65333617", "0.65164155", "0.6505307", "0.65036625", "0.64881974", "0.64446115", "0.64160365", "0.6406946", "0.6401681", "0.6400954", "0.6393162", "0.6385623", "0.6383458", "0.6375326", "0.63351685", "0.6333494", "0.6330577", "0.63240296", "0.63204634", "0.63074166", "0.6296032", "0.62914866", "0.6269402", "0.6263175", "0.6256515", "0.62445295", "0.62445295", "0.62341565", "0.6226556", "0.6217681", "0.6217681", "0.62174", "0.6215463", "0.6214505", "0.6200769", "0.6196268", "0.61797595", "0.6165011", "0.61542636", "0.61482227", "0.61448634", "0.61308265", "0.6130506", "0.6130506", "0.6130506", "0.6130114", "0.61268985", "0.61253864", "0.6124869", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915", "0.61247915" ]
0.0
-1
everything about a material
def __init__(self, name=None, zeta=None, concentration=None, index_of_refraction=None, transparency=None, fluorescence_spectra=None, permittivity=None, conductivity=None, thickness=None, youngs_modulus=None, poissons_ratio=None, density=None, dielectric_strength=None, reaction_site_density=None, Ka=None, Kb=None, width=None, length=None): # identity self.name = name # geometry self.length = length self.width = width self.thickness = thickness # mechanical self.density = density self.concentration = concentration # For a solid, this is % by volume. self.youngs_modulus = youngs_modulus self.poissons_ratio = poissons_ratio # optical self.index_of_refraction = index_of_refraction self.fluorescence_spectra = fluorescence_spectra self.transparency = transparency if self.transparency: self.reflectivity = 1 / self.transparency # electrochemical self.conductivity = conductivity if permittivity: self.permittivity = permittivity self.zeta = zeta self.dielectric_strength = dielectric_strength if reaction_site_density: self.reaction_site_density = reaction_site_density*1e18 # (#/nm2) surface density of reaction sites: accepts nm2 and converts to m2 (see Squires) self.Ka = Ka # reaction equilibrium constant - upper bound self.Kb = Kb # reaction equilibrium constant - lower bound
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def material(self):\n return self._F_Mstr", "def show_materials(self):\n print('\\nThe materials with known dielectric properties are:\\n')\n pprint.pprint(mats.Electrical.props)\n# pprint.pprint(mats.Electrical.DIELECTRIC)\n print('\\nThe materials with known loss tangents are:\\n')\n pprint.pprint(mats.Electrical.props)\n# pprint.pprint(mats.Electrical.LOSS_TAN)\n return", "def GetMaterial(self, *args):\n return _XCAFDoc.XCAFDoc_MaterialTool_GetMaterial(self, *args)", "def get_material_info(TABLE_info):\n \"\"\"\n 1 Get info from TABLE_info.\n \"\"\"\n width = TABLE_info[0]\n height = TABLE_info[1]\n t_m = TABLE_info[2]\n\n \"\"\"\n 2 Get material info.\n \"\"\"\n z_m = 3 * t_m\n\n m_width = rs.GetInteger(\"Put the width of material\", z_m, None, None)\n\n t_sen = rs.GetReal(\"Put Int(mm): Thickness of material to cut SEN.\", t_m / 2, None, None)\n\n x_m1 = m_width\n x_m2 = height - x_m1\n x_m3 = x_m2\n x_m4 = x_m1\n\n y_m2 = m_width\n y_m3 = y_m2\n y_m1 = width - (y_m2 + y_m3)\n y_m4 = y_m1\n\n\n # material1\n m1_p0 = (x_m3, y_m3)\n m1_p1 = (x_m3, y_m3 + y_m1)\n m1_p2 = (x_m3 + x_m1, y_m3 + y_m1)\n m1_p3 = (x_m3 + x_m1, y_m3)\n m1_points = [m1_p0, m1_p1, m1_p2, m1_p3]\n\n m1_info = [x_m1, y_m1, z_m, m1_points, t_sen]\n\n # material2\n m2_p0 = (0, width - y_m2)\n m2_p1 = (0, width)\n m2_p2 = (height - x_m1, width)\n m2_p3 = (height - x_m1, width - y_m2)\n m2_points = [m2_p0, m2_p1, m2_p2, m2_p3]\n\n m2_info = [x_m2, y_m2, z_m, m2_points, t_sen]\n\n # material3\n m3_p0 = (0, 0)\n m3_p1 = (0, y_m3)\n m3_p2 = (x_m3, y_m3)\n m3_p3 = (x_m3, 0)\n m3_points = [m3_p0, m3_p1, m3_p2, m3_p3]\n\n m3_info = [x_m3, y_m3, z_m, m3_points, t_sen]\n\n # material4\n m4_p0 = (0, y_m3)\n m4_p1 = (0, y_m3 + y_m4)\n m4_p2 = (-x_m4, y_m3 + y_m4)\n m4_p3 = (-x_m4, y_m3)\n m4_points = [m4_p0, m4_p1, m4_p2, m4_p3]\n\n m4_info = [x_m4, y_m4, z_m, m4_points, t_sen]\n\n return m1_info, m2_info, m3_info, m4_info", "def info_materials_raw_get():\n materials = _material_by_group(427) # 427 == intermediate group\n return materials, 200", "def material_from_pack(material):\n texture_file_name = material[0].decode(\"utf-8\").replace('\\x00', '').strip()\n return (\n texture_file_name,\n load_material_texture(texture_file_name)\n )", "def info_materials_get():\n materials = _material_by_group() # empty means all groups\n return materials, 200", "def __init__(self, vs, material):\n self.vs = vs\n self.material = material", "def info_materials_polymer_get():\n materials = _material_by_group(974) # 974 == intermediate group\n return materials, 200", "def __init__(self, diffuse=RGB(1,1,1), Kd=1.0, specular=RGB(1,1,1), Ks=0.0, \n shininess=8.0, Kt=0.0, ior=1.0, name=None):\n \n if name is None:\n name = \"Material %d\" % Material._num_materials\n \n Material._num_materials += 1\n \n self.name = name\n self.diffuse = diffuse\n self.Kd = Kd\n self.specular = specular\n self.Ks = Ks\n self.shininess = shininess\n self.Kt = Kt\n self.ior = ior", "def read_material_data(self, material):\n material_yaml_file = glob.glob(os.path.join(material_dir, material + '.yaml'))\n\n inputs = utilities.yaml_reader(material_yaml_file, material_dir, material)\n self.name = inputs['Name']\n self.materialName = material\n self.elements = inputs['Elements']\n self.zaids = inputs['Elemental ZAIDs']\n self.weightFraction = inputs['Elemental Weight Fractions'] if 'Elemental Weight Fractions' in inputs else []\n self.enrichmentZaids = inputs['Elemental Adjustment ZAIDs'] if 'Elemental Adjustment ZAIDs' in inputs else []\n self.enrichmentIsotopes = 
inputs['Isotopic Adjustment ZAIDs'] if 'Isotopic Adjustment ZAIDs' in inputs else []\n self.enrichmentVector = inputs['Isotopic Weight Percents'] if 'Isotopic Weight Percents' in inputs else []\n self.isotopicAtomPercents = inputs['Isotopic Atom Percents'] if 'Isotopic Atom Percents' in inputs else []\n self.density = inputs['Density']\n self.linearCoeffExpansion = inputs['Linear Coefficient of Expansion']", "def IsMaterial(self, *args):\n return _XCAFDoc.XCAFDoc_MaterialTool_IsMaterial(self, *args)", "def define_materials():\n global robot\n robot.add_material(ur.Material('Black', ur.Color(0.1, 0.1, 0.1, 1)))\n robot.add_material(ur.Material('LightGrey', ur.Color(0.9, 0.9, 0.9, 1)))\n robot.add_material(ur.Material('Grey', ur.Color(0.6, 0.6, 0.6, 1)))\n robot.add_material(ur.Material('DarkGrey', ur.Color(0.3, 0.3, 0.3, 1)))", "def setMaterial(self,massFraction,polymer):\n M = Materials()\n num = self.material['Detector']['mt']\n if polymer == 'PS':\n self.material['Detector']['matString'] = M.GetPSLiF(massFraction,num)\n elif polymer == 'PEN':\n self.material['Detector']['matString'] = M.GetPENLiF(massFraction,num)\n else:\n raise ValueError('Polymer {} is not in the material database'.format(polymer))", "def link_material(obj, mat):\n if not has_material(obj, mat.name):\n obj.data.materials.append(mat)", "def has_material(obj, name):\n return name in obj.data.materials.keys()", "def info_materials_booster_get():\n materials = _material_by_group(712) # 712 == intermediate group\n return materials, 200", "def AddDispersionMaterial(GeometryName,RGBData):\n\n r,g,b=RGBData\n onlyR = tuple([r,0,0,1])\n onlyG = tuple([0,g,0,1])\n onlyB = tuple([0,0,b,1])\n\n\n currentMaterial = bpy.data.materials.new(name='TypeA'+GeometryName)\n currentMaterial.use_nodes = True\n nodes = currentMaterial.node_tree.nodes\n\n math01 = nodes.new(\"ShaderNodeMath\")\n math01.operation = \"POWER\"\n\n glassBSDF01 = nodes.new(\"ShaderNodeBsdfGlass\")\n glassBSDF01.inputs[0].default_value = onlyR\n currentMaterial.node_tree.links.new(math01.outputs[0],glassBSDF01.inputs[1])\n\n glassBSDF02 = nodes.new(\"ShaderNodeBsdfGlass\")\n glassBSDF02.inputs[0].default_value = onlyG\n currentMaterial.node_tree.links.new(math01.outputs[0],glassBSDF02.inputs[1])\n\n glassBSDF03 = nodes.new(\"ShaderNodeBsdfGlass\")\n glassBSDF03.inputs[0].default_value = onlyB\n currentMaterial.node_tree.links.new(math01.outputs[0],glassBSDF03.inputs[1])\n\n math02 = nodes.new(\"ShaderNodeMath\")\n currentMaterial.node_tree.links.new(math02.outputs[0],glassBSDF02.inputs[2])\n\n math03 = nodes.new(\"ShaderNodeMath\")\n currentMaterial.node_tree.links.new(math02.outputs[0],math03.inputs[1])\n currentMaterial.node_tree.links.new(math03.outputs[0],glassBSDF01.inputs[2])\n\n addShader01 = nodes.new(\"ShaderNodeAddShader\")\n currentMaterial.node_tree.links.new(glassBSDF01.outputs[0],addShader01.inputs[0])\n currentMaterial.node_tree.links.new(glassBSDF02.outputs[0],addShader01.inputs[1])\n\n addShader02 = nodes.new(\"ShaderNodeAddShader\")\n currentMaterial.node_tree.links.new(addShader01.outputs[0],addShader02.inputs[0])\n currentMaterial.node_tree.links.new(glassBSDF03.outputs[0],addShader02.inputs[1])\n\n volumeAbs = nodes.new(\"ShaderNodeVolumeAbsorption\")\n\n materialOutput=nodes.get(\"Material Output\")\n currentMaterial.node_tree.links.new(addShader02.outputs[0],materialOutput.inputs[0])\n currentMaterial.node_tree.links.new(volumeAbs.outputs[0],materialOutput.inputs[1])\n\n bpy.data.objects[GeometryName].data.materials.append(currentMaterial)", 
"def AddMaterial(self, *args):\n return _XCAFDoc.XCAFDoc_MaterialTool_AddMaterial(self, *args)", "def __repr__(self):\n\n return f'<Material material_id={self.material_id} name={self.name}>'", "def set_material(self, material):\r\n for b in self.buf:\r\n b.set_material(material)", "def validateMaterial(material, adjust=False):\n errors = []\n\n if not material:\n errors.append(ValidateMessage(\"No material defined.\", 'WARNING', material, None, {}))\n return errors, material\n\n if isinstance(material, bpy.types.Object):\n # there are always 18 slots, regardless of whether they are filled or not\n for tex in material.texture_slots:\n if tex is not None:\n try:\n # regular diffuse color texture\n if tex.use_map_color_diffuse:\n # grab the first texture\n material.texture_slots[0].texture.image.filepath.replace('//', '')\n except (KeyError, AttributeError):\n errors.append(\n ValidateMessage(\n \"Diffuse texture incomplete/undefined.\", 'WARNING', material, None, {}\n )\n )\n try:\n # normal map\n if tex.use_map_normal:\n # grab the first texture\n material.texture_slots[0].texture.image.filepath.replace('//', '')\n except (KeyError, AttributeError):\n errors.append(\n ValidateMessage(\n \"Normal texture incomplete/undefined.\", 'WARNING', material, None, {}\n )\n )\n try:\n # displacement map\n if tex.use_map_displacement:\n # grab the first texture\n material.texture_slots[0].texture.image.filepath.replace('//', '')\n except (KeyError, AttributeError):\n errors.append(\n ValidateMessage(\n \"Displacement texture incomplete/undefined.\",\n 'WARNING',\n material,\n None,\n {},\n )\n )\n else:\n if not hasattr(material, \"name\"):\n if adjust:\n material = {'name': 'phobos_error'}\n loglevel = 'WARNING'\n else:\n loglevel = 'ERROR'\n errors.append(\n ValidateMessage(\"Material name not defined.\", 'ERROR', material, None, {})\n )\n return errors, material\n\n if 'diffuse' not in material:\n if adjust:\n material['diffuse'] = (1., 1., 1., 1.)\n loglevel = 'WARNING'\n else:\n loglevel = 'ERROR'\n errors.append(\n ValidateMessage(\"Material diffuse color not defined.\", 'ERROR', material, None, {})\n )\n elif len(material['diffuse']) != 4:\n if adjust:\n if len(material['diffuse']) == 3:\n material['diffuse'] = tuple(material['diffuse'] + [1.])\n loglevel = 'WARNING'\n else:\n loglevel = 'ERROR'\n errors.append(\n ValidateMessage(\n \"Material diffuse color definition insufficient.\", loglevel, material, None, {}\n )\n )\n\n if 'diffuse_intensity' not in material:\n errors.append(\n ValidateMessage(\n \"Material diffuse intensity not defined.\", 'WARNING', material, None, {}\n )\n )\n if adjust:\n material['diffuse_intensity'] = 1.\n return errors, material", "def info_materials_gas_get():\n materials = _material_by_group(711) # 711 == intermediate group\n return materials, 200", "def get_material_features(self):\n return self.material_features", "def load_materials(self):\n # Create material objects\n for meta_mat in self.gltf.materials:\n mat = Material(meta_mat.name)\n mat.color = meta_mat.baseColorFactor or [1.0, 1.0, 1.0, 1.0]\n mat.double_sided = meta_mat.doubleSided\n\n if meta_mat.baseColorTexture is not None:\n mat.mat_texture = self.textures[meta_mat.baseColorTexture[\"index\"]]\n\n self.materials.append(mat)\n self.scene.materials.append(mat)", "def get_material_mapping(self):\n return {name: self.get_material(name) for name in self.parts.keys()}", "def test_materials_present(self):\n self.assertIsNotNone('Materials' in self.header.parameters.attrs)", "def load_material_library( self, 
url, materials, baseURL=None ):\n #( resolvedURL, os.path.abspath(filename), file, headers )\n try:\n finalURL, filename, file, headers = loader.Loader( url, baseURL )\n except IOError, err:\n if '/' in url:\n possible = url.split( '/' )[-1]\n try:\n finalURL, filename, file, headers = loader.Loader( \n possible, baseURL \n )\n except IOError, err:\n log.warn(\n \"\"\"Unable to load material library: %s\"\"\",\n url,\n )\n return False\n \n material = None\n for line in file.read().splitlines():\n if line.startswith('#'):\n continue\n values = line.split()\n if not values:\n continue\n\n if values[0] == 'newmtl':\n material = self.defaultMaterial()\n materials[values[1]] = material\n elif material is None:\n log.warn('Expected \"newmtl\" in %s', url)\n continue\n\n try:\n if values[0] == 'Kd':\n material.material.diffuseColor = map(float, values[1:])\n elif values[0] == 'Ka':\n material.material.ambientColor = map(float, values[1:])\n elif values[0] == 'Ks':\n material.material.specularColor = map(float, values[1:])\n elif values[0] == 'Ke':\n material.material.emissiveColor = map(float, values[1:])\n elif values[0] == 'Ns':\n material.material.shininess = float(values[1])\n elif values[0] == 'd':\n material.material.opacity = float(values[1])\n elif values[0] == 'map_Kd':\n if '/' in values[1]:\n img_url = [ values[1], values[1].split('/')[-1] ]\n else:\n img_url = [ values[1] ]\n img_url = [\n urllib.basejoin(baseURL, u )\n for u in img_url\n ]\n texture = basenodes.ImageTexture(url=img_url)\n material.texture = texture\n except:\n log.warn('Parse error in %s.', url)", "def make_input_materials(self) :\n # 1 5 1 MATERIAL 1 (arbitrary line, i think) \n # 1.4493e+00 9.9000e-03 7.9000e-03 1. 0. 0. 7.9000e-03 1.\n # 3.8070e-01 1.0420e-01 1.6920e-01 0 1.5100e-02 0. 
1.6920e-01 1.\n self.input_materials = \"\"\n number_mats = len(self.core.pattern)+1\n a = self.core.assemblies\n for i in range(0, number_mats-1) :\n # Row 1: description.\n self.input_materials += \" \" + str(i+1) + \" 5 1 MATERIAL \" + \\\n str(i+1) + \" (\" + \\\n a[i].model + \", \" + \\\n str(a[i].enrichment) + \" w/o, \" + \\\n str(a[i].burnup) + \" MWd/kg)\\n\" \n # Rows 2 and 3.\n D1,D2,A1,A2,F1,F2,S12 = a[i].get_constants()\n d = np.array([[D1,A1,F1,1.0,0.0,0.0,F1,1.0],[D2,A2,F2,0.0,S12,0.0,F2,1.0]])\n for j in range(0, 2) :\n for k in range(0, 8) :\n self.input_materials +='%12.4e' %(d[j,k])\n self.input_materials += '\\n'\n \n a = self.core.reflector\n # Row 1: description.\n self.input_materials += \" \" + str(number_mats) + \" 5 1 MATERIAL \" + \\\n str(number_mats) + \" (REFLECTOR) \\n\" \n # Rows 2 and 3.\n D1,D2,A1,A2,F1,F2,S12 = a.get_constants()\n d = np.array([[D1,A1,F1,1.0,0.0,0.0,F1,1.0],[D2,A2,F2,0.0,S12,0.0,F2,1.0]])\n for i in range(0, 2) :\n for j in range(0, 8) :\n self.input_materials +='%12.4e' %(d[i,j])\n self.input_materials += '\\n'\n self.input_materials += \"WHITE\\n\" + \"BLACK\\n\" + \"END\\n\"", "def SetMaterial(self, *args):\n return _XCAFDoc.XCAFDoc_MaterialTool_SetMaterial(self, *args)", "def materials_adding_panel(self, context):\r\n \r\n AM = context.window_manager.asset_m\r\n layout = self.layout\r\n box = layout.box()\r\n view = context.space_data\r\n thumbnails_path = get_directory('icons')\r\n library_path = get_library_path()\r\n extentions = (\".jpg\", \".jpeg\", \".png\")\r\n thumb = [thumb.rsplit(\".\", 1)[0] for thumb in listdir(thumbnails_path) if thumb.endswith(extentions)]\r\n if AM.as_mat_scene:\r\n thumb_list = thumb + [\"AM_Cloth\", \"AM_Sphere\"]\r\n else: \r\n thumb_list = thumb\r\n\r\n cam_is_valid = False\r\n obj_is_valid = False\r\n \r\n \r\n if not AM.as_mat_scene and not bpy.context.object:\r\n box.prop(AM, \"as_mat_scene\", text = \"Save as material scene\")\r\n box.label(\"No active_object in the scene\", icon='ERROR')\r\n box.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')\r\n \r\n elif not AM.as_mat_scene and not bpy.context.active_object.active_material:\r\n box.prop(AM, \"as_mat_scene\", text = \"Save as material scene\")\r\n box.label(\"The object have no material\", icon='ERROR')\r\n box.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')\r\n \r\n else:\r\n if AM.as_mat_scene and not isdir(join(library_path, 'materials', \"Render Scenes\")):\r\n box.operator(\"object.create_rder_scn_lib\", text = \"Create render scene library\", icon = 'FILESEL')\r\n box.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')\r\n \r\n else:\r\n \r\n if AM.as_mat_scene:\r\n asset_name = AM.scene_name\r\n else:\r\n active_mat = context.active_object.active_material\r\n asset_name = active_mat.name\r\n \r\n if len(bpy.context.active_object.material_slots) == 1:\r\n AM.multi_materials = False\r\n \r\n if AM.as_mat_scene and (not asset_name in thumb_list or asset_name in thumb_list and AM.replace_rename == 'replace') or\\\r\n not AM.as_mat_scene and (AM.multi_materials and get_valid_materials() or not AM.multi_materials and asset_name not in thumb_list or asset_name in thumb_list and AM.replace_rename == 'replace'): \r\n if not AM.multi_materials:\r\n if asset_name in thumb_list and AM.replace_rename == 'replace':\r\n box.label(\"\\\" {} \\\" already exist\".format(asset_name), icon='ERROR')\r\n box.separator()\r\n if len(bpy.context.active_object.material_slots) >= 2 and AM.replace_rename == 
'rename':\r\n box.prop(AM, \"multi_materials\", text = \"All materials\")\r\n row = box.row(align=True)\r\n row.prop(AM, \"replace_rename\", text=\" \", expand=True)\r\n if AM.replace_rename == 'rename':\r\n if AM.as_mat_scene:\r\n box.prop(AM, \"scene_name\", text = \"\")\r\n else:\r\n box.prop(AM, \"rename_mat\", text=\"\")\r\n \r\n box.prop(AM, \"as_mat_scene\", text = \"Save as material scene\")\r\n if not AM.as_mat_scene and len(bpy.context.active_object.material_slots) >= 2:\r\n if len(get_valid_materials()) != len(bpy.context.active_object.material_slots) and AM.multi_materials:\r\n box.label(\"Some materials wont be added\", icon = 'ERROR')\r\n box.label(\" because there already exist\")\r\n row = box.row()\r\n row.prop(AM, \"multi_materials\", text = \"All materials\")\r\n if AM.as_mat_scene:\r\n row = box.row(align = True)\r\n row.label(\"Scene name:\")\r\n row.prop(AM, \"scene_name\", text = \"\")\r\n \r\n row = box.row(align = True)\r\n row.prop(AM, \"render_type\", text = \" \", expand = True)\r\n row = box.row()\r\n row.label(\"Thumbnail extention:\")\r\n row = box.row(align = True)\r\n row.prop(AM, \"thumb_ext\", expand = True)\r\n \r\n if AM.as_mat_scene:\r\n for obj in context.scene.objects:\r\n if obj.type == 'CAMERA':\r\n cam_is_valid = True\r\n \r\n if len([obj for obj in context.selected_objects if obj.type != 'CAMERA' and bpy.context.active_object == obj]) == 1:\r\n obj_is_valid = True\r\n \r\n row = box.row()\r\n row.label(\"Selected object rendering\", icon = 'FILE_TICK' if obj_is_valid else 'CANCEL')\r\n row = box.row()\r\n row.label(\"Camera in the scene\", icon = 'FILE_TICK' if cam_is_valid else 'CANCEL')\r\n if not cam_is_valid:\r\n row = box.row()\r\n row.operator(\"object.camera_add\", text = \"Add camera\", icon = 'OUTLINER_OB_CAMERA')\r\n \r\n if not AM.as_mat_scene:\r\n # --------------------- # \r\n # RENDER THUMBNAIL #\r\n # --------------------- #\r\n \r\n if AM.render_type == 'render':\r\n row = box.row(align = True)\r\n row.label(\"Thumbnail:\")\r\n row.prop(AM, \"mat_thumb_type\", text = \"\")\r\n \r\n # --------------------- # \r\n # OPENGL THUMBNAIL #\r\n # --------------------- #\r\n \r\n if AM.render_type == 'opengl':\r\n row = box.row(align=True)\r\n row.operator(\"object.setup_ogl_render\", text=\"Setup OGL render\" if not \"AM_OGL_Camera\" in [obj.name for obj in context.scene.objects] else \"View camera\", icon='ZOOMIN')\r\n row.operator(\"object.remove_ogl_render\", text=\"\", icon='ZOOMOUT')\r\n row = layout.column()\r\n row = box.row(align=True) \r\n row.label(\"Background:\")\r\n row.prop(AM, \"background_alpha\", text=\"\")\r\n row = box.row(align=True)\r\n row.prop(view, \"show_only_render\")\r\n\r\n # -------------------- # \r\n # IMAGE THUMBNAIL #\r\n # -------------------- #\r\n \r\n elif AM.render_type == 'image':\r\n row = box.row(align=True)\r\n row.prop(AM, \"image_type\", text=\" \", expand=True)\r\n if AM.image_type == 'disk':\r\n box.label(\"Choose your thumbnail\")\r\n box.prop(AM, \"custom_thumbnail_path\", text=\"\")\r\n else:\r\n box.prop_search(AM, \"render_name\", bpy.data, \"images\", text=\"\") \r\n \r\n row = box.row(align=True)\r\n if (AM.as_mat_scene and AM.scene_name and cam_is_valid and obj_is_valid or not AM.as_mat_scene) and (AM.render_type == 'render' or (asset_name not in thumb_list or AM.replace_rename == 'replace') and AM.render_type == 'opengl' or AM.render_type == 'image' and (AM.image_type == 'disk' and AM.custom_thumbnail_path or AM.image_type == 'rendered' and AM.render_name)):\r\n if 
AM.as_mat_scene:\r\n row.operator(\"object.add_scene_in_library\", text=\"OK\", icon='FILE_TICK')\r\n else:\r\n row.operator(\"object.add_material_in_library\", text=\"OK\", icon='FILE_TICK')\r\n row.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')\r\n \r\n else:\r\n if AM.multi_materials and not get_valid_materials():\r\n box.label(\"All materials already exist\".format(asset_name), icon='ERROR')\r\n box.separator()\r\n if len(bpy.context.active_object.material_slots) >= 2:\r\n box.prop(AM, \"multi_materials\", text = \"All materials\")\r\n \r\n else:\r\n box.label(\"\\\" {} \\\" already exist\".format(asset_name), icon='ERROR')\r\n box.separator()\r\n if len(bpy.context.active_object.material_slots) >= 2:\r\n box.prop(AM, \"multi_materials\", text = \"All materials\")\r\n else:\r\n AM.multi_materials = False\r\n row = box.row(align=True)\r\n row.prop(AM, \"replace_rename\", text=\" \", expand=True)\r\n if AM.replace_rename == 'rename':\r\n if AM.as_mat_scene:\r\n box.prop(AM, \"scene_name\", text = \"\")\r\n else:\r\n box.prop(AM, \"rename_mat\", text=\"\")\r\n \r\n row = box.row()\r\n row.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')", "def export_material(self, bo, bm):\n\n # Sometimes, a material might need to be single-use. Right now, the most apparent example\n # of that situation is when a lightmap image is baked. Wavesets are in the same boat, but\n # that's a special case as of the writing of this code.\n single_user = self._requires_single_user_material(bo, bm)\n if single_user:\n mat_name = \"{}_AutoSingle\".format(bm.name) if bo.name == bm.name else \"{}_{}\".format(bo.name, bm.name)\n self._report.msg(\"Exporting Material '{}' as single user '{}'\", bm.name, mat_name, indent=1)\n hgmat = None\n else:\n mat_name = bm.name\n self._report.msg(\"Exporting Material '{}'\", mat_name, indent=1)\n hsgmat = self._mgr.find_key(hsGMaterial, name=mat_name, bl=bo)\n if hsgmat is not None:\n return hsgmat\n\n hsgmat = self._mgr.add_object(hsGMaterial, name=mat_name, bl=bo)\n slots = [(idx, slot) for idx, slot in enumerate(bm.texture_slots) if self._can_export_texslot(slot)]\n\n # There is a major difference in how Blender and Plasma handle stencils.\n # In Blender, the stencil is on top and applies to every layer below is. In Plasma, the stencil\n # is below the SINGLE layer it affects. The main texture is marked BindNext and RestartPassHere.\n # The pipeline indicates that we can render 8 layers simultaneously, so we will collect all\n # stencils and apply this arrangement. We're going to limit to 6 stencils however. 1 layer for\n # main texture and 1 piggyback.\n num_stencils = sum((1 for i in slots if i[1].use_stencil))\n if num_stencils > _MAX_STENCILS:\n raise ExportError(\"Material '{}' uses too many stencils. 
The maximum is {}\".format(bm.name, _MAX_STENCILS))\n stencils = []\n restart_pass_next = False\n\n # Loop over layers\n for idx, slot in slots:\n # Prepend any BumpMapping magic layers\n if slot.use_map_normal:\n if bo in self._bump_mats:\n raise ExportError(\"Material '{}' has more than one bumpmap layer\".format(bm.name))\n du, dw, dv = self.export_bumpmap_slot(bo, bm, hsgmat, slot, idx)\n hsgmat.addLayer(du.key) # Du\n hsgmat.addLayer(dw.key) # Dw\n hsgmat.addLayer(dv.key) # Dv\n\n if slot.use_stencil:\n stencils.append((idx, slot))\n else:\n tex_layer = self.export_texture_slot(bo, bm, hsgmat, slot, idx)\n if restart_pass_next:\n tex_layer.state.miscFlags |= hsGMatState.kMiscRestartPassHere\n restart_pass_next = False\n hsgmat.addLayer(tex_layer.key)\n if slot.use_map_normal:\n self._bump_mats[bo] = (tex_layer.UVWSrc, tex_layer.transform)\n # After a bumpmap layer(s), the next layer *must* be in a\n # new pass, otherwise it gets added in non-intuitive ways\n restart_pass_next = True\n if stencils:\n tex_state = tex_layer.state\n if not tex_state.blendFlags & hsGMatState.kBlendMask:\n tex_state.blendFlags |= hsGMatState.kBlendAlpha\n tex_state.miscFlags |= hsGMatState.kMiscRestartPassHere | hsGMatState.kMiscBindNext\n curr_stencils = len(stencils)\n for i in range(curr_stencils):\n stencil_idx, stencil = stencils[i]\n stencil_name = \"STENCILGEN_{}@{}_{}\".format(stencil.name, bm.name, slot.name)\n stencil_layer = self.export_texture_slot(bo, bm, hsgmat, stencil, stencil_idx, name=stencil_name)\n if i+1 < curr_stencils:\n stencil_layer.state.miscFlags |= hsGMatState.kMiscBindNext\n hsgmat.addLayer(stencil_layer.key)\n\n # Plasma makes several assumptions that every hsGMaterial has at least one layer. If this\n # material had no Textures, we will need to initialize a default layer\n if not hsgmat.layers:\n layer = self._mgr.find_create_object(plLayer, name=\"{}_AutoLayer\".format(bm.name), bl=bo)\n self._propagate_material_settings(bm, layer)\n hsgmat.addLayer(layer.key)\n\n # Cache this material for later\n mat_list = self._obj2mat.setdefault(bo, [])\n mat_list.append(hsgmat.key)\n\n # Looks like we're done...\n return hsgmat.key", "def info_materials_intermediates_get():\n materials = _material_by_group(428) # 428 == intermediate group\n return materials, 200", "def get_material(material):\n for libn,tdict in liblist:\n if material in tdict:\n return tdict[material]\n print (material, \" not found\")\n raise KeyError", "def Density(material):\n if material == \"mild\":\n return 7850.0\n else:\n if material == \"al\":\n return 2700.0\n else:\n raise ValueError(\"Invalid material `\"+material+\"'\")", "def append_material(self, material):\n # First check if asset attribute exists; if not, define the asset attribute\n if not hasattr(self, \"asset\"):\n self.asset = ET.Element(\"asset\")\n # If the material name is not in shared materials, add this to our assets\n if material.name not in self.shared_materials:\n self.asset.append(ET.Element(\"texture\", attrib=material.tex_attrib))\n self.asset.append(ET.Element(\"material\", attrib=material.mat_attrib))\n # Add this material name to shared materials if it should be shared\n if material.shared:\n self.shared_materials.add(material.name)\n self.shared_textures.add(material.tex_attrib[\"name\"])\n # Update prefix for assets\n add_prefix(root=self.asset, prefix=self.naming_prefix, exclude=self.exclude_from_prefixing)", "def Au():\n return load_material(miepy.__path__[0] + \"/materials/au.dat\")", "def plot_material(mat):\n plt.plot(mat.wav, 
mat.eps.real, 'b', linewidth=2, label=\"eps real\")\n plt.plot(mat.wav, mat.eps.imag, 'b--', linewidth=2, label=\"eps imag\")\n plt.plot(mat.wav, mat.mu.real, 'r', linewidth=2, label=\"mu real\")\n plt.plot(mat.wav, mat.mu.imag, 'r--', linewidth=2, label=\"mu imag\")\n plt.xlabel(\"Wavelength (nm)\")\n plt.ylabel(\"eps/mu\")\n plt.legend()", "def create_blender_material(self, ogremat, mat, meshId, matIdx):\n logger.debug(\"create_blender_material\")\n textures = ogremat.textures\n bmat = None\n idx = 0\n mat_name = mat[\"name\"].split(\"/\")[0]\n try:\n bmat = bpy.data.materials[mat_name]\n if bversion == 3:\n bmat.name = \"tobedeleted\"\n bmat = bpy.data.materials.new(mat_name)\n except:\n bmat = bpy.data.materials.new(mat_name)\n self.set_uuid(bmat, ogremat.uuid)\n # material base properties\n if ogremat.doambient:\n if bversion == 2:\n bmat.setAmb(ogremat.ambient)\n else:\n bmat.ambient = ogremat.ambient\n if ogremat.specular:\n if bversion == 2:\n bmat.setSpec(1.0)\n bmat.setSpecCol(ogremat.specular[:3])\n bmat.setHardness(int(ogremat.specular[3]*4.0))\n else:\n bmat.specular_intensity = 1.0\n ogremat.specular[:3]\n bmat.specular_color = ogremat.specular[:3]\n bmat.specular_hardness = int(ogremat.specular[3]*4.0)\n if ogremat.alpha < 1.0:\n bmat.alpha = ogremat.alpha\n # specular\n for layerName, textureId in ogremat.layers.items():\n if layerName == 'shadowMap':\n if bversion == 2:\n bmat.setMode(Blender.Material.Modes['SHADOWBUF'] & bmat.getMode())\n else:\n bmat.use_cast_buffer_shadows = True\n if textureId:\n textureId = textureId\n pars = (bmat, layerName, mat[\"name\"], ogremat, idx, meshId,\n matIdx)\n if textureId in self._imported_assets:\n btex = self._imported_assets[textureId]\n self.layer_ready(btex, *pars)\n elif self.simrt:\n pars = (textureId,) + pars\n if not self.Asset.downloadAsset(textureId, 0,\n self.texture_downloaded, \n pars,\n main=self.doTextureDownloadTranscode):\n self.add_texture_callback(textureId, self.layer_ready, pars[1:])\n idx += 1\n self._imported_materials[mat[\"name\"]] = bmat\n return bmat", "def layer_properties(freq_vec, material):\n # name of the material\n material_name = material[0]\n # thickness of the material (reshape with freq shape, in a tuple, to\n # allow the sum with the tuple of material properties)\n thickness = (np.array( [material[1]]*len(freq_vec) ), )\n # check if we have to pass extra arguments for non homogenous material\n if material_name == 'meta':\n param = material[2:]\n else:\n param = ()\n # read/compute material properties\n prop = mat.properties(material_name, freq_vec, *param)\n\n return thickness + prop", "def create_materials_from_data(textures):\n\n materials = []\n\n #Set colour to incremenet from 0 - 8\n colour_inc = 1.0 / len(textures)\n colour = 0\n\n for current_material in textures:\n mat = bpy.data.materials.new(current_material[0])\n mat.diffuse_color = (0, colour, 0,)\n mat.diffuse_shader = 'LAMBERT' \n mat.diffuse_intensity = 1.0 \n mat.specular_color = (1, 1, 1,)\n mat.specular_shader = 'COOKTORR'\n mat.specular_intensity = 0.5\n mat.alpha = 1\n mat.ambient = 1\n mat.use_shadeless = True\n\n mtex = mat.texture_slots.add()\n mtex.texture = current_material[1]\n mtex.texture_coords = 'UV'\n mtex.use_map_color_diffuse = True \n\n materials.append(mat)\n colour += colour_inc\n \n return materials", "def __render_material_preview(self, scene):\n\n # Don't render material thumbnails.\n (width, height) = util.get_render_resolution(scene)\n if width <= 96:\n return\n\n # Collect objects and their materials in a 
object -> [materials] dictionary.\n objects_materials = {}\n for obj in (obj for obj in scene.objects if obj.is_visible(scene) and not obj.hide_render):\n for mat in util.get_instance_materials(obj):\n if mat is not None:\n if obj.name not in objects_materials.keys():\n objects_materials[obj] = []\n objects_materials[obj].append(mat)\n\n # Find objects that are likely to be the preview objects.\n preview_objects = [o for o in objects_materials.keys() if o.name.startswith('preview')]\n if not preview_objects:\n return\n\n # Find the materials attached to the likely preview object.\n likely_materials = objects_materials[preview_objects[0]]\n if not likely_materials:\n return\n\n # Build the path to the output preview project.\n preview_output_dir = os.path.join(tempfile.gettempdir(), \"blenderseed\", \"material_preview\")\n preview_project_filepath = os.path.join(preview_output_dir, \"material_preview.appleseed\")\n\n # Create target directories if necessary.\n if not os.path.exists(preview_output_dir):\n try:\n os.makedirs(preview_output_dir)\n except os.error:\n self.report({\"ERROR\"}, \"The directory {0} could not be created. Check directory permissions.\".format(preview_output_dir))\n return\n\n # Copy assets from template project to output directory.\n preview_template_dir = os.path.join(os.sep.join(util.realpath(__file__).split(os.sep)[:-1]), \"mat_preview\")\n existing_files = os.listdir(preview_output_dir)\n for item in os.listdir(preview_template_dir):\n if item not in existing_files:\n copyfile(os.path.join(preview_template_dir, item), os.path.join(preview_output_dir, item))\n\n prev_mat = likely_materials[0]\n prev_type = prev_mat.preview_render_type.lower()\n\n # Export the project.\n writer = projectwriter.Writer()\n file_written = writer.export_preview(scene,\n preview_project_filepath,\n prev_mat,\n prev_type,\n width,\n height)\n if not file_written:\n print('Error while exporting. 
Check the console for details.')\n return\n\n # Render the project.\n self.__render_project_file(scene, preview_project_filepath)", "def set_material(properties,object,finish,normal):\n if object not in properties:\n properties[object.getName()]={}\n properties[object.getName()][\"finish\"]=finish\n properties[object.getName()][\"normal\"]=normal", "def create_material_data(self):\n for num, zaid in enumerate(self.enrichmentZaids):\n enriched_isotope_dict = {}\n for isoNum, isotopes in enumerate(self.enrichmentIsotopes[num]):\n enriched_isotope_dict[isotopes] = self.enrichmentVector[num][isoNum]\n self.enrichmentDict[zaid] = enriched_isotope_dict\n for num, element in enumerate(self.elements):\n self.elementDict[self.zaids[num]] = Element.Element(element)\n\n if self.isotopicAtomPercents:\n self.atomDensity = self.density\n self.set_atom_fractions()\n else:\n self.set_elemental_enrichment()\n self.set_weight_percent()\n self.atomDensity, self.atomPercent = set_atom_percent(self.weightPercent, self.density,\n self.elementDict)", "def MaterialTool(*args):\n return _XCAFDoc.XCAFDoc_DocumentTool_MaterialTool(*args)", "def get_material_set(**kw):\n mat_ids = set()\n volumes = get_volume_list()\n for v in volumes:\n d = volume_metadata( v )\n if( kw.get('with_rho') is True ):\n # rho is undefined for the void material and dagmc may return anything.\n if d['material'] == 0:\n mat_ids.add( (d['material'], 0.0) )\n else:\n mat_ids.add( (d['material'], d['rho']) )\n else:\n mat_ids.add( d['material'] )\n return mat_ids", "def get_materials_properties(dbpath): #<un-named>nook\n odb = openOdb(path=dbpath)\n data = []\n for _name,_mat in odb.materials.items():\n _elastic_mod = _mat.elastic.table[0][0]\n _poisson = _mat.elastic.table[0][1]\n if hasattr(_mat,\"plastic\"):\n _plastic = _mat.plastic.table\n else:\n _plastic = []\n data.append((_name,_elastic_mod,_poisson,_plastic))\n odb.close()\n return data", "def define_material(self):\n\n # Check which class should be called.\n const_eqn = self.config['material']['const_eqn']\n if isclass(const_eqn):\n mat_class = self.config['material']['const_eqn']\n elif const_eqn == 'lin_elastic':\n mat_class = materials.solid_materials.LinearIsoMaterial\n elif const_eqn == 'neo_hookean':\n mat_class = materials.solid_materials.NeoHookeMaterial\n elif const_eqn == 'demiray':\n mat_class = materials.solid_materials.DemirayMaterial\n elif const_eqn == 'fung':\n mat_class = materials.solid_materials.FungMaterial\n elif const_eqn == 'guccione':\n mat_class = materials.solid_materials.GuccioneMaterial\n elif const_eqn == 'holzapfel_ogden':\n mat_class = materials.solid_materials.HolzapfelOgdenMaterial\n elif const_eqn == 'newtonian' or const_eqn == 'stokes':\n mat_class = materials.fluids.NewtonianFluid\n else:\n raise NotImplementedError(\"Shouldn't be in here...\")\n\n # Create an instance of the material class and store\n # as member data.\n try:\n inverse = self.config['formulation']['inverse']\n except KeyError:\n inverse = False\n self._material = mat_class(inverse=inverse,\n **self.config['material'])\n\n return None", "def getMaterialString(self):\n matString = 'm10 1001.70c -0.080538 $Lucite (PMMA / Plexiglass) rho = 1.19 g/cc\\n'\n matString += ' 6012.70c -0.599848 8016.70c -0.319614 \\n'\n matString += 'm204 7014.70c -0.755636 $air (US S. 
Atm at sea level) rho = 0.001225 \\n'\n matString += ' 8016.70c -0.231475 18036.70c -3.9e-005 18038.70c -8e-006\\n'\n matString += ' 18040.70c -0.012842 \\n'\n matString += 'm5 98252.66c 1 $ Cf-252, rho =15.1 g/cc wiki \\n'\n matString += 'm406 82204.70c -0.013781 $Lead, \\n'\n matString += ' 82206.70c -0.239557 82207.70c -0.220743 82208.70c -0.525919\\n'\n matString += 'm456 1001.70c -0.143716 $Polyethylene - rho = 0.93 g/cc \\n'\n matString += ' 6000.70c -0.856284 \\n'\n matString += 'm488 14028.70c -0.009187 $Steel, Stainless 316 rho = 7.92 \\n'\n matString += ' 14029.70c -0.000482 14030.70c -0.000331 24050.70c -0.007095\\n'\n matString += ' 24052.70c -0.142291 24053.70c -0.016443 24054.70c -0.004171\\n'\n matString += ' 25055.70c -0.02 26054.70c -0.037326 26056.70c -0.601748\\n'\n matString += ' 26057.70c -0.014024 26058.70c -0.001903 28058.70c -0.080873\\n'\n matString += ' 28060.70c -0.031984 28061.70c -0.001408 28062.70c -0.004546\\n'\n matString += ' 28064.70c -0.001189 42092.70c -0.003554 42094.70c -0.002264\\n'\n matString += ' 42095.70c -0.003937 42096.70c -0.004169 42097.70c -0.002412\\n'\n matString += ' 42098.70c -0.006157 42100.70c -0.002507 \\n'\n matString += 'mt3 poly.01t \\n'\n matString += 'mt456 poly.01t \\n'\n matString += 'mt10 poly.01t \\n'\n return matString", "def get_smeared_material(materials, void_material='', void_percent=1.0):\n smear_material = {}\n for material, materialWeightPercent in materials.items():\n void_multiplier = 1.0\n if material == 'Void':\n pass\n else:\n base_material = Material()\n base_material.set_material(material)\n\n if base_material.materialName == void_material:\n void_multiplier = void_percent\n\n for isotope, isotopeWeightPercent in base_material.weightPercent.items():\n element = str(isotope)\n if len(element) < 5:\n current_element = element[:1] + '000'\n else:\n current_element = element[:2] + '000'\n current_element = int(current_element)\n try:\n smear_material[isotope] += isotopeWeightPercent * materialWeightPercent * base_material.density \\\n * AVOGADROS_NUMBER * void_multiplier / \\\n base_material.elementDict[current_element].molecularMassDict[isotope]\n except KeyError:\n smear_material[isotope] = isotopeWeightPercent * materialWeightPercent * base_material.density \\\n * AVOGADROS_NUMBER * void_multiplier / \\\n base_material.elementDict[current_element].molecularMassDict[isotope]\n smeared_material = Material()\n smeared_material.name = \"{}\".format([val for val in materials])\n smeared_material.atomDensity = sum(smear_material.values())\n smeared_atom_percent = {}\n for k, v in smear_material.items():\n smeared_atom_percent[k] = v / smeared_material.atomDensity\n smeared_material.atomPercent = smeared_atom_percent\n return smeared_material", "def generate_materials_dict(self):\n c = 299792458.0\n w_mat = 2 * np.pi * c / self.l_mat - self.w0\n l2_mat = (self.l_mat * 1e6) ** 2\n\n n_air = 1 + 0.05792105 * l2_mat / (238.0185 * l2_mat - 1) + 0.00167917 * l2_mat / (57.362 * l2_mat - 1)\n air_ip = interp1d(w_mat, n_air, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['air'] = air_ip\n\n n_fs = np.sqrt(1 + 0.6961663 * l2_mat / (l2_mat - 0.0684043 ** 2) +\n 0.4079426 * l2_mat / (l2_mat - 0.1162414 ** 2) +\n 0.8974794 * l2_mat / (l2_mat - 9.896161 ** 2))\n fs_ip = interp1d(w_mat, n_fs, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['fs'] = fs_ip\n\n n_mgf2 = np.sqrt(1 + 0.48755108 * l2_mat / (l2_mat - 0.04338408 ** 2) +\n 0.39875031 * l2_mat / (l2_mat - 0.09461442 ** 2) 
+\n 2.3120353 * l2_mat / (l2_mat - 23.793604 ** 2))\n mgf2_ip = interp1d(w_mat, n_mgf2, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['mgf2'] = mgf2_ip\n\n n_sapphire_o = np.sqrt(1 + 1.4313493 * l2_mat / (l2_mat - 0.0726631 ** 2) +\n 0.65054713 * l2_mat / (l2_mat - 0.1193242 ** 2) +\n 5.3414021 * l2_mat / (l2_mat - 18.028251 ** 2))\n sapphire_o_ip = interp1d(w_mat, n_sapphire_o, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['sapphire_o'] = sapphire_o_ip\n\n n_sapphire_e = np.sqrt(1 + 1.5039759 * l2_mat / (l2_mat - 0.0740288 ** 2) +\n 0.55069141 * l2_mat / (l2_mat - 0.1216529 ** 2) +\n 6.5927379 * l2_mat / (l2_mat - 20.072248 ** 2))\n sapphire_e_ip = interp1d(w_mat, n_sapphire_e, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['sapphire_e'] = sapphire_e_ip\n\n n_bbo_o = np.sqrt(2.7405 + 0.0184 / (l2_mat - 0.0179) - 0.0155 * l2_mat)\n bbo_o_ip = interp1d(w_mat, n_bbo_o, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['bbo_o'] = bbo_o_ip\n\n n_bbo_e = np.sqrt(2.3730 + 0.0128 / (l2_mat - 0.0156) - 0.0044 * l2_mat)\n bbo_e_ip = interp1d(w_mat, n_bbo_e, bounds_error=False, fill_value=np.nan, kind=\"quadratic\")\n self.materials['bbo_e'] = bbo_e_ip\n\n materials_files = os.listdir(self.materials_path)\n logger.info(\"Found {0:d}\".format(materials_files.__len__()))\n for mat_file in materials_files:\n logger.debug(mat_file)\n self.read_material(''.join((self.materials_path, '/', mat_file)))", "def write_material_data(ka_red=255.0 / 255, ka_green=255.0 / 255, ka_blue=255.0 / 255,\n ka_texture_ID=9223372036854775807, # ambient\n ks_red=255.0 / 255, ks_green=255.0 / 255, ks_blue=255.0 / 255,\n ks_texture_ID=9223372036854775807, # specular\n kd_red=255.0 / 255, kd_green=255.0 / 255, kd_blue=255.0 / 255,\n kd_texture_ID=9223372036854775807, # diffuse\n ns=0.1, # specular exponent\n alpha=1 # opacity\n ):\n\n input_ = [(ka_red, 'float32'), (ka_green, 'float32'), (ka_blue, 'float32'),\n (ka_texture_ID, 'uint64'),\n (ks_red, 'float32'), (ks_green, 'float32'), (ks_blue, 'float32'),\n (ks_texture_ID, 'uint64'),\n (kd_red, 'float32'), (kd_green, 'float32'), (kd_blue, 'float32'),\n (kd_texture_ID, 'uint64'),\n (ns, 'float32'), (alpha, 'float32')]\n\n block_bytes = encode(input_)\n return block_bytes", "def find_material(material):\n for libn,tdict in liblist:\n if material in tdict:\n print(libn)", "def getContents(self):\n return None if (self.pot.d() == None) else CraftMagicNumbers.getMaterial(self.pot.getItem()).getNewData(int(self.pot.getData()))\n # PAIL: rename", "def read_material(self, filename):\n l_mat = np.linspace(200e-9, 2000e-9, 5000)\n c = 299792458.0\n w_mat = 2 * np.pi * c / l_mat - self.w0\n l2_mat = (l_mat * 1e6) ** 2\n n_tmp = 0.0\n\n e = ElementTree.parse(filename)\n mat = e.getroot()\n name = mat.get('name')\n sm = mat.findall('sellmeier')\n for s in sm:\n at = s.find('A')\n if at is not None:\n a = np.double(at.text)\n else:\n a = 0.0\n bt = s.find('B')\n if bt is not None:\n b = np.double(bt.text)\n else:\n b = 0.0\n ct = s.find('C')\n if ct is not None:\n c = np.double(ct.text)\n else:\n c = 0.0\n n_tmp += a + b*l2_mat / (l2_mat - c)\n n = np.sqrt(1 + n_tmp)\n n_ip = interp1d(w_mat, n, bounds_error=False, fill_value=np.nan)\n self.materials[name] = n_ip", "def create_material(name, diffuse, alpha):\n mat = bpy.data.materials.new(name)\n mat.diffuse_color = diffuse\n mat.diffuse_intensity = 1.0\n mat.alpha = alpha\n if alpha:\n mat.use_transparency = True\n return mat", 
"def create_object_material(obj, mat_name):\n if not has_material(obj, mat_name):\n if bpy.data.materials.get(mat_name, None):\n # XXX if material with this name already exists in another object\n # append the object name to this material name\n mat_name += \".{}\".format(obj.name)\n\n mat = bpy.data.materials.new(mat_name)\n link_material(obj, mat)\n return mat\n return obj.data.materials.get(mat_name)", "def GetHandle(self):\n return _XCAFDoc.XCAFDoc_Material_GetHandle(self)", "def testUsdSingleMaterial(self):\n self._StartTest('singleMaterial')\n mayaPathSegment = mayaUtils.createUfePathSegment('|stage|stageShape')\n \n cubeUsdPathSegment = usdUtils.createUfePathSegment('/cube')\n cubePath = ufe.Path([mayaPathSegment, cubeUsdPathSegment])\n cubeItem = ufe.Hierarchy.createItem(cubePath)\n\n materialInterface = ufe.Material.material(cubeItem)\n\n materials = materialInterface.getMaterials()\n self.assertEqual(len(materials), 1)\n\n if(os.getenv('UFE_PREVIEW_VERSION_NUM', '0000') >= '5003'):\n hasAnyMaterial = materialInterface.hasMaterial()\n self.assertTrue(hasAnyMaterial)", "def __init__(self, point, normal, material):\n self.point = point\n self.norm = unit(normal)\n self.mat = material", "def getMaterialPhysics():\r\n physicsProperties = {}\r\n for material in bpy.data.materials:\r\n properties = utils.extract_cryblend_properties(material.name)\r\n if properties:\r\n physicsProperties[properties[\"Name\"]] = properties[\"Physics\"]\r\n return physicsProperties", "def _get_med(self):\n return self.__med", "def UpdateUI(self, materialHandle):\r\n \r\n self._materialHandle = materialHandle\r\n \r\n # alpha blend\r\n alphaBlendHandle = Material.GetAlphaBlendProperty(materialHandle)\r\n bEnable, srcBlend, destBlend = MPAlphaBlend.GetAlphaBlend(alphaBlendHandle)\r\n self._cbEnableAlphaBlend.SetValue(bEnable)\r\n self._comboSrcBlend.SetSelection(srcBlend)\r\n self._comboDestBlend.SetSelection(destBlend)\r\n \r\n # alpha test\r\n alphaTestHandle = Material.GetAlphaTestProperty(materialHandle)\r\n bEnable, alphaTestType, ref = MPAlphaTest.GetAlphaTest(alphaTestHandle)\r\n self._cbEnableAlphaTest.SetValue(bEnable)\r\n self._comboAlphaTestType.SetSelection(alphaTestType)\r\n self._editAlphaTestRef.SetValue(str(ref))\r\n \r\n # culling mode\r\n cullingModeHandle = Material.GetCullingModeProperty(materialHandle)\r\n cullingMode = MPCullingMode.GetCullingMode(cullingModeHandle)\r\n self._comboCullingMode.SetSelection(cullingMode)\r\n \r\n # depth state\r\n depthStateHandle = Material.GetDepthStateProperty(materialHandle)\r\n bEnable, bWriteEnable = MPDepthState.GetDepthState(depthStateHandle)\r\n self._cbEnableDepthTest.SetValue(bEnable)\r\n self._cbEnableDepthWrite.SetValue(bWriteEnable)\r\n \r\n # material ambient\r\n ambientHandle = Material.GetAmbientProperty(materialHandle)\r\n bEnable, r, g, b, a = MPAmbient.GetAmbient(ambientHandle)\r\n r, g, b, a = map(lambda n : int(n * 255.0), (r, g, b, a))\r\n self._cbEnableAmbient.SetValue(bEnable)\r\n self._colorMaterialAmbientColor.SetValue(wx.Color(r, g, b, a))\r\n \r\n # material diffuse\r\n diffuseHandle = Material.GetDiffuseProperty(materialHandle)\r\n bEnable, r, g, b, a = MPDiffuse.GetDiffuse(diffuseHandle)\r\n r, g, b, a = map(lambda n : int(n * 255.0), (r, g, b, a))\r\n self._cbEnableDiffuse.SetValue(bEnable)\r\n self._colorMaterialDiffuseColor.SetValue(wx.Color(r, g, b, a))\r\n \r\n # material specular\r\n specularHandle = Material.GetSpecularProperty(materialHandle)\r\n bEnable, r, g, b, a, shiness = 
MPSpecular.GetSpecular(specularHandle)\r\n r, g, b, a = map(lambda n : int(n * 255.0), (r, g, b, a))\r\n self._cbEnableSpecular.SetValue(bEnable)\r\n self._colorMaterialSpecularColor.SetValue(wx.Color(r, g, b, a))\r\n self._editMaterialSpecularShiness.SetValue(str(shiness))", "def setMaterial(obj=None,mat=None):\n\n\tif obj is None:\n\t\tobj = bpy.context.object\n\tif obj.data.materials:\n\t\t# assign to 1st material slot\n\t\tobj.data.materials[0] = mat\n\telse:\n\t\t# no slots\n\t\tobj.data.materials.append(mat)", "def add_material_page(wiz, title, params):\n add_grid_page(wiz, u\"Material properties\", title, params)", "def __init__(self, meta: SceneDescription):\n super().__init__(meta)\n self.scenes = []\n self.nodes = []\n self.meshes = []\n self.materials = []\n self.images = []\n self.samplers = []\n self.textures = []\n\n self.path = None\n self.scene = None\n self.gltf = None", "def getAllAttribute(self):\n\n self.shape_type = OpenMaya.MPlug(self.thisObj, self.iShapeType).asShort()\n self.draw_type = OpenMaya.MPlug(self.thisObj, self.iDrawingType).asShort()\n self.up_axis = OpenMaya.MPlug(self.thisObj, self.iUpAxis).asShort()\n self.xRay = OpenMaya.MPlug(self.thisObj, self.iXRay).asBool()\n self.billBoard = OpenMaya.MPlug(self.thisObj, self.iBillBoard).asBool()\n self.forceRefresh = OpenMaya.MPlug(self.thisObj, self.iForceRefresh).asBool()\n\n plug_edge_color = OpenMaya.MPlug(self.thisObj, self.iEdgeColor)\n self.edge_color = self.getMPoint(plug_edge_color)\n self.edge_opacity = OpenMaya.MPlug(self.thisObj, self.iEdgeOpacity).asFloat()\n\n plug_polygon_color = OpenMaya.MPlug(self.thisObj, self.iPolygonColor)\n self.polygon_color = self.getMPoint(plug_polygon_color)\n self.polygon_opacity = OpenMaya.MPlug(self.thisObj, self.iPolygonOpacity).asFloat()\n\n self.shape_size = OpenMaya.MPlug(self.thisObj, self.iShapeSize).asFloat()\n self.edge_size = OpenMaya.MPlug(self.thisObj, self.iEdgeSize).asFloat()\n\n plug_offset_position = OpenMaya.MPlug(self.thisObj, self.iPositionOffset)\n self.offset_position = self.getMPoint(plug_offset_position)\n plug_offset_rotation = OpenMaya.MPlug(self.thisObj, self.iRotationOffset)\n self.offset_rotation = self.getMPoint(plug_offset_rotation)", "def add_face_materials(engine, mesh):\n texture_image = bpy.data.images.load(os.path.join(basepath, settings.TEXTURE_FILE))\n image_texture = bpy.data.textures.new('export_texture', type = 'IMAGE')\n image_texture.image = texture_image\n image_material = bpy.data.materials.new('TextureMaterials')\n image_material.use_shadeless = True\n\n material_texture = image_material.texture_slots.add()\n material_texture.texture = image_texture\n material_texture.texture_coords = 'UV'\n bpy.ops.object.mode_set(mode='OBJECT')\n context_obj = bpy.context.object\n context_obj_data = context_obj.data\n context_obj_data.materials.append(image_material)\n bpy.types.SpaceView3D.show_textured_solid = True", "def test_create_material_multi_basic(self):\n expected_materials = [\n ['cotton', 'AAA', 'BBB', 'CCC'],\n ['cotton', 'AAA', 'BBB', 'CCC'],\n ['wool', 'AAA', 'BBB', 'CCC'],\n ]\n\n select_listings_to_edit(self.driver)\n d = self.driver\n bp = BulkPage(d)\n\n send_keys(bp.operation_input(), 'AAA,BBB ,CCC')\n click(bp.operation_apply())\n\n material_names = bp.material_names()\n assert material_names == expected_materials", "def GetMaterialLabels(self, *args):\n return _XCAFDoc.XCAFDoc_MaterialTool_GetMaterialLabels(self, *args)", "def info_materials_composites_get():\n materials = _material_by_group(429) # 429 == intermediate 
group\n return materials, 200", "def get_player_material(board):\n # clear board to read out information\n information = str(board).replace(\"\\n\", \" \")\n\n # get black and white material\n materialPlayer_B = information.count('r') + information.count('n') + information.count('b') + information.count('q') + information.count('k') + information.count('p')\n materialPlayer_W = information.count('R') + information.count('N') + information.count('B') + information.count('Q') + information.count('K') + information.count('P')\n \n # return materials for each player\n return materialPlayer_W, materialPlayer_B", "def test_create_material(self):\n expected_materials = [\n ['cotton', 'AAA'],\n ['cotton', 'AAA'],\n ['wool', 'AAA'],\n ]\n\n select_listings_to_edit(self.driver)\n d = self.driver\n bp = BulkPage(d)\n\n send_keys(bp.operation_input(), 'AAA')\n click(bp.operation_apply())\n\n material_names = bp.material_names()\n assert material_names == expected_materials\n\n apply_class = bp.operation_apply().get_attribute('class')\n assert 'inactive' in apply_class.split(' ')", "def __init__(self, name, i_node, j_node, m_node, n_node, t, material, model, kx_mod=1.0,\n ky_mod=1.0):\n\n self.name = name\n self.ID = None\n self.type = 'Rect'\n\n self.i_node = i_node\n self.j_node = j_node\n self.m_node = m_node\n self.n_node = n_node\n\n self.t = t\n \n self.kx_mod = kx_mod\n self.ky_mod = ky_mod\n\n self.pressures = [] # A list of surface pressures [pressure, case='Case 1']\n\n # Plates need a link to the model they belong to\n self.model = model\n\n # Get material properties for the plate from the model\n try:\n self.E = self.model.Materials[material].E\n self.nu = self.model.Materials[material].nu\n except:\n raise KeyError('Please define the material ' + str(material) + ' before assigning it to plates.')", "def XCAFDoc_DocumentTool_MaterialTool(*args):\n return _XCAFDoc.XCAFDoc_DocumentTool_MaterialTool(*args)", "def MaterialsLabel(*args):\n return _XCAFDoc.XCAFDoc_DocumentTool_MaterialsLabel(*args)", "def isMaterialEntity(*args):\n return _libsbml.SBO_isMaterialEntity(*args)", "def __init__(self, material=None, length=None, entrance_length=None):\n self.material = material\n self.length = length\n self.entrance_length = entrance_length", "def __init__(self, name, colour, mass, system):\n self.name = name\n self.colour = colour\n self.mass = mass\n self.system = system", "def _getFacesAndMaterials(self):\r\n room = self.obj\r\n polygonDict = {} # a dict that holds faces (dict), their vertices (dict: positions and materials)\r\n mesh = room.meshes[0] # WARNING: supposed to work with a single mesh material\r\n poly = mesh.getPolygon(0) # get polygon list\r\n\r\n for n in range(0,mesh.numPolygons):\r\n polygonDict[n+1] = {}\r\n\r\n # get face (poly) materials\r\n poly = mesh.getPolygon(n)\r\n polygonDict[n+1]['material'] = poly.material_name.replace('MA','') # since blender add 'MA' to each material name\r\n\r\n # get face (poly) vertices positions\r\n v1_xyz = room.worldTransform * mesh.getVertex(poly.material_id, poly.v1).XYZ\r\n v2_xyz = room.worldTransform * mesh.getVertex(poly.material_id, poly.v2).XYZ\r\n v3_xyz = room.worldTransform * mesh.getVertex(poly.material_id, poly.v3).XYZ\r\n v4_xyz = room.worldTransform * mesh.getVertex(poly.material_id, poly.v4).XYZ\r\n polygonDict[n+1]['vertices'] = [v1_xyz, v2_xyz, v3_xyz, v4_xyz]\r\n # if gl.dbg: print (' ' + 'face ' + str(n) + ' - materials '+ poly.material_name.replace('MA',''))\r\n return polygonDict", "def SBO_isMaterialEntity(*args):\n return 
_libsbml.SBO_isMaterialEntity(*args)", "def GetHandle(self):\n return _XCAFDoc.XCAFDoc_MaterialTool_GetHandle(self)", "def _propagate_material_settings(self, bm, layer):\n state = layer.state\n\n # Shade Flags\n if not bm.use_mist:\n state.shadeFlags |= hsGMatState.kShadeNoFog # Dead in CWE\n state.shadeFlags |= hsGMatState.kShadeReallyNoFog\n\n if bm.use_shadeless:\n state.shadeFlags |= hsGMatState.kShadeWhite\n\n # Colors\n layer.ambient = utils.color(bpy.context.scene.world.ambient_color)\n layer.preshade = utils.color(bm.diffuse_color)\n layer.runtime = utils.color(bm.diffuse_color)\n layer.specular = utils.color(bm.specular_color)\n\n layer.specularPower = min(100.0, float(bm.specular_hardness))\n layer.LODBias = -1.0 # Seems to be the Plasma default\n\n if bm.emit > 0.0:\n # Use the diffuse colour as the emit, scaled by the emit amount\n # (maximum 2.0, so we'll also scale that by 0.5)\n emit_scale = bm.emit * 0.5\n layer.ambient = hsColorRGBA(bm.diffuse_color.r * emit_scale,\n bm.diffuse_color.g * emit_scale,\n bm.diffuse_color.b * emit_scale,\n 1.0)", "def info_materials_type_id_get(type_id):\n session = info_map.Session()\n q = session.query(info_map.Material).filter(info_map.Material.type == type_id)\n\n mat = q.one_or_none() # return the only result or `None`\n\n if mat is not None:\n material_info = MaterialInfo(\n type=mat.type,\n group=mat.group_id,\n name=mat.name,\n volume=mat.volume)\n return material_info, 200\n else:\n error = Error('Type {} Not Found'.format(type_id))\n return error, 404", "def apply_material(self, material, click_speed=0.02):\n if material == self.COSMIC_CUBE_FRAGMENT:\n self.emulator.click_button(self.ui['ENHANCE_POTENTIAL_COSMIC_CUBES'].button, min_duration=click_speed,\n max_duration=click_speed)\n if material == self.BLACK_ANTI_MATTER:\n self.emulator.click_button(self.ui['ENHANCE_POTENTIAL_ANTI_MATTER'].button, min_duration=click_speed,\n max_duration=click_speed)\n if material == self.NORN_STONE_OF_CHAOS:\n self.emulator.click_button(self.ui['ENHANCE_POTENTIAL_NORN_STONES'].button, min_duration=click_speed,\n max_duration=click_speed)", "def _make_openmc_input(self):\n # Define material\n mat = openmc.Material()\n mat.add_nuclide(self.nuclide, 1.0)\n if self.thermal is not None:\n name, suffix = self.thermal.split('.')\n thermal_name = openmc.data.thermal.get_thermal_name(name)\n mat.add_s_alpha_beta(thermal_name)\n mat.set_density('g/cm3', self.density)\n materials = openmc.Materials([mat])\n if self.xsdir is not None:\n xs_path = (self.openmc_dir / 'cross_sections.xml').resolve()\n materials.cross_sections = str(xs_path)\n materials.export_to_xml(self.openmc_dir / 'materials.xml')\n\n # Set up geometry\n x1 = openmc.XPlane(x0=-1.e9, boundary_type='reflective')\n x2 = openmc.XPlane(x0=+1.e9, boundary_type='reflective')\n y1 = openmc.YPlane(y0=-1.e9, boundary_type='reflective')\n y2 = openmc.YPlane(y0=+1.e9, boundary_type='reflective')\n z1 = openmc.ZPlane(z0=-1.e9, boundary_type='reflective')\n z2 = openmc.ZPlane(z0=+1.e9, boundary_type='reflective')\n cell = openmc.Cell(fill=materials)\n cell.region = +x1 & -x2 & +y1 & -y2 & +z1 & -z2\n geometry = openmc.Geometry([cell])\n geometry.export_to_xml(self.openmc_dir / 'geometry.xml')\n\n # Define source\n source = openmc.Source()\n source.space = openmc.stats.Point((0,0,0))\n source.angle = openmc.stats.Isotropic()\n source.energy = openmc.stats.Discrete([self.energy], [1.])\n\n # Settings\n settings = openmc.Settings()\n if self._temperature is not None:\n settings.temperature = {'default': 
self._temperature}\n settings.source = source\n settings.particles = self.particles // self._batches\n settings.run_mode = 'fixed source'\n settings.batches = self._batches\n settings.create_fission_neutrons = False\n settings.export_to_xml(self.openmc_dir / 'settings.xml')\n \n # Define tallies\n energy_bins = np.logspace(np.log10(self._min_energy),\n np.log10(1.0001*self.energy), self._bins+1)\n energy_filter = openmc.EnergyFilter(energy_bins)\n tally = openmc.Tally(name='tally')\n tally.filters = [energy_filter]\n tally.scores = ['flux']\n tallies = openmc.Tallies([tally])\n tallies.export_to_xml(self.openmc_dir / 'tallies.xml')", "def __init__(self, center, radius, material):\n self.center = center\n self.radius = radius\n self.material = material", "def __init__(self, center, radius, material):\n self.center = center\n self.radius = radius\n self.material = material", "def read_one(family_id, material_id):\n # Query the database for the material\n material = (\n Material.query.join(Family, Family.family_id == Material.family_id)\n .filter(Family.family_id == family_id)\n .filter(Material.material_id == material_id)\n .one_or_none()\n )\n\n # Was a material found?\n if material is not None:\n material_schema = MaterialSchema()\n data = material_schema.dump(material).data\n return data\n\n # Otherwise, nope, didn't find that material\n else:\n abort(404, f\"Material not found for Id: {material_id}\")", "def AssembleStructuralMaterialsJson(KratosWindowManager):\n for key in KratosWindowManager.MatSave.keys():\n if(DEBUG):\n print(key)\n print(type(KratosWindowManager.MatSave[key]))\n sm.structuralmaterials_dict[\"properties\"][0][\"Material\"][\"Variables\"][key]=KratosWindowManager.MatSave[key]\n for bclistobject in KratosWindowManager.boundaryConditionEditor:\n if(DEBUG):\n print(bclistobject.name)\n if bclistobject.entityType=='Element':\n sm.structuralmaterials_dict[\"properties\"][0][\"model_part_name\"]=bclistobject.name\n\n\n if KratosWindowManager.is2D:\n sm.structuralmaterials_dict[\"properties\"][0][\"Material\"][\"constitutive_law\"][\"name\"]=\"KratosMultiphysics.StructuralMechanicsApplication.LinearElasticPlaneStrain2DLaw\"\n else:\n sm.structuralmaterials_dict[\"properties\"][0][\"Material\"][\"constitutive_law\"][\"name\"]=\"KratosMultiphysics.StructuralMechanicsApplication.LinearElastic3DLaw\"\n \n\n if(DEBUG):\n print(sm.structuralmaterials_dict)\n return sm.WriteMaterialToJson(sm.structuralmaterials_dict)", "def XCAFDoc_MaterialTool_Set(*args):\n return _XCAFDoc.XCAFDoc_MaterialTool_Set(*args)", "def mezclar_bolsa(self):", "def parse_material(\n file_path: str,\n node: Node,\n verbose=False,\n):\n binary_file = open(file_path, 'rb')\n node.name = os.path.splitext(os.path.basename(file_path))[0]\n g = BinaryReader(binary_file)\n current_offset = g.tell()\n node.offset = current_offset\n\n # Handle MTR file\n material_list = []\n g.seek(current_offset)\n B = g.i(4)\n g.seek(current_offset + B[2])\n\n count = g.i(1)[0]\n\n lll = []\n for m in range(B[3]):\n C = g.i(8)\n D = g.i(C[0] * 2)\n logger.debug({\n \"[C, D]\": [C, D],\n })\n lll.append(C)\n\n logger.debug({\n \"B\": B,\n \"count\": count,\n \"lll\": lll,\n })\n\n # Loop through materials\n for m in range(B[3]):\n logger.debug(\"%s>\" % ('=' * 200))\n tm = g.tell()\n C = g.i(8)\n logger.debug({\n \"tm\": tm,\n \"C\": C,\n })\n found_material_names = []\n found_material_texture_names = []\n material_name = \"UNKNOWN_MAT\"\n for i in range(8):\n logger.debug(\"%s Loop %s %s>\" % (('=' * 24), (i + 1), ('=' * 24)))\n 
logger.debug(\"Current offset is: %s\" % g.tell())\n c = C[i]\n name = None\n if c != 0:\n logger.debug(\"%s>\" % ('=' * 32))\n g.seek(tm + 4 * i + c)\n name = g.find(b\"\\x00\")\n if name and 'MAT' in name:\n logger.debug(\"Name found: %s\" % name)\n material_name = name\n elif name:\n found_material_texture_names.append(name)\n\n found_material_names.append({\n \"mtl\": material_name,\n \"tex\": found_material_texture_names,\n })\n\n logger.debug({\n \"found_material_names\": found_material_names,\n })\n material_list.append(found_material_names)\n g.seek(tm + 32)\n\n node.data[\"material_list\"] = material_list\n logger.debug({\n \"material_list\": material_list,\n })\n g.close()", "def material_matrix(self):\n out = Tmatrix()\n out.translate(Vector([.5, .5, .5]))\n out.scale(Vector([self.radius, self.radius, self.radius]) *\n (.5 / (self.radius + self.thickness)))\n return out", "def _getMaterials(self):\n warnings.warn(\"This function is deprecated; call Document.getMaterialNodes() instead.\", DeprecationWarning, stacklevel = 2)\n return self.getMaterialNodes()", "def test_local(self):\n m = mats.Materials(\"mats_test.json\", NoneVisited())\n self.assertTrue( m.local( '164 G. Canis Majoris', '5 c a'))\n self.assertFalse( m.local( '164 G. Canis Majoris', '5 c b'))", "def save_material(material, data_class):\n data_class.material_bind[\"version\"] = \"0.7\"\n add_to_json = True\n\n warning_text = (\"Material with same name and same properties already \"\n \"exists in JSON, consider this material or revising your \"\n \"properties\")\n\n for id, check in data_class.material_bind.items():\n if id != \"version\":\n if check[\"name\"] == material.name and \\\n check[\"density\"] == material.density and \\\n check[\"thermal_conduc\"] == material.thermal_conduc and \\\n check[\"heat_capac\"] == material.heat_capac and \\\n check[\n \"thickness_default\"] == material.thickness_default and \\\n check[\"thickness_list\"] == material.thickness_list:\n\n warnings.warn(warning_text)\n print(material.name)\n add_to_json = False\n break\n\n if add_to_json is True:\n data_class.material_bind[\n material.material_id] = collections.OrderedDict()\n data_class.material_bind[\n material.material_id][\"name\"] = material.name\n data_class.material_bind[\n material.material_id][\"density\"] = material.density\n data_class.material_bind[\n material.material_id][\"thermal_conduc\"] = material.thermal_conduc\n data_class.material_bind[\n material.material_id][\"heat_capac\"] = material.heat_capac\n data_class.material_bind[\n material.material_id][\n \"thickness_default\"] = material.thickness_default\n data_class.material_bind[\n material.material_id][\"thickness_list\"] = material.thickness_list\n data_class.material_bind[\n material.material_id][\"solar_absorp\"] = material.solar_absorp\n\n with open(utilities.get_full_path(data_class.path_mat), 'w') as file:\n file.write(json.dumps(\n data_class.material_bind,\n indent=4,\n separators=(',', ': ')))", "def get_mats_from_args(self, args):\n try:\n lhslist = args.split(\"/\")\n material = CraftingMaterialType.objects.get(name__iexact=lhslist[0])\n amt = int(lhslist[1])\n if amt < 1:\n raise ValueError(\n \"You must specify a positive value of a material to send.\"\n )\n # different errors\n except (IndexError, AttributeError, TypeError):\n self.msg(\"You must specify materials to send.\")\n except CraftingMaterialType.DoesNotExist:\n self.msg(\"That is not a valid material type.\")\n except OwnedMaterial.DoesNotExist:\n self.msg(\"You don't have any of that 
material.\")\n except ValueError as err:\n self.msg(err)\n # succeeded, return amount. It'll be decremented when sent off later\n else:\n return material.id, amt", "def scattering_factors(self, material, density):\n raise NotImplementedError(\n \"need radiation type in <%s> to compute sld for %s\"\n % (self.filename, material))", "def _addMaterial(self, name):\n warnings.warn(\"This function is deprecated; call Document.addMaterialNode() instead.\", DeprecationWarning, stacklevel = 2)\n return self.addMaterialNode(name)", "def testUsdMultipleMaterials(self):\n self._StartTest('multipleMaterials')\n mayaPathSegment = mayaUtils.createUfePathSegment('|stage|stageShape')\n \n cubeUsdPathSegment = usdUtils.createUfePathSegment('/cube')\n cubePath = ufe.Path([mayaPathSegment, cubeUsdPathSegment])\n cubeItem = ufe.Hierarchy.createItem(cubePath)\n\n materialInterface = ufe.Material.material(cubeItem)\n\n materials = materialInterface.getMaterials()\n self.assertEqual(len(materials), 2)\n\n if(os.getenv('UFE_PREVIEW_VERSION_NUM', '0000') >= '5003'):\n hasAnyMaterial = materialInterface.hasMaterial()\n self.assertTrue(hasAnyMaterial)" ]
[ "0.70637417", "0.6998246", "0.6904607", "0.6898203", "0.6760521", "0.6738587", "0.66976964", "0.663665", "0.65911734", "0.6587051", "0.6576873", "0.6487353", "0.64594245", "0.63723415", "0.6360623", "0.63506037", "0.63310814", "0.631915", "0.62650704", "0.62001616", "0.6197354", "0.6185776", "0.6176775", "0.612973", "0.61154443", "0.6112562", "0.61037254", "0.60521984", "0.6015051", "0.6010192", "0.5997605", "0.59941316", "0.5970983", "0.59605527", "0.5955921", "0.5910142", "0.58974177", "0.58813787", "0.58804387", "0.5871307", "0.58315384", "0.57925284", "0.5776381", "0.57756513", "0.5748122", "0.57374406", "0.57235074", "0.5713708", "0.5710302", "0.5709714", "0.5669457", "0.5637546", "0.56193745", "0.56151843", "0.5614817", "0.56115526", "0.56110656", "0.5610741", "0.5609423", "0.55724746", "0.556086", "0.5507266", "0.5499921", "0.54937303", "0.5483734", "0.54831374", "0.54818404", "0.547713", "0.5474596", "0.5458044", "0.5455907", "0.5440251", "0.5433815", "0.5426507", "0.5423759", "0.541258", "0.5393755", "0.5391177", "0.53869843", "0.5378966", "0.5350107", "0.5342398", "0.53364", "0.5334243", "0.53140473", "0.53070277", "0.52978486", "0.52978486", "0.5277097", "0.5270297", "0.526323", "0.52542764", "0.524414", "0.5243965", "0.5241205", "0.52402854", "0.5237173", "0.5236303", "0.5223734", "0.5218368", "0.52061933" ]
0.0
-1
everything about a liquid
def __init__(self, name=None, species=None, concentration=None, conductivity=None, pH=None, density=None, viscosity=None, permittivity=None, temperature=None, valence=1.0): # identity self.name = name # electro/chemical self.species = species self.concentration = concentration # (mmol) = (mmol/L) = (mol/m3) self.conductivity = conductivity if permittivity: self.permittivity = permittivity if pH: self.pH = pH self.c_H = 10**-pH * 1e3 # (mmol) = (mmol/L) = (mol/m3); (concentration of Hydrogen ions (H+) self.valence = valence # mechanical self.density = density self.viscosity = viscosity self.temperature = temperature self.diffusivity = 2e-9 # (m^2/s) Diffusivity of KCl in DI water [Soni]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_received_liquidation(self, liquidation):\n pass", "def liquid_viscosity(id, temperature=298.15, pressure=constants.atm): # noqa: A002\n return rx._misc._get_chemical(id, temperature, pressure).mul # noqa: SLF001", "def vault(self):", "def main():\n ## real SWAT tags", "def capteur_info_relever1():\n return render_template(\n \"relever_capt.html\",\n liste = get_capteurs())", "def polution(request):\r\n return render(request, 'polution.html')", "def getSlaves():", "def items():", "def portfolio_detail():\n return render_template('portfolio/portfolio.html')", "def addVitals(self):\n\n def getBP(vt):\n \"\"\" Format a bloodPressure for templating into Vitals documents. \"\"\"\n vt['indivo_prefix'] = 'bp_' + vt['name']\n return getVital(vt)\n \n def getVital(vt):\n \"\"\" Format a vitalSign for templating into Vitals documents. \"\"\"\n\n if hasattr(v, vt['name']):\n val = getattr(v, vt['name'])\n sys, title, ident = self.coded_value(vt['uri'])\n return VITAL_SIGN.sub(\n {'unit': vt['unit'],\n 'val': val,\n 'name_title': title,\n 'name_id': ident,\n 'name_system': sys\n }\n ).sub(\n {'prefix': vt['indivo_prefix'] if 'indivo_prefix' in vt else vt['name']}, \n escape=False\n ).done()\n\n def cleanVitalsDate(date_str):\n \"\"\" Convert dates coming from raw Vitals data into UTC ISO8601 Timestamps.\"\"\"\n if date_str[-1] != 'Z':\n date_str += 'Z'\n return date_str.replace(' ', 'T')\n \n if self.pid in VitalSigns.vitals:\n for v in VitalSigns.vitals[self.pid]:\n measurements = []\n for vt in VitalSigns.vitalTypes:\n measurements.append(getVital(vt))\n\n if v.systolic:\n measurements.append(getBP(VitalSigns.systolic))\n measurements.append(getBP(VitalSigns.diastolic))\n\n encounter_str = ENCOUNTER.sub(\n {'start':cleanVitalsDate(v.start_date),\n 'end':cleanVitalsDate(v.end_date)\n }\n ).sub(\n {'encounterType':ENCOUNTER_TYPE.done() if v.encounter_type == 'ambulatory' else ''}, \n escape=False\n ).done()\n\n vitals_str = VITAL_SIGNS.sub(\n {'date': cleanVitalsDate(v.timestamp),\n }\n ).sub(\n {'encounter': encounter_str,\n 'vitals_str': ''.join(measurements)}, \n escape=False\n ).done()\n self.data.append(SDMX.sub({'models':vitals_str}, escape=False).done())", "def test_add_liquid(\n decoy: Decoy,\n mock_engine_client: EngineClient,\n subject: ProtocolCore,\n) -> None:\n liquid = PE_Liquid.construct(\n id=\"water-id\",\n displayName=\"water\",\n description=\"water desc\",\n displayColor=HexColor(__root__=\"#fff\"),\n )\n\n expected_result = Liquid(\n _id=\"water-id\",\n name=\"water\",\n description=\"water desc\",\n display_color=\"#fff\",\n )\n\n decoy.when(\n mock_engine_client.add_liquid(\n name=\"water\", color=\"#fff\", description=\"water desc\"\n )\n ).then_return(liquid)\n\n result = subject.define_liquid(\n name=\"water\", description=\"water desc\", display_color=\"#fff\"\n )\n\n assert result == expected_result", "def tags():", "def general(request):\n\treturn render(request, 'general.html', {})", "def falcon():", "def info_pollu(request):\r\n return render(request, 'info_pollu.html')", "def tag_cloud():\n\n return LOAD('plugin_wiki','cloud')", "def liquidity1_interact():\r\n date = wd.DatePicker(value = yesterday, description = \"选择日期\")\r\n threshold = wd.FloatSlider(value = 20, min = 0, max = 200, step = 1,\r\n description = \"单日可变现能力监测数值(%)\", tooltip = \"单击数值可进行编辑\", style = style)\r\n showAll = wd.Checkbox(value = False, description = '查看所有组合')\r\n verbose = wd.Checkbox(value = False, description = \"查看单日可变现能力的具体信息\")\r\n liqui_layout = 
wd.interact_manual(port_liquidity1_pre, date = date, threshold = threshold, verbose = verbose, showAll = showAll)\r\n liqui_layout.widget.children[4].description = \"开始查询\"\r\n liqui_layout.widget.children[4].button_style = \"danger\"\r\n display(liqui_layout)\r\n return None", "def get_furniture():", "def about(request):\n obj_dict = States.objects.all()\n total_sum = total()\n return render(request, 'virus_form/about.html', {'obj_dict': obj_dict, 'total': total_sum})", "def healthcare():", "def presenetCar():", "def tagger():", "def home():\n stocks = preprocess()\n\n return render_template(\"main.html\",stocks=stocks)", "def parterre_info(id):\n parterre = get_parterre(id)\n return render_template(\n \"parterre-info.html\",\n parterre = parterre,\n title = parterre.get_name(),\n capteurs = get_capteurs_parterre(id))", "def pets():\n \n pets_owned = db.execute(\"SELECT pets.id, pet_types.imgsrc, pet_types.pet_type, pets.created, pets.exp, pets.name, users.active_pet_id FROM owners JOIN pets ON pets.id = owners.pet_id JOIN pet_types ON pets.type = pet_types.id JOIN users ON users.id = owners.owner_id WHERE owner_id = ?\", (session_get_int(\"user_id\"), )).fetchall()\n return render_template(\"list.html\", pets_owned=pets_owned)", "def home(request):\n featured_image = ProductFeatured.objects.first()\n products = Product.objects.all().order_by('?')\n const_products = Product.objects.filter(section='m')\n home_products = Product.objects.filter(section='h')\n elect_products = Product.objects.filter(section='e')\n art_products = Product.objects.filter(section='a')\n furn_products = Product.objects.filter(section='f')\n bed_products = Product.objects.filter(section='s')\n kitch_products = Product.objects.filter(section='k')\n book_products = Product.objects.filter(section='b')\n drawer_products = Product.objects.filter(section='d')\n cabin_products = Product.objects.filter(section='c')\n dress_products = Product.objects.filter(section='Dressers')\n Oven_products = Product.objects.filter(section='o')\n ref_products = Product.objects.filter(section='r')\n vac_products = Product.objects.filter(section='v')\n gas_products = Product.objects.filter(section='g')\n smart_products = Product.objects.filter(section='p')\n trend_products = ProductTrend.objects.all()\n\n context = {\n \"featured_image\": featured_image,\n \"products\": products,\n \"const_products\": const_products,\n \"home_products\": home_products,\n \"elect_products\": elect_products,\n \"art_products\": art_products,\n \"furn_products\": furn_products,\n \"bed_products\": bed_products,\n \"book_products\": book_products,\n \"kitch_products\": kitch_products,\n \"drawer_products\": drawer_products,\n \"cabin_products\": cabin_products,\n \"dress_products\": dress_products,\n \"Oven_products\": Oven_products,\n \"ref_products\": ref_products,\n \"vac_products\": vac_products,\n \"gas_products\": gas_products,\n \"smart_products\":smart_products,\n \"trend_products\":trend_products\n }\n\n if request.GET:\n print request.GET\n\n return render(request, \"home.html\", context)", "def render(self):", "def meta_est(request):\n return request.param", "def body(self):", "def index() -> object:\n return render_template('ue_bootstrap.j2', title='UENERGO TAGS')", "def treat(self, page, item):\n if willstop:\n raise KeyboardInterrupt\n self.current_page = page\n item.get()\n titre = page.title()\n \n #param -b\n if self.param_first:\n if self.param_first in titre:\n self.param_first = None\n else:\n pywikibot.output('Skipping')\n return\n \n 
pagetext = page.get()\n # on met de côté les tableaux entraîneur et junior\n pagetext = re.sub(r'carrière entraîneur *= *{{', 'carrière entraîneur = {{Pouet', pagetext)\n pagetext = re.sub(r'parcours junior *= *{{', 'parcours junior = {{Pouet', pagetext)\n \n if self.param_debug:\n pywikibot.output(\n 'self.fields %s' \n % self.fields)\n \n if self.param_debug:\n pywikibot.log(\n 'pagetext : %s' \n % pagetext)\n\n templates = textlib.extract_templates_and_params(pagetext)\n if self.param_debug:\n pywikibot.log(\n 'templates : %s' \n % templates) \n for (template, fielddict) in templates:\n # Clean up template\n try:\n template = pywikibot.Page(page.site, template,\n ns=10).title(withNamespace=False)\n except pywikibot.exceptions.InvalidTitle:\n pywikibot.error(\n \"Failed parsing template; '%s' should be the template name.\"\n % template)\n continue\n\n # We found the template we were looking for\n if template in self.templateTitles:\n \n qualif = \"\"\n for field, value in fielddict.items():\n field = field.strip()\n value = value.strip()\n if not field or not value:\n continue\n if self.param_debug:\n pywikibot.output(\n 'hastings-test0 %s -> %s (%s)' \n % (field, value, int(field) % 2))\n # dans 3 colonnes Le champ précédant la value contient le qualifier \n #if field not in self.fields:\n if int(field) % 2 == 1:\n qualif = value\n # This field contains something useful for us\n #else:\n elif int(field) % 2 == 0:\n claim = pywikibot.Claim(self.repo, self.fields[\"2\"])\n \n if claim.type == 'wikibase-item':\n # Try to extract a valid page\n match = re.search(pywikibot.link_regex, value)\n if not match:\n pywikibot.output(\n '%s field %s value %s is not a '\n 'wikilink. Skipping.'\n % (claim.getID(), field, value))\n continue\n\n link_text = match.group(1)\n linked_item = self._template_link_target(item, link_text)\n if not linked_item:\n continue\n\n claim.setTarget(linked_item)\n elif claim.type == 'string':\n claim.setTarget(value.strip())\n elif claim.type == 'commonsMedia':\n commonssite = pywikibot.Site(\"commons\", \"commons\")\n imagelink = pywikibot.Link(value, source=commonssite,\n defaultNamespace=6)\n image = pywikibot.FilePage(imagelink)\n if image.isRedirectPage():\n image = pywikibot.FilePage(image.getRedirectTarget())\n if not image.exists():\n pywikibot.output(\n '[[%s]] doesn\\'t exist so I can\\'t link to it'\n % (image.title(),))\n continue\n claim.setTarget(image)\n else:\n pywikibot.output(\n '%s is not a supported datatype.'\n % claim.type)\n continue\n\n if self.param_debug:\n pywikibot.output(\n '%s field %s value : %s'\n % (claim.getID(), field, value))\n \n #******** h4stings, nettoyage des qualifiers\n qualif = qualif.replace ('–', '-')\n qualif = qualif.replace ('avant ', '-')\n qualif = qualif.replace ('{{Clr}}', '')\n qualif = qualif.replace ('{{Year|', '')\n qualif = qualif.replace ('{{prêt}}', '')\n qualif = re.sub(r'{{0(\\|0+)?}}', '', qualif)\n qualif = re.sub(r'[a-zA-Zéû&; \\.\\[\\?\\]]', '', qualif)\n #si pas de tiret, \n if (qualif.find('-') == -1): \n qualif = qualif + '-' + qualif \n dates = qualif.split('-')\n wp_debut = None\n wp_fin = None\n qualifier_debut = None\n qualifier_fin = None\n if dates[0]:\n wp_debut = dates[0][:4]\n qualifier_debut = pywikibot.Claim(self.repo, u'P580', isQualifier=True)\n qualifier_debut.setTarget(pywikibot.WbTime(year=wp_debut))\n if self.param_debug:\n pywikibot.output(' from %s'\n % qualifier_debut.getTarget().toTimestr())\n if dates[1]:\n wp_fin = dates[1][:4]\n qualifier_fin = pywikibot.Claim(self.repo, 
u'P582', isQualifier=True)\n qualifier_fin.setTarget(pywikibot.WbTime(year=wp_fin))\n if self.param_debug:\n pywikibot.output(' to %s'\n % qualifier_fin.getTarget().toTimestr())\n\n skip = False\n \n if claim.getID() in item.claims:\n existing_claims = item.claims[claim.getID()] # Existing claims on page of same property\n skip = False\n \n for existing in existing_claims:\n existing580 = None\n existing582 = None\n \n # If some attribute of the claim being added matches some attribute in an existing claim\n # of the same property, skip the claim, unless the 'exists' argument overrides it.\n if claim.getTarget() == existing.getTarget():\n \n #******** on va chercher les qualifiers existants :\n wd_debut = None\n wd_fin = None\n for qfield, qvalue in existing.qualifiers.items():\n if qfield.strip() == 'P580':\n existing580 = qvalue\n wd_debut = existing580[0].getTarget().toTimestr()[8:12]\n if qfield.strip() == 'P582':\n existing582 = qvalue\n wd_fin = existing582[0].getTarget().toTimestr()[8:12] \n if self.param_debug:\n if existing580 is not None:\n pywikibot.output('from %s -> %s'\n % (existing580[0].getTarget().toTimestr(), wd_debut))\n if existing582 is not None:\n pywikibot.output(' to %s -> %s'\n % (existing582[0].getTarget().toTimestr(), wd_fin))\n \n #si existant sans qualif -> on ajoute les qualif\n if not existing580 and not existing582:\n if dates[0]:\n existing.addQualifier(qualifier_debut)\n pywikibot.output(color_format('{green}adding %s as a qualifier of %s'\n % (wp_debut,value)))\n if dates[1]:\n existing.addQualifier(qualifier_fin)\n pywikibot.output(color_format('{green}adding %s as a qualifier of %s'\n % (wp_fin,value)))\n skip=True\n break\n \n #sinon, même qualifier : on passe (skip=true)\n elif wd_debut == wp_debut and qualifier_fin is None:\n pywikibot.output(\n 'Skipping %s because claim with same target already exists.' \n % value)\n skip=True\n break\n\n elif qualifier_debut is None and wd_fin == wp_fin:\n pywikibot.output(\n 'Skipping %s because claim with same target already exists.' \n % value)\n skip=True\n break\n elif wd_debut == wp_debut and wd_fin == wp_fin:\n pywikibot.output(\n 'Skipping %s because claim with same target already exists.' \n % value)\n skip=True\n break\n \n #sinon, si les dates ne se chevauchent pas, on envisage la création...\n elif wp_debut >= wd_fin or wp_fin <= wd_debut: \n pywikibot.output('maybe %s'\n % value)\n skip=False\n \n #sinon, c'est bizarre : on signale. \n else:\n pywikibot.output(color_format(\n '{red}Error ? 
Incohérence détectée : %s %s %s' \n % (claim.getID(), field, value)))\n skip=True\n \n #******* h4stings, si le club n'est pas dans wikidata : la totale, on se pose pas la question\n if not skip:\n pywikibot.output(color_format('{green}adding %s --> %s : %s, from %s to %s'\n % (claim.getID(), claim.getTarget(), value, wp_debut, wp_fin)))\n item.addClaim(claim)\n # A generator might yield pages from multiple languages\n source = self.getSource(page.site)\n if source:\n claim.addSource(source, bot=True)\n if dates[0]:\n claim.addQualifier(qualifier_debut)\n if dates[1]:\n claim.addQualifier(qualifier_fin)", "def capteur_info_relever(id):\n if id==0:\n id=request.form['del']\n print(id)\n capteur = get_capteur(id)\n return render_template(\n \"relever-capteur.html\",\n capteur = capteur)", "def under_construction(request):\n return render(request, 'under_construction.html', {})", "def get_designs(self):", "def getVotacion(self, url):", "def footer(node):\n\n return '''\n <img src=\"/images/colosseo.png\" title=\"disegno del colosseo\" alt=\"colosseo\" class=\"colosseo\"/>\n\t \t </section>\n </div>\n\t\t\t <div class=\"clear\"></div>\n </div>\n </div>\n\t\t<div class=\"container_12 clearfix\">\n\t <footer class=\"grid_12\">\n\t\t\t\t <p>&copy <a href=\"http://luca.postregna.name\">lucapost</a> ''' + str(current_time.year) + '''; <a rel=\"license\" href=\"http://creativecommons.org/licenses/by-nc/3.0/\">license</a>; <a href=\"/privacy.html\" title=\"normativa per la privacy\">privacy</a>; edit: ''' + time.strftime(\"%Y%m%d %I:%M:%S %p\", node.page.last_edit) + '''</p>\n <p>email: <a href=\"mailto:patdilenardo@gmail.com\" title=\"contatto email\">patdilenardo@gmail.com</a>; phone: +39 3389456208</p>\n\t\t </footer>\t\n\t\t\t<div class=\"clear\"></div>\n \t\t<script src=\"/js/hashgrid.js\"></script> \n\t\t<script src=\"/js/flux.min.js\" type=\"text/javascript\" charset=\"utf-8\"></script>\n\t\t<script type=\"text/javascript\" charset=\"utf-8\">\n\t\t\t$(function(){\n\t\t\t\tif(!flux.browser.supportsTransitions)\n\t\t\t\t\talert(\"Flux Slider requires a browser that supports CSS3 transitions\");\n\t\t\t\t\t\n\t\t\t\twindow.f = new flux.slider('#slider', {\n\t\t\t\t\tpagination: false,\n controls: true,\n captions: true,\n\t\t\t\t\ttransitions: [ 'dissolve' ],\n\t\t\t\t\tdelay: 5500\n\t\t\t\t});\n\t\t\t});\n\t\t</script> \n <script>\n (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){\n (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),\n m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)\n })(window,document,'script','//www.google-analytics.com/analytics.js','ga');\n ga('create', 'UA-6164762-14', 'nicelyventilated.it');\n ga('send', 'pageview');\n </script>\n\n<!--[if lt IE 7]><p class=chromeframe>Your browser is <em>ancient!</em> <a href=\"http://browsehappy.com/\">Upgrade to a different browser</a> or <a href=\"http://www.google.com/chromeframe/?redirect=true\">install Google Chrome Frame</a> to experience this site.</p><![endif]-->\n</body>\n</html>'''", "def portfolio(request):\n assert isinstance(request, HttpRequest)\n\n category = request.GET.get('category')\n if category == None:\n Images = models.Photo.objects.all()\n else :\n Images = models.Photo.objects.filter(category__name__contains=category)\n\n \n Categories = models.Category.objects.all()\n return render(\n request,\n 'app/Portfolio.html',\n {\n 'title':'Portfolio',\n 'message':'Here i will add my photo collections.',\n 
'year':datetime.now().year,\n 'Images': Images,\n 'Categories' : Categories\n }\n )", "def index_if(request,id,date):\n ammount=get_article_ammount(date)\n article_url_list=['https://www.ptt.cc'+i[0] for i in url_dict[date]]\n article_url=article_url_list[int(id)-1]\n #pic_path\n path='worldcloud/'+date+'_'+id+'.png'\n\n #info for display\n index_info=id+'/'+str(ammount)+' ('+str(int((float(id)/ammount*100)))+'% )'\n \n # check if last/first\n if(int(id)==1):\n P_N=-1\n elif(int(id)==ammount):\n P_N=1\n else:\n P_N=0\n\n #dump article_url pass to Web\n url_list = simplejson.dumps(article_url_list)\n\n return render(request, 'WebPtt/Gossip_index.html',{'pic_path':path,'index_info':index_info,\\\n 'P_N':P_N,'date':date,'id':id,'startDate':startDate,'endDate':endDate ,\\\n 'article_url':article_url, 'url_list':url_list ,'ammount':ammount})", "def items(self):", "def info(self):", "def info(self):", "def video_single(request, vid):\n mongodb = get_db() \n [data, peaks] = video_single_query(vid)\n videos = video_info_query()\n # from edinsights.core.render import render\n return render(request, \"single-view.html\", {\n 'video_id': vid, 'data': data, 'videos': videos, 'peaks': peaks\n })", "def on_main(self, request):\n return self.render_template('main.html', ads=self.get_adds())", "def manage_info():", "def painel():\n return render_template('home/painel.html', title=\"Painel\")", "def detail(): \n\n # get contentid\n content_id = request.args.get('contentid')\n\n # get shortest places\n title, places = get_shortest(content_id)\n print(content_id)\n\n return render_template('detail.html', \n title=title,\n content_id=content_id,\n places=places, \n count=len(places))", "def __call__(request):", "def view(self):", "def home(request):\n cart = Cart(request)\n products = Product.objects.order_by('-created')[:3]\n last = Blog.objects.order_by('-posted')[:3]\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'app/index.html',\n {\n 'title':'СТО',\n 'cart': cart,\n 'last': last,\n 'products': products,\n 'year':datetime.now().year,\n }\n )", "def Gossip_index(request,id):\n #get today article infomations\n date=get_today()\n ammount=get_article_ammount(date)\n article_url_list=['https://www.ptt.cc'+i[0] for i in url_dict[date]]\n article_url=article_url_list[int(id)-1]\n\n #pic_path\n path='worldcloud/'+date+'_'+id+'.png'\n\n #info for display\n index_info=id+'/'+str(ammount)+' ('+str(int((float(id)/ammount*100)))+'% )'\n \n # check if last/first\n if(int(id)==1):\n P_N=-1\n elif(int(id)==ammount):\n P_N=1\n else:\n P_N=0\n\n #dump article_url pass to Web\n url_list = simplejson.dumps(article_url_list)\n\n return render(request, 'WebPtt/Gossip_index.html',{'pic_path':path,'index_info':index_info,\\\n 'P_N':P_N,'date':date,'id':id,'startDate':startDate,'endDate':endDate ,\\\n 'article_url':article_url, 'url_list':url_list ,'ammount':ammount})", "def render_context(self):\n menu_pages = self.config.dbs.pages.for_slot(\"menu\")\n footer_pages = self.config.dbs.pages.for_slot(\"footer\")\n\n # kinda complicated to just replace query parameter\n url2 = werkzeug.urls.URL(*werkzeug.urls.url_parse(self.request.url))\n query = url2.decode_query()\n\n # now replace it or add it \n query['__l'] = \"de\"\n de_query = werkzeug.urls.url_encode(query)\n query['__l'] = \"en\"\n en_query = werkzeug.urls.url_encode(query)\n\n # re-encode urls\n de_url = url2.replace(query = de_query).to_url()\n en_url = url2.replace(query = en_query).to_url()\n \n payload = dict(\n wf_map = self.wf_map,\n user = 
self.user,\n barcamp = self.barcamp,\n title = self.config.title,\n url = self.request.url,\n de_url = de_url,\n en_url = en_url,\n description = self.config.description,\n vpath = self.config.virtual_path,\n vhost = self.config.virtual_host,\n is_admin = self.is_admin,\n is_main_admin = self.is_main_admin,\n menu_pages = menu_pages,\n user_id = self.user_id,\n mapbox_access_token = self.config.mapbox_access_token,\n mapbox_map_id = self.config.mapbox_map_id,\n footer_pages = footer_pages,\n ga = self.config.ga,\n userview = partial(UserView, self.app),\n image_tag = self.get_image_tag,\n )\n if self.barcamp is not None:\n payload['slug'] = self.barcamp.slug\n if self.page is not None:\n payload['page_slug'] = self.page.slug\n return payload", "def base(request):\n\treturn render(request, 'sett_base.html', {})", "def index(request):\n return render(request, 'items/index.html', {\n 'globalvars': globalvars,\n 'fart': 'fart'\n })", "def capteur_info(id):\n capteur = get_capteur(id)\n return render_template(\n \"capteur-info.html\",\n capteur = capteur,\n title = capteur.get_name(),\n parterre = get_parterre(capteur.get_parterre()),\n mesure = get_typeMesure(capteur.get_typeMesure()))", "def show_homepage():\n\n pets = Pet.query.all()\n rando_pet = get_info_random_pet()\n\n name = rando_pet['petfinder']['pet']['name']['$t']\n age = rando_pet['petfinder']['pet']['age']['$t']\n image = rando_pet['petfinder']['pet']['media']['photos']['photo'][0]['$t']\n\n return render_template('homepage.html', pets=pets,\n name=name, age=age, image=image)", "def shop(request):\n return render(request, 'shop/shop.html')", "def render(self):\n self.increase_view_count()\n return render_to_string(self.template.template_file, {'advert':self})", "def navebarre_soluce(request):\r\n return render(request, 'menu/navebarre_soluce.html')", "def main_route():\n\n pets = Pet.query.all()\n #[pet1, pet2, pet3]\n return render_template(\"index.html\", pets=pets)", "def homepage():\n prods = db(db.product.prod_starred == True).select()\n return dict(\n prods = prods\n )", "def interface(request):\n factoryState=FactoryState.objects.get(id=1)\n panel=Panel.objects.get(id=1)\n points=Point.objects.all()\n programs=os.listdir(\"/home/japhy/solarPocketFactory/templates/programs\")\n musics=os.listdir(\"/home/japhy/solarPocketFactory/templates/music\")\n return render(request, 'interface.html', {'factoryState' : factoryState, 'panel' :panel, 'programs' : programs, 'musics': musics, 'points':points})", "def sth():", "def get_context(self):\n from scoop.editorial.models import Excerpt\n identifier = self.value\n # Vérifier que l'image existe\n try:\n excerpt = Excerpt.objects.get(name=identifier)\n except Excerpt.DoesNotExist:\n excerpt = None\n return {'excerpt': excerpt}", "def portals(request):\n\n context = {\n 'text': \"Boom!\",\n 'factions': {\n 'alliance': [\n {\n 'target': \"Stormwind\",\n 'location': \"Boralus\",\n 'coordinates': (70.11, 16.77),\n 'additional_information': \"Sanctum of the Sages\"\n },\n {\n 'target': \"Silithus\",\n 'location': \"Boralus\",\n 'coordinates': (69.77, 15.67),\n 'additional_information': \"Sanctum of the Sages\"\n },\n {\n 'target': \"Exodar\",\n 'location': \"Boralus\",\n 'coordinates': (70.37, 14.97),\n 'additional_information': \"Sanctum of the Sages\"\n },\n {\n 'target': \"Ironforge\",\n 'location': \"Boralus\",\n 'coordinates': (70.86, 15.4),\n 'additional_information': \"Sanctum of the Sages\"\n },\n {\n 'target': \"Hellfire Peninsula\",\n 'location': \"Stormwind\",\n 'coordinates': 
(49.93, 87.02),\n 'additional_information': \"Wizard's Sanctum\"\n },\n {\n 'target': \"Boralus\",\n 'location': \"Stormwind\",\n 'coordinates': (48.93, 86.44),\n 'additional_information': \"Wizard's Sanctum\"\n },\n {\n 'target': \"Blasted Lands\",\n 'location': \"Stormwind\",\n 'coordinates': (48.99, 87.32),\n 'additional_information': \"Wizard's Sanctum\"\n },\n {\n 'target': \"Uldum\",\n 'location': \"Stormwind\",\n 'coordinates': (75.24, 20.49),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Hyjal\",\n 'location': \"Stormwind\",\n 'coordinates': (76.17, 18.70),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Twilight Highlands\",\n 'location': \"Stormwind\",\n 'coordinates': (75.34, 16.43),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Vashj'ir\",\n 'location': \"Stormwind\",\n 'coordinates': (73.28, 16.88),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Tol Barad\",\n 'location': \"Stormwind\",\n 'coordinates': (73.22, 18.37),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Deepholm\",\n 'location': \"Stormwind\",\n 'coordinates': (73.20, 19.64),\n 'additional_information': \"The Eastern Earthshrine\"\n },\n {\n 'target': \"Ashran\",\n 'location': \"Stormwind\",\n 'coordinates': (87.55, 35.23),\n 'additional_information': \"Stormwind Keep\"\n },\n {\n 'target': \"Dalaran (Broken Isles)\",\n 'location': \"Stormwind\",\n 'coordinates': (80.24, 34.84),\n 'additional_information': \"Petitioner's Chamber\"\n },\n {\n 'target': \"Darnassus\",\n 'location': \"Stormwind\",\n 'coordinates': (23.85, 56.06),\n 'additional_information': \"Stormwind Harbor\"\n },\n {\n 'target': \"Boralus\",\n 'location': \"Silithus\",\n 'coordinates': (41.49, 44.85),\n 'additional_information': \"Magni's Encampment\"\n },\n {\n 'target': \"Hellfire Peninsula\",\n 'location': \"Exodar\",\n 'coordinates': (48.14, 63.01),\n 'additional_information': \"The Vault of Lights\"\n },\n {\n 'target': \"Darnassus\",\n 'location': \"Exodar\",\n 'coordinates': (47.60, 62.13),\n 'additional_information': \"The Vault of Lights\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Hellfire Peninsula\",\n 'coordinates': (89.22, 51.00),\n 'additional_information': \"The Stair of Destiny 1\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Hellfire Peninsula\",\n 'coordinates': (88.62, 52.81),\n 'additional_information': \"The Stair of Destiny 2\"\n },\n {\n 'target': \"Hellfire Peninsula\",\n 'location': \"Ironforge\",\n 'coordinates': (27.23, 7.01),\n 'additional_information': \"Hall of Mysteries\"\n },\n {\n 'target': \"Paw'don Village\",\n 'location': \"Stormwind\",\n 'coordinates': (68.74, 17.13),\n 'additional_information': \"Stormwind City\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"The jade Forest\",\n 'coordinates': (46.23, 85.17),\n 'additional_information': \"Paw'don Village\"\n },\n {\n 'target': \"Dalaran (Northrend)\",\n 'location': \"Shrine of the Seven Stars\",\n 'coordinates': (61.65, 39.55),\n 'additional_information': \"The Imperial Exchange\"\n },\n {\n 'target': \"Shattrath (Outland)\",\n 'location': \"Shrine of the Seven Stars\",\n 'coordinates': (68.35, 52.93),\n 'additional_information': \"The Imperial Exchange\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Shrine of the Seven Stars\",\n 'coordinates': (71.62, 35.93),\n 'additional_information': \"The Imperial Exchange\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Dalaran 
(Northrend)\",\n 'coordinates': (40.08, 62.79),\n 'additional_information': \"The Silver Enclave\"\n },\n {\n 'target': \"Caverns of Time\",\n 'location': \"Dalaran (Northrend)\",\n 'coordinates': (25.49, 51.54),\n 'additional_information': \"The Violet Citadel\"\n },\n {\n 'target': \"The Purple Parlor\",\n 'location': \"Dalaran (Northrend)\",\n 'coordinates': (25.95, 44.18),\n 'additional_information': \"The Violet Citadel\"\n },\n {\n 'target': \"The Violet Citadel\",\n 'location': \"Dalaran (Northrend)\",\n 'coordinates': (22.33, 38.64),\n 'additional_information': \"The Violet Citadel (top)\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Mount Hyjal\",\n 'coordinates': (62.62, 23.12),\n 'additional_information': \"Nordrassil\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Deepholm\",\n 'coordinates': (48.53, 53.82),\n 'additional_information': \"Temple of Earth\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Twilight Highlands\",\n 'coordinates': (79.43, 77.84),\n 'additional_information': \"Highbank\"\n },\n {\n 'target': \"Caverns of Time\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (38.52, 79.66),\n 'additional_information': \"Chamber of the Guardian\"\n },\n {\n 'target': \"Shattrath (Outland)\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (35.53, 85.16),\n 'additional_information': \"Chamber of the Guardian\"\n },\n {\n 'target': \"Wyrmrest Temple\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (30.90, 84.26),\n 'additional_information': \"Chamber of the Guardian\"\n },\n {\n 'target': \"Dalaran Crater\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (28.99, 77.42),\n 'additional_information': \"Chamber of the Guardian\"\n },\n {\n 'target': \"Karazhan\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (32.06, 71.48),\n 'additional_information': \"Chamber of the Guardian\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (39.54, 63.20),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (39.54, 63.20),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Ironforge\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (38.87, 64.40),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Darnassus\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (38.27, 65.51),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Exodar\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (37.59, 66.75),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Shrine of the Seven Stars\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (36.54, 67.06),\n 'additional_information': \"Greyfang Enclave\"\n },\n {\n 'target': \"Argus\",\n 'location': \"Dalaran (Broken Isles)\",\n 'coordinates': (74.27, 49.31),\n 'additional_information': \"Krasus' Landing\"\n },\n {\n 'target': \"Dalaran (Broken Isles)\",\n 'location': \"Argus\",\n 'coordinates': (43.39, 25.32),\n 'additional_information': \"The Vindicaar (lower level)\"\n },\n {\n 'target': \"Lion's Watch (Tanaan Jungle)\",\n 'location': \"Ashran\",\n 'coordinates': (36.39, 41.16),\n 'additional_information': \"Stormshield\"\n },\n {\n 'target': \"Stormshield (Ashran)\",\n 'location': \"Tanaan Jungle\",\n 'coordinates': (57.45, 60.50),\n 'additional_information': \"Lion's Watch\"\n },\n {\n 'target': \"Darnassus\",\n 
'location': \"Ashran\",\n 'coordinates': (63.39, 64.26),\n 'additional_information': \"Stormshield\"\n },\n {\n 'target': \"Ironforge\",\n 'location': \"Ashran\",\n 'coordinates': (51.40, 50.87),\n 'additional_information': \"Stormshield\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Ashran\",\n 'coordinates': (60.80, 37.88),\n 'additional_information': \"Stormshield\"\n },\n {\n 'target': \"Gorgrond\",\n 'location': \"Pandaria\",\n 'coordinates': (64.89, 77.16),\n 'additional_information': \"Timeless Isle (underwater cave)\"\n },\n {\n 'target': \"Timeless Isle (Pandaria)\",\n 'location': \"Gorgrond\",\n 'coordinates': (74.02, 24.58),\n 'additional_information': \"Barrier Sea\"\n },\n {\n 'target': \"Isle of Quel'Danas\",\n 'location': \"Shattrath\",\n 'coordinates': (48.62, 41.99),\n 'additional_information': \"Terrace of Light\"\n },\n {\n 'target': \"Stormwind\",\n 'location': \"Shattrath\",\n 'coordinates': (57.17, 48.22),\n 'additional_information': \"Terrace of Light\"\n },\n ],\n 'horde': []\n }\n }\n\n # sort portals by \"target\"\n context['factions']['alliance'] = sorted(\n context['factions']['alliance'], key=lambda portal: portal['target']\n )\n context['factions']['horde'] = sorted(\n context['factions']['horde'], key=lambda portal: portal['target']\n )\n\n return render(request, 'general_website/portals.html', context)", "def donnée(request):\r\n\r\n particles, weather, wind, temperature, season, deaparture,\\\r\n day, rank, pressure, demonstration = function_donnee_pep()\r\n\r\n socio, plugs, erup, dollars,\\\r\n fire, fertilizer, periode, pole = function_donnee_pep1()\r\n\r\n\r\n return render(request, 'donnée.html', {'lyon':particles[0],\r\n 'paris':particles[1],\r\n 'marseille':particles[2],\r\n 'weather_lyon':weather[0],\r\n 'weather_marseille':weather[1],\r\n 'weather_paris':weather[2],\r\n 'wind_lyon':wind[0],\r\n 'wind_paris':wind[1],\r\n 'wind_marseille':wind[2],\r\n 'temperature_lyon':round(temperature[0]),\r\n 'temperature_paris':round(temperature[1]),\r\n 'temperature_marseille':round(temperature[2]),\r\n 'current_season':season,\r\n 'departure_lyon':deaparture[0],\r\n 'regular_day_lyon':deaparture[2],\r\n 'hour_point_lyon':deaparture[1],\r\n 'no_point_lyon':deaparture[3],\r\n 'departure_marseille':deaparture[0],\r\n 'hour_point_paris':deaparture[1],\r\n 'regular_day_marseille':deaparture[2],\r\n 'no_point_marseille':deaparture[3],\r\n 'departure_paris':deaparture[0],\r\n 'hour_point_marseille':deaparture[1],\r\n 'regular_day_paris':deaparture[2],\r\n 'no_point_paris':deaparture[3],\r\n 'weekend':day[0],\r\n 'week_day':day[1],\r\n 'ranking_lyon':rank[0],\r\n 'ranking_paris':rank[1],\r\n 'ranking_marseille':rank[2],\r\n 'pole_lyon':pole[0],\r\n 'pole_paris':pole[1],\r\n 'pole_marseille':pole[2],\r\n 'pressure_lyon':pressure[0],\r\n 'pressure_paris':pressure[1],\r\n 'pressure_marseille':pressure[2],\r\n 'demonstration_lyon':demonstration[0],\r\n 'demonstration_paris':demonstration[1],\r\n 'demonstration_marseille':demonstration[2],\r\n 'socio_lyon':socio[0],\r\n 'socio_marseille':socio[2],\r\n 'socio_paris':socio[1],\r\n 'plugs_lyon':plugs[0],\r\n 'plugs_paris':plugs[1],\r\n 'eruption':erup,\r\n 'diesel':dollars[0],\r\n 'dollars':dollars[1],\r\n 'fire_lyon':fire[0],\r\n 'fire_marseille':fire[1],\r\n 'fire_paris':fire[2],\r\n 'fertilizer':fertilizer,\r\n 'periode':periode[0],\r\n 'po_lyon':periode[1],\r\n 'po_paris':periode[2],\r\n 'po_marseille':periode[3]})\r\n\r\n\r\n return render(request, 'donnée.html')", "def about():\n return 
render_template('about.html', name=\"COMP3161\")", "def plans():", "def index():\n inventory = db.execute(\"SELECT symbol,quantity FROM inventory WHERE userid = :uid\", uid=session[\"user_id\"])\n cash = float(db.execute(\"SELECT cash FROM users WHERE id = :userid\", userid=session[\"user_id\"])[0][\"cash\"])\n total = cash\n for i in inventory:\n stock = lookup(i[\"symbol\"])\n i[\"price\"] = stock[\"price\"]\n i[\"name\"] = stock[\"name\"]\n i[\"total\"] = usd(stock[\"price\"] * i[\"quantity\"])\n total += stock[\"price\"] * i[\"quantity\"]\n return render_template(\"index.html\", context={\"inventory\":inventory,\"total\":usd(total),\"cash\":usd(cash)})", "def liquid_with_loans(self):\n from Game.models import LoanOffer\n loan_offers = LoanOffer.objects.filter(lender=self)\n return self.liquid - sum(l.offered for l in loan_offers)", "def products():\n\n\treturn render_template(\"products.html\")", "def recipe(request, recipe_slug):\n current_recipe = get_object_or_404(Recipe, pk=recipe_slug)\n ingredients = IngredientDetails.objects.filter(recipe=current_recipe)\n\n ingredients_list = []\n\n price = 0\n for ingredient in ingredients:\n ingredients_list.append(str(ingredient.ingredient) + \" - \" + str(ingredient.amount_name))\n price += ingredient.ingredient.price * ingredient.amount\n\n print(current_recipe.image.url)\n\n price = int(price)\n\n context = {\n 'slug': recipe_slug,\n 'author': current_recipe.author,\n 'title': current_recipe.title,\n 'published_date': current_recipe.published_date,\n 'content': current_recipe.content,\n 'image_url': current_recipe.image.url,\n 'ingredients': ingredients_list,\n 'price': price,\n 'up_votes': current_recipe.up_votes,\n 'down_votes': current_recipe.down_votes,\n }\n\n return render(request, 'recipes_detail.html', context)", "def release():\n\tif not request.vars.id:\n\t\tredirect(URL('index'))\n\tid = request.vars.id\n\treleasename = db.executesql(\"select m1.name from release_name as m1, release as m2 where m1.id = m2.name and m2.id = \"+id+\";\")\n\ttracklist = db.executesql(\"select m4.id, m5.name, m4.position, m4.length from release m1,medium m2,tracklist m3,track m4,track_name m5 where m5.id = m4.name and m4.tracklist = m3.id and m3.id = m2.tracklist and m2.release = m1.id and m1.id = \"+id+\" order by m4.position;\")\n\turls = db.executesql(\"select m2.url from l_release_url m1, url m2 where m1.entity1 = m2.id and m1.entity0 = \"+id+\";\")\n\tmbid = db.executesql(\"select m1.gid from release m1 where m1.id = \"+id+\";\")\n\ttry:\n\t\tproxy = urllib2.ProxyHandler({'http': 'http://cs5090240:phone01202767129@10.10.78.62:3128'})\n\t\tauthentication = urllib2.HTTPBasicAuthHandler()\n\t\topener = urllib2.build_opener(proxy, authentication, urllib2.HTTPHandler)\n\t\turllib2.install_opener(opener)\n\t\tdestination = 'http://www.coverartarchive.org/release/'+mbid[0][0]\n\t\t#dbg.set_trace()\n\t\treq = urllib2.Request(url=destination,data=\"\")\n\t\tf = urllib2.urlopen(req)\n\t\t#dbg.set_trace()\n\t\tdata =json.load(f)\n\t\t#dbg.set_trace()\n\t\tcoverarturl = data['images'][0]['image']\n\texcept:\n\t\tcoverarturl = URL('static','images/no_image_found.jpg')\n\tlinks = []\n\twiki = \"\"\n\tfor url in urls:\n\t\tif \"wikipedia\" in url[0]:\n\t\t\twiki = url[0]\n\t\telse:\n\t\t\tlinks.append(url[0])\n\ttracklist1 = []\n\tfor row in tracklist:\n\t\trow1 = list(row)\n\t\tif (row1[3]):\n\t\t\td = datetime.datetime.fromtimestamp(int(row1[3])//1000)\n\t\t\trow1[3] = str(d.hour)+\":\"+str(d.minute)\n\t\telse:\n\t\t\trow1[3] = 
'N/A'\n\t\ttracklist1.append(row1)\n\tif auth.user:\n\t\tplaylists = db(db.playlists.user_id == auth.user.id).select()\n\telse:\n\t\tplaylists = None\n\treturn dict(tracklist=tracklist1, releasename=releasename, wiki=wiki, links=links, playlists=playlists, coverarturl=coverarturl)", "def specialoccasion(request):\n products = Product.objects.all()\n return render(request, \"specialoccasion.html\", {\"products\": products})", "def getItems(self): \n if self.itemCount > 0:\n \n site = getSite()\n \n \n # Make string path relative to the site root\n # E.g. string path \"news\" becomes \"/yoursiteid/news\"\n site_path = site.getPhysicalPath();\n \n path = \"/\".join(site_path) + \"/\" + self.path \n \n types = [self.itemPortalType]\n \n items = []\n \n #if self.itemPortalType2 != None:\n # types.append(self.itemPortalType2) \n \n #print \"Querying by:\" + type + \" \" + path\n content_by_type = self.context.portal_catalog(path={ \"query\": path, \"depth\" :9 }, \n portal_type=self.itemPortalType, \n sort_on=\"created\", \n sort_order=\"reverse\")[0:self.itemCount]\n\n content_by_type = list(content_by_type)\n \n if self.itemPortalType2 != None:\n content_by_type2 = self.context.portal_catalog(path={ \"query\": path, \"depth\" :9 }, \n portal_type=self.itemPortalType2, \n sort_on=\"created\", \n sort_order=\"reverse\")[0:self.itemCount]\n\n content_by_type += list(content_by_type2)\n\n \n items += [ brain.getObject() for brain in content_by_type ]\n else:\n items = []\n \n #if self.title == \"Daily deals\":\n # import pdb ; pdb.set_trace()\n \n # XXX: custom hack for deals\n def is_expired_deal(i):\n \"\"\"\n \"\"\"\n if hasattr(i, \"validUntil\"):\n now = datetime.datetime.utcnow()\n if now > i.validUntil:\n return True\n \n return False\n \n items = [ i for i in items if not is_expired_deal(i) ]\n \n return items", "def dvs(request):\n cart = Cart(request)\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'app/dvs.html',\n {\n 'title':'Капитальный ремонт ДВС',\n 'cart': cart,\n 'message':'Что такое капитальный ремонт двигателя автомобиля?',\n 'year':datetime.now().year,\n }\n )", "def navebarre_info(request):\r\n return render(request, 'menu/navebarre_info.html')", "def index():\n # ip1= jsonify({'ip': request.remote_addr})\n # xx = request.remote_addr\n # gg = request.remote_user\n ip2=request.environ.get('HTTP_X_REAL_IP', request.remote_addr) \n # ip = requests.get('https://api.ipify.org').text\n\n # if request.environ.get('HTTP_X_FORWARDED_FOR') is None:\n # print('22',request.environ['REMOTE_ADDR'])\n # else:\n # print('11',request.environ['HTTP_X_FORWARDED_FOR']) # if behind a proxy\n\n # return f'request.remote_addr {xx} request.remote_user {gg} request.environ.get {ip2} requests.get(https: {ip} '\n \n\n # query = '103.194.67.94'\n # query = ip\n # url = f\"http://ip-api.com/json/{query}\"\n # payload = \"{\\\"ips\\\": [\\\"1.1.1.1\\\", \\\"1.2.3.4\\\"]}\"\n # response_ip = requests.request(\"POST\", url, data=payload)\n # y=response_ip.json()\n\n \"\"\"get weathrer condition \"\"\"\n\n key = '53d7f1dde8564a69838135859212907'\n q = ip2\n url = f'http://api.weatherapi.com/v1/current.json?key={key}&q={q}&aqi=no'\n response = requests.request(\"POST\", url)\n\n weather_json = response.json()\n current_temp = weather_json[\"current\"][\"temp_c\"]\n print (current_temp)\n temps = Temp.query.all()\n for temp in temps:\n if temp.mintemp < current_temp < temp.maxtemp:\n condition = temp\n photo = random.choice(condition.photo)\n return render_template('main.html', 
weather_json=weather_json, photo=photo)\n abort (404)\n return render_template('main.html',weather_json=weather_json, photo =photo)", "def __local_sc(soup):\n return __get_local_g1_news(soup)", "def get():", "def get():", "def get(self):\n items = self.request.get_all(\"food\")\n # pases the value in item variable into the jinja2 template\n self.render(\"shopping_list.html\", items=items)", "def display():\n\n #still needs some cleanup on imagry and what the site is about. \n\n return render_template(\"index.html\")", "def basket(request):\n return {'basket': Basket(request)}", "def information(request):\n print('Hello, world')\n context = {\n\n }\n\n return render(request, 'information/information.html', context)", "def smartadata():\n\n\treturn render_template('smartadata.html')", "def fase1():\n\n pagesClassIDs = {\n \"fase1\": {\n \"bannertitle\": [],\n \"subtitle\": [],\n \"firstText\": [],\n \"secondText\": []\n }\n }\n for key in pagesClassIDs[\"fase1\"].keys():\n pagesClassIDs[\"fase1\"][key].append(\n str(\n pageTexts.query.filter_by(pageID=key,\n htmlName=\"fase1\").first()))\n\n spat_aspects = Spat_aspect.query.all()\n spat_aspectsList = [\n spat_aspect.__dict__[\"name\"] for spat_aspect in spat_aspects\n ]\n\n temp_aspects = Temp_aspect.query.all()\n temp_aspectsList = [\n temp_aspect.__dict__[\"name\"] for temp_aspect in temp_aspects\n ]\n\n nodes = Node.query.all()\n nodesList = [node.__dict__ for node in nodes]\n for nd in nodesList:\n del nd['_sa_instance_state']\n\n factorDict = {}\n for node in nodesList:\n spat_asp = int(node[\"spat_aspect_id\"]) - 1\n temp_asp = int(node[\"temp_aspect_id\"]) - 1\n factorDict.setdefault(spat_aspectsList[spat_asp], {})\n factorDict[spat_aspectsList[spat_asp]].setdefault(\n temp_aspectsList[temp_asp], [])\n factorDict[spat_aspectsList[spat_asp]][\n temp_aspectsList[temp_asp]].append(node[\"factor\"])\n\n print(nodesList)\n print(factorDict)\n\n return render_template(\n 'home/fase1.html',\n factorDict=factorDict,\n nodes=nodesList,\n spat_aspects=spat_aspectsList,\n temp_aspects=temp_aspectsList,\n pageDicts=pagesClassIDs,\n title=\"fase1\")", "def __local_rs(soup):\n return __get_local_g1_news(soup)", "def __local_rs(soup):\n return __get_local_g1_news(soup)", "def comicmain(request, page_title=\"\"):\n challenge_short_name = settings.MAIN_PROJECT_NAME\n\n try:\n site = Challenge.objects.get(short_name__iexact=challenge_short_name)\n except Challenge.DoesNotExist:\n link = reverse(\"challenges:create\")\n link = link + \"?short_name=%s\" % challenge_short_name\n link_html = create_HTML_a(\n link, \"Create project '%s'\" % challenge_short_name\n )\n html = \"\"\"I'm trying to show the first page for main project '%s' here,\n but '%s' does not exist. %s.\"\"\" % (\n challenge_short_name,\n challenge_short_name,\n link_html,\n )\n page = create_temp_page(title=\"no_pages_found\", html=html)\n\n return render(\n request,\n \"temppage.html\",\n {\"site\": page.challenge, \"currentpage\": page},\n )\n\n pages = site.page_set.all()\n\n if len(pages) == 0:\n link = reverse(\n \"pages:list\", kwargs={\"challenge_short_name\": challenge_short_name}\n )\n link_html = create_HTML_a(link, \"admin interface\")\n html = \"\"\"I'm trying to show the first page for main project '%s' here,\n but '%s' contains no pages. 
Please add\n some in the %s.\"\"\" % (\n challenge_short_name,\n challenge_short_name,\n link_html,\n )\n page = create_temp_page(title=\"no_pages_found\", html=html)\n\n return render(\n request,\n \"temppage.html\",\n {\"site\": page.challenge, \"currentpage\": page},\n )\n\n if page_title:\n pages = [p for p in pages if p.title.lower() == page_title.lower()]\n\n if len(pages) != 1:\n raise Http404(\n f\"{len(pages)} pages with title {page_title} were found for {site}\"\n )\n\n page = pages[0]\n page.html = renderTags(request, page)\n\n return render(request, \"page.html\", {\"currentpage\": page})", "def index(request):\n products = Product.objects.all()\n highlights = Original.objects.filter(status='h')\n context = {\n \"index_page\": \"active\",\n \"products\": products,\n \"highlights\": highlights,\n \"title\": \"Home\"\n }\n return render(request, \"index.html\", context)", "def assets():", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def Site(self) -> str:", "def substantiate():", "def vp():\n if g.active.is_empty:\n txt = F('advise search') if g.model.is_empty else F('advise add')\n g.message = F('pl empty') + \" \" + txt\n\n else:\n g.browse_mode = \"normal\"\n g.model.songs = g.active.songs\n g.message = F('current pl')\n\n g.content = generate_songlist_display(zeromsg=g.message)", "def T(request):\n\treturn all_templates[request.param]" ]
[ "0.5671805", "0.5175566", "0.5175554", "0.516426", "0.50640386", "0.50614387", "0.50591075", "0.49937275", "0.4983017", "0.49820462", "0.49783403", "0.49738714", "0.49507678", "0.4948753", "0.4924024", "0.48813003", "0.4849777", "0.48445272", "0.483933", "0.48176083", "0.48085022", "0.47767058", "0.47729102", "0.4763828", "0.475347", "0.47512197", "0.4736495", "0.47360304", "0.47351485", "0.47311595", "0.47305107", "0.47286752", "0.47099444", "0.470444", "0.47022855", "0.47001556", "0.46963298", "0.46845356", "0.46820825", "0.46716225", "0.46716225", "0.4668829", "0.46600708", "0.46553743", "0.46537516", "0.46418437", "0.4630312", "0.46291485", "0.46241012", "0.4623188", "0.46196312", "0.46195087", "0.46193397", "0.46048525", "0.4604194", "0.46041816", "0.46030444", "0.459902", "0.45957255", "0.45873144", "0.45847282", "0.45838684", "0.4582945", "0.4581141", "0.45767185", "0.45750123", "0.45746046", "0.45738328", "0.45716617", "0.4568748", "0.4568119", "0.45656618", "0.45626608", "0.45616934", "0.45609933", "0.4558296", "0.45572293", "0.45554438", "0.4555158", "0.4555158", "0.45534432", "0.4547427", "0.4546566", "0.45447776", "0.45446545", "0.45407233", "0.45403594", "0.45403594", "0.453835", "0.45340276", "0.4532909", "0.45327595", "0.45327595", "0.45327595", "0.45327595", "0.45327595", "0.45327595", "0.4531033", "0.452682", "0.4526453", "0.452251" ]
0.0
-1
Adds up the list.
def som(getallenlijst):
    total = sum(getallenlijst)
    return total
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _ltz(self):\n raise NotImplementedError(\"_ltz is not implemented\")", "def LDLT(self):\n\t\tpass", "def l_un_degenerate(self):\n self.right = self.tmp", "def l_degenerate(self):\n self.tmp = self.right\n self.right = self.left", "def tilt(self) -> int:", "def dl2(vec):\n return vec", "def LD(x, y, lodict={}):\n return needleman_wunsch(x, y, lodict=lodict, gop=-1, gep=-1)", "def le(self, x, y):", "def test_un_shift(self):\n l_list = DoubleLinkedList()\n l_list.push(15)\n l_list.push(150)\n l_list.push(1)\n l_list.un_shift(111)\n self.assertEqual(l_list.get_list()[0].get_elem(), 111)", "def _left(self, j):\n return 2 * j + 1", "def _left(self, j):\n return 2 * j + 1", "def LZp(self,i):\n\t\tv=self.deltai_plus(i)\n\t\treturn v[0][0]", "def tl(lst):\n return lst[1:] if len(lst) > 1 else None", "def lt_inplace(a,b):", "def LZm(self,i):\n\t\tv=self.deltai_minus(i)\n\t\treturn v[0][0]", "def test_un_shift_when_empty(self):\n l_list = DoubleLinkedList()\n l_list.un_shift(111)\n self.assertEqual(l_list.get_list()[0].get_elem(), 111)", "def SLE_DL(t, y):\n DyFun = SLEfun(y,C)\n Dygrand = tf.gradients(y, t)[0]\n return Dygrand - DyFun", "def lt(self, y):\n return (self - y)._ltz()", "def left(t):\r\n return t(1)", "def r_un_degenerate(self):\n self.left = self.tmp", "def jmatswap(ind: int):\n return _jmswap[ind - 1]", "def sign(self):\n return 1 - 2 * self._ltz()", "def __le__(self, *args):\n return _ida_frame.stkpnt_t___le__(self, *args)", "def revise():", "def retract(self, la):\n if la == []:\n return self([])\n if la[0] <= self.k:\n return self(la)\n if self.t == 1:\n return self.zero()\n else:\n kHLP = self._kBoundedRing.kHallLittlewoodP()\n return self(kHLP._m_to_kHLP_on_basis(la))", "def __lt__(*args, **kwargs):\n return _uhd_swig.__lt__(*args, **kwargs)", "def cast_ltree(s, cur):\n if s is not None:\n return Ltree(s)", "def _tarjan_body(ctx, it, v):\n for w in it:\n if w not in ctx.index:\n ctx.T.append((it, True, v, w))\n _tarjan_head(ctx, w)\n return\n if w in ctx.S_set:\n ctx.lowlink[v] = min(ctx.lowlink[v], ctx.index[w])\n if ctx.lowlink[v] == ctx.index[v]:\n scc = []\n w = None\n while v != w:\n w = ctx.S.pop()\n scc.append(w)\n ctx.S_set.remove(w)\n ctx.ret.append(scc)", "def den_evolve(self, delt, txp, src):\n self.ne += (-txp.dfluxe + src.se)*delt\n self.ni += (-txp.dfluxi + src.si)*delt", "def __lshift__(self,g):\r\n\t\t\r\n\t\treturn self.substitute(*g)", "def Findlt(l,sp,rhs):\n m = sp.M(l)\n return (m / l**3) - rhs", "def unify_walk(vil, l, U):\r\n v = vil.variable\r\n ov = OrVariable(\"?\", l)\r\n return unify_walk(v, ov, U)", "def lower_twist(lo_arm_ik_jnt, wrist_ik_jnt, lo_arm_jnt, lo_arm_twist_jnts, wrist_jnt=None):\n\n # Create a group that does not rotate and parent under the ik arm parent (shoulder)\n stable_reader_grp = utils.create_node('transform', n=lo_arm_ik_jnt+'_stable_reader', p=lo_arm_ik_jnt)\n\n # Create a grp that will rotate with ik arm\n twist_reader_grp = utils.create_node('transform', n=lo_arm_ik_jnt+'_twist_reader', p=lo_arm_ik_jnt)\n mc.addAttr(twist_reader_grp, ln='twist', k=1)\n\n mc.delete(mc.pointConstraint(wrist_ik_jnt, twist_reader_grp))\n mc.parent(twist_reader_grp, wrist_ik_jnt)\n\n # Now set up mult matrix and decomp nodes to extract the twist between the two nodes\n mult_mtx = mc.createNode('multMatrix')\n decomp_mtx = mc.createNode('decomposeMatrix')\n quat_to_euler = mc.createNode('quatToEuler')\n\n mc.connectAttr(stable_reader_grp+'.worldInverseMatrix', mult_mtx+'.matrixIn[1]')\n mc.connectAttr(twist_reader_grp+'.worldMatrix', 
mult_mtx+'.matrixIn[0]')\n mc.connectAttr(mult_mtx+'.matrixSum', decomp_mtx+'.inputMatrix')\n mc.connectAttr(decomp_mtx+'.outputQuatX', quat_to_euler+'.inputQuatX')\n mc.connectAttr(decomp_mtx+'.outputQuatW', quat_to_euler+'.inputQuatW')\n\n utils.connect_negative(quat_to_euler+'.outputRotateX', twist_reader_grp+'.twist')\n\n # Connect joints\n mc.parentConstraint(lo_arm_ik_jnt, lo_arm_jnt, mo=1)\n if wrist_jnt:\n mc.parentConstraint(wrist_ik_jnt, wrist_jnt, mo=1)\n\n div = 1.0 / (len(lo_arm_twist_jnts))\n\n mdl = mc.createNode('multDoubleLinear')\n mc.setAttr(mdl+'.input1', div)\n mc.connectAttr(quat_to_euler+'.outputRotateX', mdl+'.input2')\n\n for i, joint in enumerate(lo_arm_twist_jnts):\n mc.connectAttr(mdl+'.output', joint+'.rx')", "def _poputil_remap_deduce_layer_backward(op, grads):\n return grads", "def le_inplace(a,b):", "def decr(j, ps):\n return ps[:j] + (ps[j] - 1,) + ps[j+1:]", "def sereflect(Bi):\n Bo = serot(Bi, 180)\n return Bo", "def deconstruct_head(self):\n ret = []\n for ii in range(len(self.__data)):\n op = self.__data[ii].deconstruct()\n if not op:\n return (ret, self.__data[ii:])\n ret += op\n return (ret, [])", "def lindblad(C):\n if is_scalar(C):\n return ZeroSuperOperator\n return SPre(C) * SPost(C.adjoint()) - (sympyOne / 2) * anti_commutator(\n C.adjoint() * C\n )", "def reverse_difference():", "def deltas(L):\n return map(sub, tuple(L)[1:], L)", "def right(t):\r\n return t(2)", "def _UnaryOp(self, t):\n self.write(\"(\")\n self.write(self.unop[t.op.__class__.__name__])\n self.dispatch(t.operand)\n self.write(\")\")", "def vj(vj, pol, ant) :\n s.vj(pol, vj, ant)", "def tarjan(g):\n ctx = TarjanContext(\n g=g,\n S=[],\n S_set=set(),\n index={},\n lowlink={},\n T=[],\n ret=[])\n main_iter = iter(g)\n while True:\n try:\n v = next(main_iter)\n except StopIteration:\n return ctx.ret\n if v not in ctx.index:\n _tarjan_head(ctx, v)\n while ctx.T:\n it, inside, v, w = ctx.T.pop()\n if inside:\n ctx.lowlink[v] = min(ctx.lowlink[w],\n ctx.lowlink[v])\n _tarjan_body(ctx, it, v)", "def invert_L2_wdls():\n print()", "def lro(self) -> global___Snippet.Lro:", "def __rsub__(self, tensor):\n return -self + tensor", "def ll_j(self, j: Union[int, np.ndarray]) -> Union[np.ndarray, dask.array.core.Array]:\n # Make sure that dimensionality of sliced array is kept:\n if isinstance(j, int) or isinstance(j, np.int32) or isinstance(j, np.int64):\n j = np.full(1, j)\n loc_j = self.location_j(j=j)\n log_loc = np.log(loc_j)\n x_times_log_loc = self.x[:, j] * log_loc\n log_x_factorial = np.log(scipy.special.gammaln(self.x[:, j] + np.ones_like(self.x[:, j])))\n ll = x_times_log_loc - loc_j - log_x_factorial\n return np.asarray(self.np_clip_param(ll, \"ll\"))", "def _tarjan_head(ctx, v):\n ctx.index[v] = len(ctx.index)\n ctx.lowlink[v] = ctx.index[v]\n ctx.S.append(v)\n ctx.S_set.add(v)\n it = iter(ctx.g.get(v, ()))\n ctx.T.append((it, False, v, None))", "def lagr(self, x):\n return", "def lap_mat(self):", "def correct_subscript(sv, tree):\r\n if not tree or tree[0]!=Special: return tree\r\n O, A, B=tree # subscripting ( e.g. 
cumul(L)(0) ) \r\n O=tree_join(A) # make operator from first term \r\n A, B = B, None # make subscript from second term\r\n return O, A, B", "def sprout_leaves(t, vals):", "def lltnum(self,):\n m = self.m\n n = self.n\n diag = self.diag\n perm = self.perm\n AAt = self.AAt\n kAAt = self.kAAt\n iAAt = self.iAAt\n mark = self.mark\n self.denwin\n\n m2 = m+n\n #/*------------------------------------------------------+\n #| initialize constants */\n\n temp = np.zeros(m2)\n first = np.zeros(m2, dtype=np.int)\n link = np.empty(m2, dtype=np.int)\n for i in range(m2):\n link[i] = -1\n\n maxdiag=0.0\n for i in range(m2):\n if abs(diag[i]) > maxdiag:\n maxdiag = abs(diag[i])\n\n self.ndep=0\n\n #/*------------------------------------------------------+\n #| begin main loop - this code is taken from George and |\n #| Liu's book, pg. 155, modified to do LDLt instead |\n #| of LLt factorization. */\n\n for i in range(m2):\n diagi = diag[i]\n sgn_diagi = -1 if perm[i] < n else 1\n j = link[i]\n while j != -1:\n newj = link[j]\n k = first[j]\n lij = AAt[k]\n lij_dj = lij*diag[j]\n diagi -= lij*lij_dj\n k_bgn = k+1\n k_end = kAAt[j+1]\n if k_bgn < k_end:\n first[j] = k_bgn\n row = iAAt[k_bgn]\n link[j] = link[row]\n link[row] = j\n if j < self.denwin:\n for kk in range(k_bgn, k_end):\n temp[iAAt[kk]] += lij_dj*AAt[kk]\n else:\n ptr = row\n for kk in range(k_bgn, k_end):\n temp[ptr] += lij_dj*AAt[kk]\n ptr+=1\n\n j=newj\n\n k_bgn = kAAt[i]\n k_end = kAAt[i+1]\n for kk in range(k_bgn, k_end):\n row = iAAt[kk]\n AAt[kk] -= temp[row]\n\n if abs(diagi) <= self.epsnum*maxdiag or mark[i] == False:\n\n #if (sgn_diagi*diagi <= epsnum*maxdiag || mark[i] == FALSE)\n\n self.ndep+=1\n maxoffdiag = 0.0\n for kk in range(k_bgn, k_end):\n maxoffdiag = max( maxoffdiag, abs( AAt[kk] ) )\n\n if maxoffdiag < 1.0e+6*self._EPS:\n mark[i] = False\n else:\n diagi = sgn_diagi * self._EPS\n\n diag[i] = diagi\n if k_bgn < k_end:\n first[i] = k_bgn\n row = iAAt[k_bgn]\n link[i] = link[row]\n link[row] = i\n for kk in range(k_bgn, k_end):\n row = iAAt[kk]\n if mark[i]:\n AAt[kk] /= diagi\n else:\n AAt[kk] = 0.0\n\n temp[row] = 0.0\n\n del(link)\n del(first)\n del(temp)", "def fL():\n for n in b.allNodes():\n n.autoplace()", "def __le__(self, *args):\n return _ida_frame.stkpnts_t___le__(self, *args)", "def ttd_l_func(self):\n i2 = self.inl[1].to_flow()\n o1 = self.outl[0].to_flow()\n return (self.ttd_l.val - T_mix_ph(o1, T0=self.outl[0].T.val_SI) +\n T_mix_ph(i2, T0=self.inl[1].T.val_SI))", "def f(self,un,tn):\n return -self.a(tn)*un + self.b(tn)", "def reverse_distribute(node: NodeT) -> NodeT:\n\n def visitor(node: NodeT, left_distribute: bool) -> NodeT:\n \"\"\"Apply left- or right-distributive property in reverse, if possible\n\n Args:\n node: ir.Node to process.\n left_distribute: Whether to apply *left*-distributive property.\n\n Returns:\n Processed node.\n \"\"\"\n if isinstance(node, ir.AddSub):\n items = OrderedDict() # type: Dict[ir.Node, List[Tuple[str, ir.Node]]]\n new_operators = []\n new_operands = []\n for operator, operand in zip(('+',) + getattr(node, 'operator'),\n getattr(node, 'operand')):\n if (operator == '+' and isinstance(operand, ir.MulDiv) and\n getattr(operand, 'operator') == ('*',)):\n if left_distribute:\n coeff, item = getattr(operand, 'operand')\n else:\n item, coeff = getattr(operand, 'operand')\n items.setdefault(coeff, []).append((operator, item))\n else:\n new_operators.append(operator)\n new_operands.append(operand)\n for coeff, item in items.items():\n operator, operand = zip(*item)\n assert 
operator[0] == '+'\n new_operators.append(operator[0])\n if len(operand) > 1:\n new_item = ir.AddSub(operator=operator[1:], operand=operand)\n else:\n new_item = operand[0]\n if left_distribute:\n children = coeff, new_item\n else:\n children = new_item, coeff\n new_operands.append(ir.MulDiv(operator=('*',), operand=children))\n if len(new_operands) > 1:\n assert new_operators[0] == '+'\n new_node = ir.AddSub(operator=tuple(new_operators[1:]),\n operand=tuple(new_operands))\n if new_node != node:\n return new_node # type: ignore\n elif new_operands and new_operands[0] != node:\n return new_operands[0]\n return node\n\n return node.visit(visitor, True).visit(visitor, False)", "def linear_backward(dZ, cache):\n pass", "def t_fleche(t):\r\n ev=donne_evenement()\r\n type_ev=type_evenement(ev)\r\n if type_ev==\"Deplacement\":\r\n t=clic_x(ev)\r\n else:\r\n t=t\r\n return t", "def kl(self, other, xs, reversesd=False, **kwargs):\n raise NotImplementedError", "def rmdup2(sll):\n start = sll.head\n while start:\n node = start\n while node and node.next_:\n if node.next_.payload == start.payload:\n node.next_ = node.next_.next_\n node = node.next_\n start = start.next_\n return sll", "def tv(i, r=None):\n if r is None:\n return i[0]\n else:\n return Operation.tv(r[i])", "def le_func(rn, g, h):\n le = np.copy(rn)\n le -= g\n le -= h\n return le", "def mull_stpt_oaeTrans_iw(action_raw, stptLmt, ob_this_raw):\n OAT_RAW_IDX = 0;\n SWT_RAW_IDX = 7;\n oat_cur = ob_this_raw[OAT_RAW_IDX]\n swt_ssp_cur = ob_this_raw[SWT_RAW_IDX];\n # Transfer the mull op from 1/0 to OAE setpoint\n if action_raw[0] == 0.0:\n res_oae_ssp = oat_cur - 5.0; # If OAE setpoint < next step OAT, mull op is off\n else:\n res_oae_ssp = oat_cur + 5.0; # If OAE setpoint > next step OAT, mull op is on\n # Get the next step SWT ssp\n res_swt_ssp = swt_ssp_cur + action_raw[1];\n res_oae_ssp = max(min(res_oae_ssp, stptLmt[0][1]), stptLmt[0][0]);\n res_swt_ssp = max(min(res_swt_ssp, stptLmt[1][1]), stptLmt[1][0]);\n return ((res_oae_ssp, res_swt_ssp),\n (action_raw[0], res_swt_ssp - swt_ssp_cur))", "def modified_gram_schmidt_step_arnoldi(j, vals):\n vector, krylov_vectors, n, H = vals\n v = krylov_vectors[j, :]\n h = jax.numpy.vdot(v, vector)\n H = jax.ops.index_update(H, jax.ops.index[j, n], h)\n vector = vector - h * jax.numpy.reshape(v, vector.shape)\n return [vector, krylov_vectors, n, H]", "def translate_leet(phrase):", "def d_j(self,q):\n dj = q.shift(y=-1) - q # works with chunks too\n return dj", "def forward(self, graph, feat, lambda_max=None):\n def unnLaplacian(feat, D_invsqrt, graph):\n \n \"\"\" Operation Feat * D^-1/2 A D^-1/2 但是如果写成矩阵乘法:D^-1/2 A D^-1/2 Feat\"\"\"\n graph.ndata['h'] = feat * D_invsqrt\n graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))\n return graph.ndata.pop('h') * D_invsqrt\n\n with graph.local_scope():\n #一点修改,这是原来的代码\n if self.is_mnist:\n graph.update_all(fn.copy_edge('v','m'), fn.sum('m','h')) # 'v'与coordinate.py有关\n D_invsqrt = th.pow(graph.ndata.pop('h').float().clamp(min=1), -0.5).unsqueeze(-1).to(feat.device)\n \n #D_invsqrt = th.pow(graph.in_degrees().float().clamp(\n # min=1), -0.5).unsqueeze(-1).to(feat.device)\n #print(\"in_degree : \",graph.in_degrees().shape)\n else:\n D_invsqrt = th.pow(graph.in_degrees().float().clamp(min=1), -0.5).unsqueeze(-1).to(feat.device)\n #print(\"D_invsqrt : \",D_invsqrt.shape)\n #print(\"ndata : \",graph.ndata['h'].shape)\n if lambda_max is None:\n try:\n lambda_max = laplacian_lambda_max(graph)\n except BaseException:\n # if the largest eigenvalue is not 
found\n dgl_warning(\n \"Largest eigonvalue not found, using default value 2 for lambda_max\",\n RuntimeWarning)\n lambda_max = th.Tensor(2).to(feat.device)\n\n if isinstance(lambda_max, list):\n lambda_max = th.Tensor(lambda_max).to(feat.device)\n if lambda_max.dim() == 1:\n lambda_max = lambda_max.unsqueeze(-1) # (B,) to (B, 1)\n\n # broadcast from (B, 1) to (N, 1)\n lambda_max = broadcast_nodes(graph, lambda_max)\n re_norm = 2. / lambda_max\n\n # X_0 is the raw feature, Xt refers to the concatenation of X_0, X_1, ... X_t\n Xt = X_0 = feat\n\n # X_1(f)\n if self._k > 1:\n h = unnLaplacian(X_0, D_invsqrt, graph)\n X_1 = - re_norm * h + X_0 * (re_norm - 1)\n # Concatenate Xt and X_1\n Xt = th.cat((Xt, X_1), 1)\n\n # Xi(x), i = 2...k\n for _ in range(2, self._k):\n h = unnLaplacian(X_1, D_invsqrt, graph)\n X_i = - 2 * re_norm * h + X_1 * 2 * (re_norm - 1) - X_0\n # Concatenate Xt and X_i\n Xt = th.cat((Xt, X_i), 1)\n X_1, X_0 = X_i, X_1\n\n # linear projection\n h = self.linear(Xt)\n\n # activation\n if self.activation:\n h = self.activation(h)\n #print('ChebConv.py Line163 h : ',h.shape)\n return h", "def _build_lt(self) -> str:\n return dedent(\n f\"\"\"\n // SP--\n @SP\n M=M-1\n // D = *SP\n A=M\n D=M\n // SP--\n @SP\n M=M-1\n // D = *SP - D <--> x - y\n A=M\n D=M-D\n M=D\n\n // if x < 0\n @IS_LESS_THAN{self.label_suffix}\n D;JLT\n // else\n @ELSE{self.label_suffix}\n D;JGE\n\n (IS_LESS_THAN{self.label_suffix})\n // True in Hack ASM is -1\n @SP\n A=M\n M=-1\n // SP++\n @SP\n M=M+1\n @END_IF{self.label_suffix}\n 0;JEQ\n\n (ELSE{self.label_suffix})\n // False in Hack ASM is 0\n @SP\n A=M\n M=0\n // SP++\n @SP\n M=M+1\n \n (END_IF{self.label_suffix})\n D=0\n \"\"\"\n )", "def visit_unary(spec):", "def r_degenerate(self):\n self.tmp = self.left\n self.left = self.right", "def _right(self, j):\n return 2 * j + 2", "def _right(self, j):\n return 2 * j + 2", "def _untransform(self, X: Tensor) -> Tensor:\n pass # pragma: no cover", "def dehidratate(traj, other=(\"Na+\", \"Cl-\")):\n system_indexes = []\n if not system_indexes:\n for residue in traj.topology.residues:\n if residue.name not in (\"HOH\", \"WAT\") and residue.name not in other:\n for atom in residue.atoms:\n system_indexes.append(atom.index)\n traj = traj.atom_slice(system_indexes)\n return traj", "def lt (x,y):\n\n return not le(y,x)", "def timeStamp2LST(self, t):\n if isinstance(t, list):\n t = np.array(t)\n return self.lst_start + t*1e-6/(3590.)", "def jtag(self, shp):\n if isinstance(shp, Inferrer):\n return JInferrer(shp, TupleShape)\n else:\n return shp", "def lrs(st):\n\n length, shifts = __lrs(st.root, 0)\n result = [length, []]\n for shift in shifts:\n lrs_string = st.text[shift[0]-length:shift[0]]\n result[1].append((lrs_string, [x-length for x in shift]))\n return result", "def laplacian(expr):\n\n delop = Del()\n if expr.is_Vector:\n return (gradient(divergence(expr)) - curl(curl(expr))).doit()\n return delop.dot(delop(expr)).doit()", "def lindbladian_dt(dop, Lk):\n dop_dt = numpy.zeros(dop.shape)\n for L in Lk:\n L_adj = L.conj().transpose()\n L_adj_L = numpy.dot(L_adj, L)\n dop_dt = dop_dt + numpy.dot(L,numpy.dot(dop,L_adj)) \\\n -0.5*util.acomm(L_adj_L,dop)\n return dop_dt", "def __lshift__(self, other) -> 'MultiVector':\n return self.lc(other)", "def reverse_doubly_ll(dll):\n head_node = dll.head\n cur_node = dll.head\n cur_node.next.prev = None\n while cur_node.next is not None:\n cur_node = cur_node.next\n\n cur_node.next = head_node\n head_node.prev = cur_node", "def forward(self, s):", "def lrt_lts_i(Y):\n 
ll_null = mle_lts(Y, bound = True)['ll']\n ll_lts = mle_lts(Y)['ll']\n return -2 * (ll_null - ll_lts)", "def toeplitz(x):\n t = []\n for i in range(len(x)):\n row = []\n for j in range(len(x)):\n if i < j:\n row.append(x[j-i])\n elif i == j:\n row.append(x[0])\n else:\n row.append(x[i-j])\n t.append(row)\n return t", "def visitor(node: NodeT, left_distribute: bool) -> NodeT:\n if isinstance(node, ir.AddSub):\n items = OrderedDict() # type: Dict[ir.Node, List[Tuple[str, ir.Node]]]\n new_operators = []\n new_operands = []\n for operator, operand in zip(('+',) + getattr(node, 'operator'),\n getattr(node, 'operand')):\n if (operator == '+' and isinstance(operand, ir.MulDiv) and\n getattr(operand, 'operator') == ('*',)):\n if left_distribute:\n coeff, item = getattr(operand, 'operand')\n else:\n item, coeff = getattr(operand, 'operand')\n items.setdefault(coeff, []).append((operator, item))\n else:\n new_operators.append(operator)\n new_operands.append(operand)\n for coeff, item in items.items():\n operator, operand = zip(*item)\n assert operator[0] == '+'\n new_operators.append(operator[0])\n if len(operand) > 1:\n new_item = ir.AddSub(operator=operator[1:], operand=operand)\n else:\n new_item = operand[0]\n if left_distribute:\n children = coeff, new_item\n else:\n children = new_item, coeff\n new_operands.append(ir.MulDiv(operator=('*',), operand=children))\n if len(new_operands) > 1:\n assert new_operators[0] == '+'\n new_node = ir.AddSub(operator=tuple(new_operators[1:]),\n operand=tuple(new_operands))\n if new_node != node:\n return new_node # type: ignore\n elif new_operands and new_operands[0] != node:\n return new_operands[0]\n return node", "def shift(self, t, word):\n return t[1:] + (word,)", "def backward_pass(self, grad):\n pass", "def sh_order(j):\n l = sh_degree(j)\n return j + l + 1 - dimension(l)", "def t_STR_LITER(t):\n return t", "def _primerElem(l):\n return l[0]", "def sld(self, sf):\n raise NotImplementedError()", "def do_jls(self, arg):\n\n arg = str(arg).split(' ') \n arg.insert(0,'jls')\n arg = [i for i in arg if i != '']\n \n jail_table(arg)", "def _ldlj(movement, fs, data_type='speed'):\n _amp, _dur, _jerk = _dlj(movement, fs, data_type)\n return np.log(_amp), np.log(_dur), np.log(_jerk)", "def _reduce(self, action):\n assert len(self.stack) >= 2, \"ERROR: Cannot reduce with stack length less than 2\"\n \n # STUDENT\n # hint: use list.pop()\n # END STUDENT\n rightarc = self.stack.pop()\n leftarc = self.stack.pop()\n head = rightarc if action == Actions.REDUCE_L else leftarc\n mod = leftarc if action == Actions.REDUCE_L else rightarc\n self.stack.append( StackEntry(head.headword, head.headword_pos, self.combiner(head.embedding,mod.embedding)) )\n return DepGraphEdge((head.headword, head.headword_pos),(mod.headword, mod.headword_pos))", "def reverse_other(t):\n\n ###############\n # My Solution #\n ###############\n\n def deep(t, depth):\n depth+=1\n\n if t.is_leaf():\n return\n \n branches = []\n for b in t.branches:\n branches.append(b.label)\n deep(b, depth)\n\n branches = branches[::-1]\n\n if depth % 2 != 0:\n i = 0\n for b in t.branches:\n b.label = branches[i]\n i+=1\n\n return deep(t, 0)", "def union2(s, t):\n if empty(s):\n return t\n elif empty(t):\n return s\n else:\n e1, e2 = s.first, t.first\n if e1 == e2:\n return Link(e1, union2(s.rest, t.rest))\n elif e1 < e2:\n return Link(e1, union2(s.rest, t))\n elif e2 < e1:\n return Link(e2, union2(s, t.rest))" ]
[ "0.54447126", "0.53171045", "0.53147423", "0.5244896", "0.5144233", "0.51063067", "0.50482875", "0.5036076", "0.49402636", "0.49142268", "0.49142268", "0.48477957", "0.48162255", "0.47698143", "0.47529215", "0.47407043", "0.47301647", "0.46932566", "0.46881244", "0.46654546", "0.46581656", "0.46534562", "0.46505523", "0.46486446", "0.46457347", "0.46319664", "0.462161", "0.4613855", "0.46125624", "0.46105668", "0.4609357", "0.460382", "0.4598675", "0.45909497", "0.45616537", "0.4558875", "0.45524028", "0.4543821", "0.45397583", "0.45349368", "0.45177805", "0.45133105", "0.45127836", "0.45042428", "0.44919002", "0.4490247", "0.44814035", "0.44793937", "0.44785976", "0.44751534", "0.4474546", "0.44728637", "0.4461413", "0.44531533", "0.44529492", "0.44402063", "0.44360122", "0.44255418", "0.44230428", "0.44136515", "0.4403959", "0.44033703", "0.44005314", "0.43993595", "0.43975952", "0.43937087", "0.43934864", "0.4389768", "0.43850958", "0.438235", "0.43770665", "0.43760362", "0.43733168", "0.43657246", "0.43626097", "0.43626097", "0.43624488", "0.4361277", "0.43589228", "0.43572563", "0.43511578", "0.43479928", "0.43470547", "0.43459314", "0.43458262", "0.4341984", "0.4341412", "0.43343", "0.43320283", "0.4326349", "0.43221092", "0.43179402", "0.43150333", "0.4314716", "0.43123773", "0.4311349", "0.43078682", "0.4305239", "0.43050522", "0.42998645", "0.42994106" ]
0.0
-1
Tests that 1 + 1 always equals 2.
def test_basic_addition(self): self.assertEqual(1 + 1, 2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_sanity(self):\n self.assertEquals(2 + 2, 4)", "def test_basic_addition(self):\n self.failUnlessEqual(1 + 1, 2)", "def test_basic_addition(self):\n self.failUnlessEqual(1 + 1, 2)", "def test_basic_addition(self):\n self.failUnlessEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.failUnlessEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.failUnlessEqual(1 + 1, 2)", "def test_numbers(number):\n assert number ** 2 == number ** 2", "def test_03_pass(self):\n if x==1:\n pass", "def test_02_pass(self):\n if x==1:\n pass", "def _eq(a, b):\n return (a - b) % 2 == 0", "def test_add():\n\n assert add(1, 1) == 2\n assert add(1, 2) == add(2, 1) == 3", "def test_v2_correct(self):\r\n\r\n for index, expected_result in enumerate(self.prime_booleans):\r\n\r\n n = index + 1\r\n self.assertEqual(prime_numbers_v2(n), expected_result)", "def test_01_pass(self):\n if x==1:\n pass", "def test_01_pass(self):\n if x==1:\n pass", "def test_domino_with_2_numbers():\n assert compute(2) == 1, \"Not ok\"", "def test_add_two_numbers(self):\n self.assertEqual(add(5, 9), 14)", "def test_dummy():\n one = 1\n assert one + 1 == 2", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test_basic_addition(self):\r\n self.assertEqual(1 + 1, 2)", "def test():\n \n test1 = TestClass()\n test2 = TestClass()\n\n test1.inc()\n\n if not(test1.count == test2.count):\n return False\n else:\n return True", "def test_two_plus_two():\n assert add.add(2, 2) == 4", "def main():\n\n testEqual(odd_numbers_in_list([0, 1, 2, 3, 4]), 2)\n testEqual(odd_numbers_in_list([0, 0, 0]), 0)\n testEqual(odd_numbers_in_list([1]), 1)\n testEqual(odd_numbers_in_list([3, 5, 7, 9, 11]), 5)", "def test_mul2():\n assert (2 * (x + 1)).is_Mul", "def test_example():\n x = 0\n y = 1\n assert x != y", "def test_twoSum(self):\n self.assertEqual(twoSum([2, 7, 11, 15], 9), [0, 1])", "def test_power_of_2(value: int, expected_result: bool):\n assert check_power_of_2(value) == expected_result", "def test_v1_correct(self):\r\n\r\n for index, expected_result in enumerate(self.prime_booleans):\r\n\r\n n = index + 1\r\n self.assertEqual(prime_numbers_v1(n), expected_result)", "def test_dummy():\n # ARRANGE\n number = 1\n # ACT\n number += 1\n # ASSERT\n assert number == 2", "def test_add_integer(self):\n assert cr.add(3, 2) == 3 + 2", "def test_getSum_twoNumbers(self):\r\n self.assertEqual(17, Arith().add(8, 9))", "def test_even(self):\n for i, r in self.params:\n with self.subTest(i=i, r=r):\n self.assertEqual(i % 2, r)", "def test_add_returns_correct_result(self):\n result = self.calc.add(2, 2)\n self.assertEqual(4, result)", "def test_correct_value(self):\n self.assertTrue(py_function(6) == 36)\n self.assertFalse(py_function(5) == 9)\n for i in range(0, 10):\n self.assertTrue(py_function(i) == i**2 if i != 0 else 100)", "def test_and_numbers(self):\n self.assertEqual(add(3,8), 11)", "def test_foo(self):\n self.ran = True\n 1 / 0", "def 
test_even(self):", "def equality():\n\n Assert(1) == 1\n Assert(1) != 0\n\n with Assert.raises(AssertionError):\n Assert(1) == 0\n\n with Assert.raises(AssertionError):\n Assert(1) != 1", "def equality():\r\n\r\n Assert(1) == 1\r\n Assert(1) != 0\r\n\r\n with Assert.raises(AssertionError):\r\n Assert(1) == 0\r\n\r\n with Assert.raises(AssertionError):\r\n Assert(1) != 1", "def test_add_numbers(self):\n a, b = 5, 6\n expected = a + b\n # check for equality, real vs expected\n self.assertEqual(add(a, b), expected)", "def testEquality(self):\n pass", "def add_one(x: int) -> int:\n assert x >= 0\n return x + 1", "def test_v3_correct(self):\r\n\r\n for index, expected_result in enumerate(self.prime_booleans):\r\n n = index + 1\r\n self.assertEqual(prime_numbers_v3(n), expected_result)", "def test_0_2_id_inc(self):\n\n self.b1.id = 1\n test = self.b1.id\n test2 = self.b2.id\n self.assertEqual(test, test2 - 1)", "def test_prime_2(self):\n\t self.assertTrue(prime_generator(2), [2])", "def test_domino_with_1_numbers():\n assert compute(1) == 0, \"Not ok\"", "def test_task554_main_logic(number, expected_value):\r\n assert algo.Task554.main_logic(number + 1) == expected_value", "def test_add_numbers(self):\n self.assertEqual(add(3, 8), 11)", "def test_add_numbers(self):\n self.assertEqual(add(3, 8), 11)", "def test_add_numbers(self):\n self.assertEqual(add(3, 8), 11)", "def test_odd(self):", "def test_single_value(self, test_input, expected, sc):\n assert sc.add(test_input) == expected", "def test_positive_integer_2():\n assert 1 == positive_integer('1')", "def _sideffect_test_23(self, arg):\n if self.iter < 3:\n self.iter += 1\n return False\n else:\n return True", "def test_one():\n expected = 1\n actual = one()\n assert actual == expected", "def test_domino_with_3_numbers():\n assert compute(3) == 2, \"Not ok\"", "def test_abundant_sequence_second_term(self):\n\n\t\tfirst_term = nth_abundant(1)\n\t\texcepted_output = 18\n\t\tself.assertEquals(first_term, excepted_output)", "def test_maths(self):\n\n # Test that basic integers work\n self.assertEqual(int(1) + int(1), int(2), \"Basic addition failed\")\n self.assertNotEqual(int(1) + int(1), int(3), \"Basic addition failed\")\n\n # Test doubles\n # FIXME: Deployment fails for some reason. Maybe bug in CPU? 
Commenting it out.\n # self.assertEqual(float(0.1) + float(0.2), float(0.3), \"Floating addition failed\")\n self.assertNotEqual(float(1) + float(1), float(3), \"Floating Addition failed\")", "def test_0_3_id_inc2(self):\n\n test = self.b1.id\n test2 = self.b2.id\n test3 = self.b3.id\n self.assertEqual(test, test2 - 1)\n self.assertEqual(test3, 22)", "def test_1():\n assert primes(1) == [2]", "def testEqual(a, b):\n if a == b:\n print('Pass')\n else:\n print('Fail')", "def test_2():\n assert primes(2) == [2, 3]", "def test_example():\n answer = True\n expected = True\n assert answer == expected", "def test_increment(self):\n x0 = 0\n y0 = increment(x0) # y0 should be 1\n self.assertEqual(y0, 1)\n\n x1 = 100\n y1 = increment(x1) # y1 should be 101\n self.assertTrue(y1, 101)\n\n x2 = -1\n y2 = increment(x2) # y2 should be 0\n self.assertEqual(y2, 0)\n\n x3 = -1.5\n y3 = increment(x3) # y3 should be -0.5\n self.assertEqual(y3, -0.5)", "def test_add(self):\n self.assertEqual(3, add(1, 2))\n self.assertNotEqual(3, add(2, 2))", "def test_add_numbers():\n assert add(3, 8) == 11", "def test_add(self):\n self.assertEqual(add(1, 1), 2, \"Wrong answer\")\n self.assertEqual(add(10, 1), 11, \"Wrong answer\")\n self.assertEqual(add(15, 15), 30, \"Wrong answer\")", "def test_pytest():\n _x = 1\n _y = 2\n res = _x + _y\n assert res == 3", "def test_one(self):\n\n input_ = 1\n output = math.factorial(input_)\n expected = 1\n\n self.assertEqual(expected, output,\n f'Result: {output}, expectd: {expected}')", "def test_add_integers(self):\n print(\"---running test_add_integers\")\n result = some_math.add(1, 2)\n assert result == 3", "def test_01_basic(self):\n self.assertTrue(True)\n self.assertEqual(0, 0)", "def test_num_buses_2(self):\n actual = a1.num_buses(33)\n expected = 1\n self.assertEqual(actual,expected)" ]
[ "0.700804", "0.69351697", "0.69351697", "0.69351697", "0.688757", "0.688757", "0.666264", "0.64881384", "0.64450884", "0.64153033", "0.6410425", "0.6410405", "0.6379688", "0.6379688", "0.63732797", "0.6352445", "0.6350113", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.6348631", "0.63375616", "0.6332115", "0.631259", "0.6289889", "0.6240136", "0.61950207", "0.618773", "0.614949", "0.61482555", "0.6121635", "0.6085795", "0.6077782", "0.6070021", "0.6065915", "0.6048652", "0.6040379", "0.60223", "0.60066074", "0.5998299", "0.5985", "0.59843135", "0.5956844", "0.59489816", "0.5944745", "0.5927351", "0.5916267", "0.59055275", "0.5902364", "0.5902364", "0.5902364", "0.5898304", "0.58958435", "0.5890092", "0.5877944", "0.58751845", "0.5857067", "0.58503723", "0.58376676", "0.58354616", "0.5831447", "0.58295923", "0.58174604", "0.581353", "0.5811365", "0.5810556", "0.5809765", "0.58071357", "0.5790873", "0.5790519", "0.5778365", "0.57525456", "0.5751488" ]
0.63992167
31
Test the flow for 3.6 and up, async generators are not supported in 3.5.
async def test_tornado_list_tables(self): tables = self.r.table_list().run(self.conn) assert isinstance(tables, list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def test2(self):\n return True", "def test_future_event(self):\n pass", "def test_annotated_async_from_coro(dut):\n v = yield produce.async_annotated(Value(1))\n assert v == 1\n\n try:\n yield produce.async_annotated(Error(SomeException))\n except SomeException:\n pass\n else:\n assert False", "async def test_annotated_async_from_async(dut):\n v = await produce.async_(Value(1))\n assert v == 1\n\n try:\n await produce.async_(Error(SomeException))\n except SomeException:\n pass\n else:\n assert False", "async def test1(self):\n return True", "async def test_coro_from_async(dut):\n v = await produce.coro(Value(1))\n assert v == 1\n\n try:\n await produce.coro(Error(SomeException))\n except SomeException:\n pass\n else:\n assert False", "async def async_generator() -> Generator[float, None, None]:\n for _ in range(10):\n await asyncio.sleep(1)\n yield random.random() * 10", "async def async_generator() -> Generator[float, None, None]:\n\n for i in range(10):\n yield random.random()\n await asyncio.sleep(1)", "async def test_fgsm(): \n #fgsm algo option:\n r = {}\n async with AsyncClient(app=app, base_url=\"http://test\") as ac:\n \n ALGO_NAME = AlteritAlgoName.fgsm_algo\n ffile = {'input_image': open(TEST_IMAGE_PATH, 'rb'),\n \"input_image_path\": TEST_IMAGE_PATH,\n \"alter_parameters\":json.dumps({\"acall\":True,\n \"epsilon\":0.01})\n }\n \n for epsilon_, result_ in zip([0.01, 0.1], ['saluki', 'weimaranner',]):\n r = await ac.post(f'/alterit/?algo_name={ALGO_NAME}', files = ffile)\n assert r.status_code == 200\n i1, i2, i3, i4 = await a_from_zip_stream_to_att_data(r)\n assert i1['0'][1] == result_ \n \n # async for i in mygen(5):\n # print(f'step {i}:')\n # #ALGO_NAME = AlteritAlgoName.fgsm_algo\n # ffile = {'input_image': open(TEST_IMAGE_PATH, 'rb'),\n # \"input_image_path\": TEST_IMAGE_PATH,\n # \"alter_parameters\":json.dumps({\"acall\":True,\n # \"epsilon\":0.01}),}\n # r[i] = await ac.post(f'/alterit/?algo_name={ALGO_NAME}', files = ffile)\n # assert r[i].status_code == 200\n # i1, i2, i3, i4 = await a_from_zip_stream_to_att_data(r[i])\n # assert i1['0'][1] == \"saluki\"", "def test_async_function(self):\n @self.eventloop.wait_for(timeout=0.1)\n async def go():\n self.assertTrue(self.reactor.in_call_from_thread)\n return 17\n\n self.assertEqual((go(), go()), (17, 17))\n self.assertFalse(inspect.iscoroutinefunction(go))", "async def test_annotated_async_from_async(dut):\n v = await produce.async_annotated(Value(1))\n assert v == 1\n\n try:\n await produce.async_annotated(Error(SomeException))\n except SomeException:\n pass\n else:\n assert False", "async def pypck_client_2() -> AsyncGenerator[PchkConnectionManager, None]:\n pcm = MockPchkConnectionManager(\n HOST, PORT + 1, USERNAME, PASSWORD, settings={\"SK_NUM_TRIES\": 0}\n )\n yield pcm\n await pcm.async_close()\n assert len(pcm.task_registry.tasks) == 0", "async def async_generator() -> Generator[float, None, None]:\n for i in range(10):\n yield (random.uniform(0, 10))\n await asyncio.sleep(1)", "async def async_setup(self):\n pass", "async def handle_async_stream(req):\n yield await logic_async(req)", "def generator_checker_py3(gen, gen_type, bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, _recursion_check):\n initialized = False\n sn = None\n try:\n while True:\n a = gen.send(sn)\n if initialized or not a is None:\n if not gen_type.__args__[0] is Any and \\\n not _isinstance(a, gen_type.__args__[0], bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs,\n _recursion_check):\n 
tpa = deep_type(a)\n msg = _make_generator_error_message(deep_type(a), gen, gen_type.__args__[0],\n 'has incompatible yield type')\n _raise_typecheck_error(msg, True, a, tpa, gen_type.__args__[0])\n initialized = True\n sn = yield a\n if not gen_type.__args__[1] is Any and \\\n not _isinstance(sn, gen_type.__args__[1], bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, _recursion_check):\n tpsn = deep_type(sn)\n msg = _make_generator_error_message(tpsn, gen, gen_type.__args__[1],\n 'has incompatible send type')\n _raise_typecheck_error(msg, False, sn, tpsn, gen_type.__args__[1])\n except StopIteration as st:\n # Python 3:\n # todo: Check if st.value is always defined (i.e. as None if not present)\n if not gen_type.__args__[2] is Any and \\\n not _isinstance(st.value, gen_type.__args__[2], bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, _recursion_check):\n tpst = deep_type(st.value)\n msg = _make_generator_error_message(tpst, gen, gen_type.__args__[2],\n 'has incompatible return type')\n _raise_typecheck_error(msg, True, st.value, tpst, gen_type.__args__[2])\n return", "def step_async(self, actions):", "def get_asyncgen_hooks(): # real signature unknown; restored from __doc__\n pass", "async def pypck_client() -> AsyncGenerator[PchkConnectionManager, None]:\n pcm = MockPchkConnectionManager(\n HOST, PORT, USERNAME, PASSWORD, settings={\"SK_NUM_TRIES\": 0}\n )\n yield pcm\n await pcm.async_close()\n assert len(pcm.task_registry.tasks) == 0", "def pytest_fixture_setup(fixturedef):\n if isasyncgenfunction(fixturedef.func):\n func = fixturedef.func\n\n strip_request = False\n if 'request' not in fixturedef.argnames:\n fixturedef.argnames += ('request',)\n strip_request = True\n\n def wrapper(*args, **kwargs):\n request = kwargs['request']\n\n if strip_request:\n del kwargs['request']\n\n if 'loop' not in request.fixturenames:\n raise Exception(\n \"Asynchronous fixtures must depend on the 'loop' fixture or \"\n \"be used in tests depending from it.\"\n )\n\n loop = request.getfixturevalue('loop')\n # for async generators, we need to advance the generator once,\n # then advance it again in a finalizer\n gen = func(*args, **kwargs)\n\n def finalizer():\n try:\n return loop.run_until_complete(gen.__anext__())\n except StopAsyncIteration: # NOQA\n pass\n\n request.addfinalizer(finalizer)\n return loop.run_until_complete(gen.__anext__())\n\n fixturedef.func = wrapper\n\n elif asyncio.iscoroutinefunction(fixturedef.func):\n func = fixturedef.func\n\n strip_request = False\n if 'request' not in fixturedef.argnames:\n fixturedef.argnames += ('request',)\n strip_request = True\n\n def wrapper(*args, **kwargs):\n request = kwargs['request']\n if 'loop' not in request.fixturenames:\n raise Exception(\n \"Asynchronous fixtures must depend on the 'loop' fixture or \"\n \"be used in tests depending from it.\"\n )\n\n loop = request.getfixturevalue('loop')\n\n if strip_request:\n del kwargs['request']\n\n return loop.run_until_complete(func(*args, **kwargs))\n\n fixturedef.func = wrapper\n\n else:\n return", "def is_generator(func):\n return (\n inspect.isgeneratorfunction(func) or\n isinstance(func, asyncio.Future) or\n inspect.isgenerator(func)\n )", "async def _setup(self):", "def test_async_function(self):\n myreactor = FakeReactor()\n c = EventLoop(lambda: myreactor, lambda f, g: None)\n c.no_setup()\n calls = []\n\n @c.run_in_reactor\n async def go():\n self.assertTrue(myreactor.in_call_from_thread)\n calls.append(1)\n return 23\n\n 
self.assertEqual((go().wait(0.1), go().wait(0.1)), (23, 23))\n self.assertEqual(len(calls), 2)\n self.assertFalse(inspect.iscoroutinefunction(go))", "def test_no_listeners(testloop, testchannel):\n\n async def run():\n \"\"\"run\"\"\"\n async for i in aiter(range(10)):\n await testchannel.send(i)\n await asyncio.sleep(0)\n\n with testchannel.open():\n testchannel.start(asyncfunc=False)\n testloop.run_until_complete(run())", "def testGeneratorType(self):", "async def test_create_async_connector() -> None:\n connector = await create_async_connector()\n assert connector._loop == asyncio.get_running_loop()\n await connector.close_async()", "def test_datachannel_async(testloop, testchannel, asend):\n\n val = []\n dataval = 42\n stop = False\n\n @testchannel(anchortype=AnchorType.first)\n async def one(data):\n \"\"\"one\"\"\"\n val.append((1, data))\n\n @testchannel(anchortype=AnchorType.first)\n async def two(data):\n \"\"\"two\"\"\"\n val.append((2, data))\n\n @testchannel\n async def three(data):\n \"\"\"three\"\"\"\n val.append((3, data))\n\n @testchannel\n async def four(data):\n \"\"\"four\"\"\"\n val.append((4, data))\n\n @testchannel(anchortype=AnchorType.last)\n async def five(data):\n \"\"\"five\"\"\"\n val.append((5, data))\n\n @testchannel(anchortype=AnchorType.last)\n async def six(data):\n \"\"\"six\"\"\"\n nonlocal stop\n val.append((6, data))\n stop = True\n\n async def run():\n \"\"\"run\"\"\"\n nonlocal stop\n assert testchannel.state == ChannelState.listening\n assert testchannel.isopen()\n if asend:\n await testchannel.send(dataval)\n else:\n testchannel.put(dataval)\n while True:\n if stop:\n break\n await asyncio.sleep(0)\n\n assert not testchannel.isopen()\n with testchannel.open():\n testchannel.start()\n testloop.run_until_complete(run())\n assert not testchannel.isopen()\n\n first = set((i, dataval) for i in [1, 2])\n middle = set((i, dataval) for i in [3, 4])\n last = set((i, dataval) for i in [5, 6])\n\n assert set(val[0:2]) == first\n assert set(val[2:4]) == middle\n assert set(val[4:6]) == last", "async def test_full_user_flow_multiple_installations(\n hass: HomeAssistant,\n mock_setup_entry: AsyncMock,\n mock_verisure_config_flow: MagicMock,\n) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n assert result.get(\"step_id\") == \"user\"\n assert result.get(\"type\") == FlowResultType.FORM\n assert result.get(\"errors\") == {}\n\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result2.get(\"step_id\") == \"installation\"\n assert result2.get(\"type\") == FlowResultType.FORM\n assert result2.get(\"errors\") is None\n\n result3 = await hass.config_entries.flow.async_configure(\n result2[\"flow_id\"], {\"giid\": \"54321\"}\n )\n await hass.async_block_till_done()\n\n assert result3.get(\"type\") == FlowResultType.CREATE_ENTRY\n assert result3.get(\"title\") == \"descending (54321th street)\"\n assert result3.get(\"data\") == {\n CONF_GIID: \"54321\",\n CONF_EMAIL: \"verisure_my_pages@example.com\",\n CONF_PASSWORD: \"SuperS3cr3t!\",\n }\n\n assert len(mock_verisure_config_flow.login.mock_calls) == 1\n assert len(mock_setup_entry.mock_calls) == 1", "def test_await_if_coroutine(coroutine, exp_return, args):\n result = asyncio.run(await_if_coroutine(coroutine, *args))\n\n assert result == exp_return", "async def 
test_full_user_flow_multiple_installations_with_mfa(\n hass: HomeAssistant,\n mock_setup_entry: AsyncMock,\n mock_verisure_config_flow: MagicMock,\n) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n assert result.get(\"step_id\") == \"user\"\n assert result.get(\"type\") == FlowResultType.FORM\n assert result.get(\"errors\") == {}\n\n mock_verisure_config_flow.login.side_effect = VerisureLoginError(\n \"Multifactor authentication enabled, disable or create MFA cookie\"\n )\n\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result2.get(\"type\") == FlowResultType.FORM\n assert result2.get(\"step_id\") == \"mfa\"\n\n mock_verisure_config_flow.login.side_effect = None\n\n result3 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"code\": \"123456\",\n },\n )\n await hass.async_block_till_done()\n\n assert result3.get(\"step_id\") == \"installation\"\n assert result3.get(\"type\") == FlowResultType.FORM\n assert result3.get(\"errors\") is None\n\n result4 = await hass.config_entries.flow.async_configure(\n result3[\"flow_id\"], {\"giid\": \"54321\"}\n )\n await hass.async_block_till_done()\n\n assert result4.get(\"type\") == FlowResultType.CREATE_ENTRY\n assert result4.get(\"title\") == \"descending (54321th street)\"\n assert result4.get(\"data\") == {\n CONF_GIID: \"54321\",\n CONF_EMAIL: \"verisure_my_pages@example.com\",\n CONF_PASSWORD: \"SuperS3cr3t!\",\n }\n\n assert len(mock_verisure_config_flow.login.mock_calls) == 1\n assert len(mock_verisure_config_flow.request_mfa.mock_calls) == 1\n assert len(mock_verisure_config_flow.validate_mfa.mock_calls) == 1\n assert len(mock_setup_entry.mock_calls) == 1", "def async_generator(func):\n @functools.wraps(func)\n def function(*args, **kwargs):\n \"Wrapped function\"\n return GeneratorFuture(func(*args, **kwargs))\n return function", "def testBaseCase(self):\n r = []\n async_fn = utils.make_async()(lambda: r.append(\"a\"))\n async_fn()\n time.sleep(1)\n self.assertListEqual(r, [\"a\"])", "def test_upgrade_with_auto_upgrade_latest_engine_enabled():", "def yieldRPC(remoteYields): #Status: WIP\r\n pass", "async def test_flow_works(hass: HomeAssistant) -> None:\n disc_bridge = get_discovered_bridge(supports_v2=True)\n\n with patch(\n \"homeassistant.components.hue.config_flow.discover_nupnp\",\n return_value=[disc_bridge],\n ):\n result = await hass.config_entries.flow.async_init(\n const.DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n\n assert result[\"type\"] == \"form\"\n assert result[\"step_id\"] == \"init\"\n\n result = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"], user_input={\"id\": disc_bridge.id}\n )\n\n assert result[\"type\"] == \"form\"\n assert result[\"step_id\"] == \"link\"\n\n flow = next(\n flow\n for flow in hass.config_entries.flow.async_progress()\n if flow[\"flow_id\"] == result[\"flow_id\"]\n )\n assert flow[\"context\"][\"unique_id\"] == \"aabbccddeeff\"\n\n with patch.object(config_flow, \"create_app_key\", return_value=\"123456789\"):\n result = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"], user_input={}\n )\n\n assert result[\"type\"] == \"create_entry\"\n assert result[\"title\"] == \"Hue Bridge aabbccddeeff\"\n assert result[\"data\"] == {\n \"host\": \"1.2.3.4\",\n 
\"api_key\": \"123456789\",\n \"api_version\": 2,\n }", "def test_block_missing_batch_dependency(self):\n pass", "async def dsmr_connection_send_validate_fixture(hass):\n\n transport = MagicMock(spec=asyncio.Transport)\n protocol = MagicMock(spec=DSMRProtocol)\n\n protocol.telegram = {\n EQUIPMENT_IDENTIFIER: CosemObject([{\"value\": \"12345678\", \"unit\": \"\"}]),\n EQUIPMENT_IDENTIFIER_GAS: CosemObject([{\"value\": \"123456789\", \"unit\": \"\"}]),\n P1_MESSAGE_TIMESTAMP: CosemObject([{\"value\": \"12345678\", \"unit\": \"\"}]),\n }\n\n async def connection_factory(*args, **kwargs):\n \"\"\"Return mocked out Asyncio classes.\"\"\"\n if args[1] == \"5L\":\n protocol.telegram = {\n LUXEMBOURG_EQUIPMENT_IDENTIFIER: CosemObject(\n [{\"value\": \"12345678\", \"unit\": \"\"}]\n ),\n EQUIPMENT_IDENTIFIER_GAS: CosemObject(\n [{\"value\": \"123456789\", \"unit\": \"\"}]\n ),\n }\n if args[1] == \"5S\":\n protocol.telegram = {\n P1_MESSAGE_TIMESTAMP: CosemObject([{\"value\": \"12345678\", \"unit\": \"\"}]),\n }\n if args[1] == \"Q3D\":\n protocol.telegram = {\n Q3D_EQUIPMENT_IDENTIFIER: CosemObject(\n [{\"value\": \"12345678\", \"unit\": \"\"}]\n ),\n }\n\n return (transport, protocol)\n\n connection_factory = MagicMock(wraps=connection_factory)\n\n async def wait_closed():\n if isinstance(connection_factory.call_args_list[0][0][2], str):\n # TCP\n telegram_callback = connection_factory.call_args_list[0][0][3]\n else:\n # Serial\n telegram_callback = connection_factory.call_args_list[0][0][2]\n\n telegram_callback(protocol.telegram)\n\n protocol.wait_closed = wait_closed\n\n with patch(\n \"homeassistant.components.dsmr.config_flow.create_dsmr_reader\",\n connection_factory,\n ), patch(\n \"homeassistant.components.dsmr.config_flow.create_tcp_dsmr_reader\",\n connection_factory,\n ):\n yield (connection_factory, transport, protocol)", "def test_normal_basic():\r\n yield check_normal_basic, False\r\n yield check_normal_basic, False, True\r\n yield check_normal_basic, True", "async def test_await_causes_start(dut):\n coro = produce.async_annotated(Value(1))\n assert not coro.has_started()\n await coro\n assert coro.has_started()", "async def test_loop():\n async with Sonarr(HOST, API_KEY) as sonarr:\n assert isinstance(sonarr, Sonarr)", "def iscoroutine(obj):\n return inspect.isgenerator(obj)", "async def setup(self):\n pass", "def public_async_generator(func):\n @functools.wraps(func)\n def function(*args, **kwargs):\n \"Wrapped function\"\n return GeneratorFuture(func(*args, **kwargs)).future\n return function", "def async_lucas():\n yield 2\n a = 2\n b = 1\n while True:\n yield b\n a, b = b, a + b", "def test_yield_in_const_conditional_true():\n if True:\n print((yield 1))", "def test_task_sequence(self):\n\n class Task(tasks.WorkflowTask):\n name = 'task'\n\n def apply_async(self):\n record.append(self.i)\n self.set_state(tasks.TASK_SUCCEEDED)\n self.async_result.result = None\n return self.async_result\n\n task_count = 10\n\n # prepare the task seuqence\n seq_tasks = []\n for i in range(task_count):\n t = Task(mock.Mock())\n seq_tasks.append(t)\n t.i = i\n g = TaskDependencyGraph(MockWorkflowContext())\n seq = g.sequence()\n seq.add(*seq_tasks)\n\n record = []\n\n with limited_sleep_mock():\n g.execute()\n\n expected = list(range(task_count))\n self.assertEqual(expected, record)", "def testExplicitGeneratorExecptionUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tx.g(8, 9)\n\t\tc.generator()\n\t\tc.setReturn(10)\n\t\tc.setException(Exception(\"bogus\"))\n\t\tc.replay()\n\t\tg = x.g(8, 
9)\n\t\tself.failUnless(g.next() == 10)\n\t\tself.failUnlessRaises(Exception, g.next)", "def test_issue_335(self):\n num_tasks_in_seq = 5\n seq = MySequentialCollection([\n Application(\n ['echo', 'test1'],\n [], [],\n os.path.join(self.tmpdir, 'test.%d.d' % i))\n for i in range(num_tasks_in_seq)\n ])\n engine = create_engine(self.cfgfile, auto_enable_auth=True)\n engine.add(seq)\n while True:\n engine.progress()\n if (len([task for task in seq.tasks\n if task.execution.state == Run.State.TERMINATED])\n == num_tasks_in_seq):\n engine.progress()\n # check that final SequentialCollection state is TERMINATED\n assert seq.execution.state == Run.State.TERMINATED\n break\n # check that next() has been called once per each task\n assert seq.next_called_n_times == num_tasks_in_seq", "def test_datachannel_send_wait_notopened(testloop, testchannel):\n\n val = []\n\n @testchannel\n async def one(data):\n \"\"\"one\"\"\"\n val.append(data)\n\n async def run():\n \"\"\"run\"\"\"\n async for i in aiter(range(10)):\n asyncio.ensure_future(testchannel.send(i))\n await asyncio.sleep(0)\n await testchannel.join()\n\n testloop.run_until_complete(run())\n\n assert not val", "def patchAsyncio():\n import nest_asyncio\n nest_asyncio.apply()", "def flow_to_iter(flow):\n if ((sys.version_info.major == 3 and hasattr(flow, \"__next__\"))\n or (sys.version_info.major == 2 and hasattr(flow, \"next\"))):\n return flow\n else:\n return iter(flow)", "def test_cancel_generator_async(serve_instance):\n signal_actor = SignalActor.remote()\n\n @serve.deployment\n class Downstream:\n async def __call__(self, *args):\n yield \"hi\"\n await send_signal_on_cancellation(signal_actor)\n\n @serve.deployment\n class Ingress:\n def __init__(self, handle):\n self._h = handle.options(use_new_handle_api=True, stream=True)\n\n async def __call__(self, *args):\n # Send a request and wait for it to start executing.\n g = self._h.remote()\n assert await g.__anext__() == \"hi\"\n\n # Cancel it and verify that it is cancelled via signal.\n g.cancel()\n\n with pytest.raises(ray.exceptions.TaskCancelledError):\n assert await g.__anext__() == \"hi\"\n\n await signal_actor.wait.remote()\n\n h = serve.run(Ingress.bind(Downstream.bind())).options(use_new_handle_api=True)\n h.remote().result() # Would raise if test failed.", "def test_fibonacci_generator():\n computed_fibonacci_value = fibonacci.fibonacci_generator(8)\n assert isinstance(computed_fibonacci_value, types.GeneratorType) is True", "async def test_full_user_flow_single_installation_with_mfa(\n hass: HomeAssistant,\n mock_setup_entry: AsyncMock,\n mock_verisure_config_flow: MagicMock,\n) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n assert result.get(\"step_id\") == \"user\"\n assert result.get(\"type\") == FlowResultType.FORM\n assert result.get(\"errors\") == {}\n\n mock_verisure_config_flow.login.side_effect = VerisureLoginError(\n \"Multifactor authentication enabled, disable or create MFA cookie\"\n )\n\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result2.get(\"type\") == FlowResultType.FORM\n assert result2.get(\"step_id\") == \"mfa\"\n\n mock_verisure_config_flow.login.side_effect = None\n mock_verisure_config_flow.get_installations.return_value = {\n k1: {k2: {k3: [v3[0]] for k3, v3 in v2.items()} for k2, v2 in 
v1.items()}\n for k1, v1 in mock_verisure_config_flow.get_installations.return_value.items()\n }\n\n result3 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"code\": \"123456\",\n },\n )\n await hass.async_block_till_done()\n\n assert result3.get(\"type\") == FlowResultType.CREATE_ENTRY\n assert result3.get(\"title\") == \"ascending (12345th street)\"\n assert result3.get(\"data\") == {\n CONF_GIID: \"12345\",\n CONF_EMAIL: \"verisure_my_pages@example.com\",\n CONF_PASSWORD: \"SuperS3cr3t!\",\n }\n\n assert len(mock_verisure_config_flow.login.mock_calls) == 1\n assert len(mock_verisure_config_flow.request_mfa.mock_calls) == 1\n assert len(mock_verisure_config_flow.validate_mfa.mock_calls) == 1\n assert len(mock_setup_entry.mock_calls) == 1", "async def test_task_not_awaitable(arg):\n with pytest.raises(OSError):\n async with Scope() as n:\n n.spawn(arg)", "async def async_setup(opp, config) -> bool:\n\n opp.data.setdefault(DOMAIN, {})\n\n if DOMAIN not in config:\n return True\n\n if not opp.config_entries.async_entries(DOMAIN):\n opp.async_create_task(\n opp.config_entries.flow.async_init(\n DOMAIN,\n context={\"source\": SOURCE_IMPORT},\n data=config[DOMAIN],\n )\n )\n\n return True", "async def run_generator(pos: int):\n LOGGER.info(f\"Final sequence: {[i async for i in sleep_generator(pos)]}\")", "def test_python_after_38():\n import sys\n assert sys.version_info >= (3, 8)", "async def test_Connector_Init_async_context_manager() -> None:\n loop = asyncio.get_running_loop()\n async with Connector(loop=loop) as connector:\n assert connector._ip_type == IPTypes.PUBLIC\n assert connector._enable_iam_auth is False\n assert connector._timeout == 30\n assert connector._credentials is None\n assert connector._loop == loop", "async def test_wrap_async(self):\n result = 987\n wrapped = async_util.wrap_async(result)\n await wrapped\n assert isinstance(wrapped, asyncio.Future)\n assert wrapped.result() == result", "async def my_job():\n\n while test_plan:\n to_yield = test_plan[-1][0]\n test_plan[-1][0] = None\n yield to_yield", "async def async_init(\n config: Optional[Config] = None,\n targets: Optional[List[ConfigUnit]] = None\n) -> AsyncGenerator[Config, None]:\n # pylint: disable=import-outside-toplevel\n from .units import ASYNC_UNIT\n\n if config is None:\n assert targets is not None\n config = Config(targets=targets)\n else:\n assert targets is None\n\n assert isinstance(config, Config)\n\n set_config(config)\n\n config.blacklist_target(ASYNC_UNIT)\n\n wrapper_func = await config.async_init()\n\n if wrapper_func is not None:\n raise Exception(\"Can't call wrapper func {} on init\".format(wrapper_func))\n\n try:\n yield config\n finally:\n await config.async_exit()", "async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:\n await async_setup_pipeline_store(hass)\n async_register_websocket_api(hass)\n\n return True", "def init(self):\n \n self._nc_session = TestBedTests.TBNetconfSession(self.log, self.loop)\n self._nc_proxy = TestBedTests.TBNetconfProxy(self._nc_session, UtCompositeYang, self.log)\n self._netconf_test_objects = []\n self._pbreq_test_objects = []\n\n for cls in NETCONF_TESTS:\n obj = cls(self._dts, self.log, self._nc_proxy, self._loop)\n yield from obj.dts_self_register()\n self._netconf_test_objects.append(obj)\n\n for cls in PBREQ_TESTS:\n obj = cls(self._dts, self.log, self._nc_proxy, self._loop)\n yield from obj.dts_self_register()\n self._pbreq_test_objects.append(obj)\n\n @asyncio.coroutine\n def run_all_tests(xact_info, 
action, ks_path, msg):\n ro1 = yield from self.run_tests(self._netconf_test_objects, msg.continue_on_failure)\n if ro1.failed_count is 0 or msg.continue_on_failure is True:\n ro2 = yield from self.run_tests(self._pbreq_test_objects, msg.continue_on_failure)\n\n ro = RwAgentTestbedYang.AgentTestsOp()\n ro.total_tests = ro1.total_tests + ro2.total_tests\n ro.passed_count = ro1.passed_count + ro2.passed_count\n ro.failed_count = ro1.failed_count + ro2.failed_count\n #ro.failed_tests = ro1.failed_tests + ro2.failed_tests\n\n xpath = \"O,/agt-tb:agent-tests\"\n xact_info.respond_xpath(rwdts.XactRspCode.ACK, xpath, ro)\n\n @asyncio.coroutine\n def run_all_netconf_tests(xact_info, action, ks_path, msg):\n ro = yield from self.run_tests(self._netconf_test_objects)\n xpath = \"O,/agt-tb:agent-tests\"\n xact_info.respond_xpath(rwdts.XactRspCode.ACK, xpath, ro)\n\n @asyncio.coroutine\n def run_all_pbreqs_tests(xact_info, action, ks_path, msg):\n ro = yield from self.run_tests(self._pbreq_test_objects)\n xpath = \"O,/agt-tb:agent-tests\"\n xact_info.respond_xpath(rwdts.XactRspCode.ACK, xpath, ro)\n \n # Register for all test-cases\n yield from self._dts.register(\n xpath=\"I,/agt-tb:agent-tests/agt-tb:all\",\n flags=rwdts.Flag.PUBLISHER,\n handler=rift.tasklets.DTS.RegistrationHandler(on_prepare=run_all_tests))\n\n # Register for per category all test-cases\n yield from self._dts.register(\n xpath=\"I,/agt-tb:agent-tests/agt-tb:netconf-tests/agt-tb:all\",\n flags=rwdts.Flag.PUBLISHER,\n handler=rift.tasklets.DTS.RegistrationHandler(on_prepare=run_all_netconf_tests))\n\n yield from self._dts.register(\n xpath=\"I,/agt-tb:agent-tests/agt-tb:pb-request-tests/agt-tb:all\",\n flags=rwdts.Flag.PUBLISHER,\n handler=rift.tasklets.DTS.RegistrationHandler(on_prepare=run_all_pbreqs_tests))", "def async_test(\n f: Callable[[TestCase], Coroutine[Deferred[object], object, object]]\n) -> Callable[[TestCase], Deferred[None]]:\n\n @inlineCallbacks\n def g(self: object) -> Generator[Deferred[object], object, None]:\n d: Deferred[object] = Deferred.fromCoroutine(f(self))\n yield d\n\n return g", "async def test_job_async_gen_done(my_job_async_gen):\n\n # Set up callback to get notifications when job state changes.\n job = None\n job_update_counter = 0\n\n def on_job_update(_job):\n \"\"\"The callback updates `job` and `job_update_counter`.\"\"\"\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Make sure job state tells the job is cancelable when job is in\n # `PENDING`, `WORKING`, and `CANCELING` state and is not\n # cancelable in other states.\n if job.state in ['PENDING', 'WORKING', 'CANCELING']:\n assert job.is_cancelable, ('Job is not cancelable when it '\n 'must be cancelable!')\n else:\n assert not job.is_cancelable, ('Job is cancelable when it '\n 'must not be cancelable!')\n\n my_job_async_gen.set_on_update(on_job_update)\n\n # Submit a job which must finish OK.\n new_job = await my_job_async_gen.job(yieldsteps=1, mustfail=False)\n\n # Check that job is cancelable.\n assert new_job.is_cancelable, ('Job instance states that cancelable job '\n 'is not cancelable!')\n\n # Check job state right after job is submitted.\n assert job.state == 'PENDING', ('Submitted job has wrong state '\n f'{job.state}!')\n\n # Process ASGI messages and wait for the job to finish.\n await my_job_async_gen.process_jobs()\n\n # Check job state when job is done.\n assert job.state == 'DONE', f'Finished job has wrong state `{job.state}`!'\n\n # Check that job update callback has been called four times:\n # 1. 
job is submitted\n # 2. job switches to the working state\n # 3. job reports the progress\n # 4. job finishes\n assert job_update_counter == 4, 'Incorrect number of job updates detected!'", "def _build_async_flow(self):\n\n def process_step(state, step, root):\n if not state._is_local_function(self.context):\n return\n for item in state.next or []:\n next_state = root[item]\n if next_state.async_object:\n next_step = step.to(next_state.async_object)\n process_step(next_state, next_step, root)\n\n default_source, self._wait_for_result = _init_async_objects(\n self.context, self._steps.values()\n )\n\n source = self._source or default_source\n for next_state in self._start_steps:\n next_step = source.to(next_state.async_object)\n process_step(next_state, next_step, self)\n\n for step in self._steps.values():\n # add error handler hooks\n if (step.on_error or self.on_error) and step.async_object:\n error_step = self._steps[step.on_error or self.on_error]\n # never set a step as its own error handler\n if step != error_step:\n step.async_object.set_recovery_step(error_step.async_object)\n\n self._controller = source.run()", "def on_validate(\n self,\n ) -> AsyncIteratorOrIterator[None]: # pragma: no cover # pyright: ignore\n yield None", "async def test_sync_ctx_manager():\n with pytest.raises(RuntimeError):\n with Scope():\n pass", "async def _do_if_possible(self, coroutine: Awaitable[None]) -> None:\n try:\n await coroutine\n except IncorrectStateException:\n pass", "async def test_dependent_fixture(dependent_fixture):\n await asyncio.sleep(0.1)", "async def test_flow_oppio_discovery(opp):\n result = await opp.config_entries.flow.async_init(\n DECONZ_DOMAIN,\n data={\n \"addon\": \"Mock Addon\",\n CONF_HOST: \"mock-deconz\",\n CONF_PORT: 80,\n CONF_SERIAL: BRIDGEID,\n CONF_API_KEY: API_KEY,\n },\n context={\"source\": SOURCE_OPPIO},\n )\n assert result[\"type\"] == RESULT_TYPE_FORM\n assert result[\"step_id\"] == \"oppio_confirm\"\n assert result[\"description_placeholders\"] == {\"addon\": \"Mock Addon\"}\n\n with patch(\n \"openpeerpower.components.deconz.async_setup_entry\",\n return_value=True,\n ) as mock_setup_entry:\n result = await opp.config_entries.flow.async_configure(\n result[\"flow_id\"], user_input={}\n )\n await opp.async_block_till_done()\n\n assert result[\"type\"] == RESULT_TYPE_CREATE_ENTRY\n assert result[\"result\"].data == {\n CONF_HOST: \"mock-deconz\",\n CONF_PORT: 80,\n CONF_API_KEY: API_KEY,\n }\n assert len(mock_setup_entry.mock_calls) == 1", "def __getattribute__(self, name):\n attr = super().__getattribute__(name)\n if name.startswith('test_') and asyncio.iscoroutinefunction(attr):\n return lambda: asyncio.run(self.async_test_wrapper(attr))\n else:\n return attr", "def __next__(self) -> Awaitable:\n return self.read()", "def Next():\n return CheckForError(lib.Generators_Get_Next())", "def awaitable(obj):\n yield from asyncio.sleep(0)\n return obj", "async def long_running_task(interface, generator, cleanup):\n try:\n try:\n while True:\n delay = next(generator)\n if delay:\n await asyncio.sleep(delay)\n except StopIteration as e:\n result = e.value\n except Exception as e:\n print(\"Error in long running handler:\", e, file=sys.stderr)\n traceback.print_exc()\n else:\n if cleanup:\n try:\n cleanup(interface, result)\n except Exception as e:\n print(\"Error in long running handler cleanup:\", e, file=sys.stderr)\n traceback.print_exc()", "async def my_job_async_gen(yieldsteps, *, mustfail):\n\n for i in range(yieldsteps):\n progress = {\n 'message': 'step %s or 
%s' % (i + 1, yieldsteps),\n 'payload': dict({'step': i + 1, 'total': yieldsteps}),\n 'readiness': (i + 1) / yieldsteps,\n }\n yield progress\n\n if mustfail:\n raise RuntimeError('Job failed, as requested!')", "async def async_comprehension() -> List[float]:\n return [i async for i in async_generator()]", "async def getrandom_number() :\n\n # run an infinite loop to continue generating random numbers\n while True: \n await asyncio.sleep(2) # let this task sleep for a while\n yield random.randint(0, sys.maxsize) # yield a random int", "async def test_verisure_errors(\n hass: HomeAssistant,\n mock_setup_entry: AsyncMock,\n mock_verisure_config_flow: MagicMock,\n side_effect: Exception,\n error: str,\n) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n\n mock_verisure_config_flow.login.side_effect = side_effect\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result2.get(\"type\") == FlowResultType.FORM\n assert result2.get(\"step_id\") == \"user\"\n assert result2.get(\"errors\") == {\"base\": error}\n\n mock_verisure_config_flow.login.side_effect = VerisureLoginError(\n \"Multifactor authentication enabled, disable or create MFA cookie\"\n )\n mock_verisure_config_flow.request_mfa.side_effect = side_effect\n\n result3 = await hass.config_entries.flow.async_configure(\n result2[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n mock_verisure_config_flow.request_mfa.side_effect = None\n\n assert result3.get(\"type\") == FlowResultType.FORM\n assert result3.get(\"step_id\") == \"user\"\n assert result3.get(\"errors\") == {\"base\": \"unknown_mfa\"}\n\n result4 = await hass.config_entries.flow.async_configure(\n result3[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result4.get(\"type\") == FlowResultType.FORM\n assert result4.get(\"step_id\") == \"mfa\"\n\n mock_verisure_config_flow.validate_mfa.side_effect = side_effect\n\n result5 = await hass.config_entries.flow.async_configure(\n result4[\"flow_id\"],\n {\n \"code\": \"123456\",\n },\n )\n assert result5.get(\"type\") == FlowResultType.FORM\n assert result5.get(\"step_id\") == \"mfa\"\n assert result5.get(\"errors\") == {\"base\": error}\n\n mock_verisure_config_flow.get_installations.return_value = {\n k1: {k2: {k3: [v3[0]] for k3, v3 in v2.items()} for k2, v2 in v1.items()}\n for k1, v1 in mock_verisure_config_flow.get_installations.return_value.items()\n }\n mock_verisure_config_flow.validate_mfa.side_effect = None\n mock_verisure_config_flow.login.side_effect = None\n\n result6 = await hass.config_entries.flow.async_configure(\n result5[\"flow_id\"],\n {\n \"code\": \"654321\",\n },\n )\n await hass.async_block_till_done()\n\n assert result6.get(\"type\") == FlowResultType.CREATE_ENTRY\n assert result6.get(\"title\") == \"ascending (12345th street)\"\n assert result6.get(\"data\") == {\n CONF_GIID: \"12345\",\n CONF_EMAIL: \"verisure_my_pages@example.com\",\n CONF_PASSWORD: \"SuperS3cr3t!\",\n }\n\n assert len(mock_verisure_config_flow.login.mock_calls) == 3\n assert len(mock_verisure_config_flow.request_mfa.mock_calls) == 2\n assert 
len(mock_verisure_config_flow.validate_mfa.mock_calls) == 2\n assert len(mock_setup_entry.mock_calls) == 1", "async def test_full_user_flow_single_installation(\n hass: HomeAssistant,\n mock_setup_entry: AsyncMock,\n mock_verisure_config_flow: MagicMock,\n) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n assert result.get(\"step_id\") == \"user\"\n assert result.get(\"type\") == FlowResultType.FORM\n assert result.get(\"errors\") == {}\n\n mock_verisure_config_flow.get_installations.return_value = {\n k1: {k2: {k3: [v3[0]] for k3, v3 in v2.items()} for k2, v2 in v1.items()}\n for k1, v1 in mock_verisure_config_flow.get_installations.return_value.items()\n }\n\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {\n \"email\": \"verisure_my_pages@example.com\",\n \"password\": \"SuperS3cr3t!\",\n },\n )\n await hass.async_block_till_done()\n\n assert result2.get(\"type\") == FlowResultType.CREATE_ENTRY\n assert result2.get(\"title\") == \"ascending (12345th street)\"\n assert result2.get(\"data\") == {\n CONF_GIID: \"12345\",\n CONF_EMAIL: \"verisure_my_pages@example.com\",\n CONF_PASSWORD: \"SuperS3cr3t!\",\n }\n\n assert len(mock_verisure_config_flow.login.mock_calls) == 1\n assert len(mock_setup_entry.mock_calls) == 1", "def setUp(self):\n self.loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.loop)", "def setUp(self):\n self.loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.loop)", "def setUp(self):\n self.loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.loop)", "def setUp(self):\n self.loop = asyncio.new_event_loop()\n asyncio.set_event_loop(self.loop)", "async def test_async_handler(dm):\n assert not dm.called_async_handler\n request = create_request(\"domain\", \"async\")\n response = create_responder(request)\n result = await dm.apply_handler(request, response)\n assert dm.called_async_handler\n assert result.dialogue_state == \"async_handler\"\n assert len(result.directives) == 1\n assert result.directives[0][\"name\"] == \"reply\"\n assert result.directives[0][\"payload\"] == {\"text\": \"this is the async handler\"}", "async def test_job_async_gen_failed(my_job_async_gen):\n\n # Set up callback to get notifications when job state changes.\n job = None\n\n def on_job_update(_job):\n \"\"\"The callback to update `job`.\"\"\"\n nonlocal job\n job = _job\n\n my_job_async_gen.set_on_update(on_job_update)\n\n # Submit a job which must fail.\n await my_job_async_gen.job(yieldsteps=1, mustfail=True)\n\n # Process ASGI messages and wait for the job to finish.\n await my_job_async_gen.process_jobs()\n\n # Check job state when job is done.\n assert job.state == 'ERROR', f'Failed job has wrong state `{job.state}`!'", "def wrap_generator(func):\n\n async def _wrapped(*a, **k):\n r, ret = None, []\n gen = func(*a, **k)\n while True:\n try:\n item = gen.send(r)\n except StopIteration:\n break\n if inspect.isawaitable(item):\n r = await item\n else:\n r = item\n ret.append(r)\n\n if len(ret) == 1:\n return ret.pop()\n return ret\n\n return _wrapped", "def generator_checker_py2(gen, gen_type, bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, _recursion_check):\n initialized = False\n sn = None\n while True:\n a = gen.send(sn)\n if initialized or not a is None:\n if not gen_type.__args__[0] is Any and \\\n not _isinstance(a, gen_type.__args__[0], bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, 
_recursion_check):\n tpa = deep_type(a)\n msg = _make_generator_error_message(tpa, gen, gen_type.__args__[0],\n 'has incompatible yield type')\n _raise_typecheck_error(msg, True, a, tpa, gen_type.__args__[0])\n initialized = True\n sn = yield a\n if not gen_type.__args__[1] is Any and \\\n not _isinstance(sn, gen_type.__args__[1], bound_Generic, bound_typevars,\n bound_typevars_readonly, follow_fwd_refs, _recursion_check):\n tpsn = deep_type(sn)\n msg = _make_generator_error_message(tpsn, gen, gen_type.__args__[1],\n 'has incompatible send type')\n _raise_typecheck_error(msg, False, sn, tpsn, gen_type.__args__[1])", "async def test_job_gen_done(my_job_gen):\n\n # Set up callback to get notifications when job state changes.\n job = None\n job_update_counter = 0\n\n def on_job_update(_job):\n \"\"\"The callback updates `job` and `job_update_counter`.\"\"\"\n nonlocal job, job_update_counter\n job = _job\n job_update_counter += 1\n # Make sure job state tells the job is cancelable when job is in\n # `PENDING`, `WORKING`, and `CANCELING` state and is not\n # cancelable in other states.\n if job.state in ['PENDING', 'WORKING', 'CANCELING']:\n assert job.is_cancelable, ('Job is not cancelable when it '\n 'must be cancelable!')\n else:\n assert not job.is_cancelable, ('Job is cancelable when it '\n 'must not be cancelable!')\n\n my_job_gen.set_on_update(on_job_update)\n\n # Submit a job which must finish OK.\n new_job = await my_job_gen.job(yieldsteps=1, mustfail=False)\n\n # Check that job is cancelable.\n assert new_job.is_cancelable, ('Job instance states that cancelable job'\n 'is not cancelable!')\n\n # Check job state right after job is submitted.\n assert job.state == 'PENDING', ('Submitted job has wrong state '\n f'{job.state}!')\n\n # Process ASGI messages and wait for the job to finish.\n await my_job_gen.process_jobs()\n\n # Check job state when job is done.\n assert job.state == 'DONE', f'Finished job has wrong state `{job.state}`!'\n\n # Check that job update callback has been called four times:\n # 1. job is submitted\n # 2. job switches to the working state\n # 3. job reports the progress\n # 4. 
job finishes\n assert job_update_counter == 4, 'Job updated wrong number of times!'", "def on_operation(\n self,\n ) -> AsyncIteratorOrIterator[None]: # pragma: no cover # pyright: ignore\n yield None", "async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:\n if DOMAIN not in config:\n return True\n\n hass.async_create_task(\n hass.config_entries.flow.async_init(\n DOMAIN,\n context={\"source\": config_entries.SOURCE_IMPORT},\n data=config[DOMAIN],\n )\n )\n async_create_issue(\n hass,\n HOMEASSISTANT_DOMAIN,\n f\"deprecated_yaml_{DOMAIN}\",\n breaks_in_ha_version=\"2024.2.0\",\n is_fixable=False,\n issue_domain=DOMAIN,\n severity=IssueSeverity.WARNING,\n translation_key=\"deprecated_yaml\",\n translation_placeholders={\n \"domain\": DOMAIN,\n \"integration_title\": \"Sure Petcare\",\n },\n )\n return True", "async def assert_await(source, values, exception=None):\n exception_type = type(exception) if exception else ()\n try:\n result = await source\n except StreamEmpty:\n assert values == []\n assert exception is None\n except exception_type as exc:\n assert compare_exceptions(exc, exception)\n else:\n assert result == values[-1]\n assert exception is None", "async def test_async_auto():\n\n # async_auto does not need the device to be turned on\n with patch(f\"{WinixDriver_TypeName}.auto\") as auto:\n wrapper = build_mock_wrapper()\n\n await wrapper.async_auto()\n assert auto.call_count == 1\n\n assert wrapper.is_auto\n assert not wrapper.is_manual\n assert not wrapper.is_plasma_on # unchanged\n assert not wrapper.is_sleep\n assert wrapper.get_state().get(ATTR_AIRFLOW) == AIRFLOW_LOW\n\n await wrapper.async_auto() # Calling again should not do anything\n assert auto.call_count == 1", "def test_another_loop():\n class Handler(RequestHandler):\n def get(self):\n self.write('Hello')\n\n app = Application([url('/hello', Handler)])\n\n tester1 = Tester(app)\n with tester1:\n response = yield tester1.http_client.fetch(\n tester1.url_for('/hello'))\n assert 'Hello' == text_body(response)\n\n tester2 = Tester(app)\n with tester2:\n with pytest.raises(RuntimeError):\n yield tester2.http_client.fetch(\n tester2.url_for('/hello'))", "async def no_sleep_coro():\n pass", "async def test_streaming_audio_oserror(\n hass: HomeAssistant, init_wyoming_wake_word\n) -> None:\n entity = wake_word.async_get_wake_word_detection_entity(\n hass, \"wake_word.test_wake_word\"\n )\n assert entity is not None\n\n async def audio_stream():\n yield b\"chunk1\", 1000\n\n mock_client = MockAsyncTcpClient(\n [Detection(name=\"Test Model\", timestamp=1000).event()]\n )\n\n with patch(\n \"homeassistant.components.wyoming.wake_word.AsyncTcpClient\",\n mock_client,\n ), patch.object(mock_client, \"read_event\", side_effect=OSError(\"Boom!\")):\n result = await entity.async_process_audio_stream(audio_stream())\n\n assert result is None", "def to_asyncio_future(tornado_future):\n ...", "def test_generator_without_iterable_len(self):\n with self.assertRaises(ValueError):\n next(chunk_tasks(iter([]), n_splits=1))", "async def main():\n\n # provide greetings for the program\n # print out program heading (using multi-line statement)\n programHeading = \"PROGRAM BEGINS BELOW (Python Version {})\".format(\\\n sys.version[0:sys.\\\n version.index(\" \")])\n print(programHeading)\n print('=' * len(programHeading))\n programHeading = \"PYTHON ASYNC GENERATOR EVENT-DRIVEN PROGRAMMING\"\n print(programHeading)\n print('=' * len(programHeading))\n # end of greetings\n \n # use an async for-loop to get the generated 
random int\n async for number in getrandom_number():\n # print out the returned number\n print(\"Random Number: {}\".format(number))" ]
[ "0.61319363", "0.59815097", "0.5970306", "0.5963432", "0.59373856", "0.58596766", "0.58034384", "0.5793992", "0.5694973", "0.56823844", "0.5653525", "0.5652958", "0.56505305", "0.5636267", "0.55687475", "0.5558097", "0.54776096", "0.5461566", "0.54609054", "0.5459655", "0.54506326", "0.54371154", "0.54043555", "0.5396213", "0.5342106", "0.53374624", "0.5333825", "0.5327383", "0.53271013", "0.5325856", "0.53225374", "0.5287759", "0.52801925", "0.5255401", "0.5251418", "0.52383506", "0.522612", "0.5222024", "0.5216242", "0.5208702", "0.51990503", "0.5185735", "0.5185049", "0.51801395", "0.5169733", "0.5165952", "0.51497877", "0.5148263", "0.5144793", "0.51326674", "0.51310563", "0.513002", "0.51118046", "0.5102958", "0.5100402", "0.5090775", "0.5090469", "0.5079873", "0.5074338", "0.5072444", "0.5071218", "0.50553465", "0.5054435", "0.5030293", "0.5029063", "0.5028045", "0.50157654", "0.50095767", "0.50065446", "0.5003959", "0.4995869", "0.49893096", "0.49831888", "0.49777228", "0.4976371", "0.49693754", "0.49637643", "0.4958732", "0.49536192", "0.49528447", "0.49511176", "0.49457535", "0.49436584", "0.49436584", "0.49436584", "0.49436584", "0.49327198", "0.49103394", "0.4908682", "0.49000746", "0.48998752", "0.48923156", "0.48858374", "0.48763052", "0.48720035", "0.486403", "0.4861865", "0.48597804", "0.48592633", "0.48560128", "0.4840864" ]
0.0
-1
Returns a pandas dataframe that has the latest record for each manual id after merging the "sheet_name" sheet of all previously indexed files present in "indexed_files_dir"
def zeta0_creation(self, indexed_files_dir, merge_columns):
    indexed_files = [file for file in os.listdir(indexed_files_dir) if not file.startswith("~")]
    indexed_files_dict = {}
    indexed_files_dict.clear()
    dateList = []
    del dateList[:]
    for file in indexed_files:
        dated = file.split('_')[-1].split('.')[0]
        dated = dated[4:] + dated[:4]
        dateList.append(dated)
        indexed_files_dict[dated] = file
    dataframes = {}
    for dated, file in indexed_files_dict.items():
        file_name = indexed_files_dir + '\\' + file
        dataframes[dated] = pd.read_excel(file_name, sheet_name=0)
        dataframes[dated]['file_date'] = dated
        dataframes[dated]['mid'] = [int(elem.split('_')[-1]) for elem in dataframes[dated]['manual_id']]
    merged_df = pd.concat([dataframes[dated] for dated in dateList], ignore_index=True)
    merged_df = merged_df.sort_values('file_date', ascending=False)
    zeta0 = merged_df.drop_duplicates(subset='manual_id', keep='first')
    pd.set_option('mode.chained_assignment', None)
    for col in zeta0.columns:
        zeta0[col] = zeta0[col].astype('str')
    zeta0 = zeta0.apply(lambda x: x.str.strip() if x.dtype == "object" else x)
    zeta0 = zeta0.sort_values('mid', ascending=True)
    if "manual_id" not in merge_columns:
        merge_columns.append("manual_id")
    zeta0 = zeta0[merge_columns]
    # print(zeta0)
    return zeta0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge_walkupseq_files(latest_tsca_id):\n paths = glob.glob('walkupseq_files/*sample_info*')\n\n dfs = []\n for f in paths:\n tmp = pd.read_table(f, encoding='latin1')\n dfs.append(tmp)\n\n df = pd.concat(dfs, axis=0)\n df.to_csv('walkupseq_files/walkupseq_all_combined_%s.txt'%latest_tsca_id, sep=\"\\t\", index=None)\n return df", "def master_idx_by_date(self, exptdate, timelapse=False):\n path = self.paths_dict[exptdate]\n datadir = os.path.join(os.path.dirname(path), 'data')\n os.path.exists(datadir)\n if not timelapse:\n fns = os.listdir(datadir)\n else:\n dirs = os.listdir(datadir)\n # Create a master idx dataframe based on the files found\n # in this experiments datadir\n strains = []\n filepaths = []\n\n for fn in fns:\n print(fn)\n if fn[-4:] == '.fcs':\n match = re.search(constants.patterns.strain_name, fn)\n if match:\n strains.append(match.group())\n filepath = os.path.join(datadir, fn)\n filepaths.append(filepath)\n\n df = pd.DataFrame({'strain': strains,\n 'filepath': filepaths})\n # Add clone indices to the dataframe\n for strain in df.strain.unique():\n\n n_clones = len(df[df.strain == strain])\n df.loc[df.strain == strain, 'clone'] = [int(idx) for idx in range(1, n_clones+1, 1)]\n\n # Lookup each strain in constants.strains_dir/Strains_Database.csv\n # and add information found in the database\n strains_df = pd.read_csv(os.path.join(constants.strains_dir, 'Strains_Database.csv'))\n\n for idx in df.index:\n strain_name = df.loc[idx, 'strain']\n if strain_name in strains_df.name.values:\n for col in strains_df.columns:\n df.loc[idx, col] = strains_df.loc[strains_df.name == strain_name, col].values[0]\n \n return df", "def get_data(self, df, latest_currency):\n file_paths = list(df[\"File\"])\n df = self.extract_df(file_paths[0])\n df = self.group_df(df)\n df = self.fill_league_currency(df, latest_currency)\n for file_path in file_paths[1:]:\n league = self.extract_df(file_path)\n league_grp = self.group_df(league)\n league_grp = self.fill_league_currency(league_grp, latest_currency)\n df = df.join(league_grp)\n df = df.reset_index(drop=True)\n return df", "def get_latest_league_data(self, df):\n max_date = pd.to_datetime(df[\"Date\"]).max()\n df = df[df[\"Date\"] == max_date]\n [latest_league_file_dir] = df[\"File\"].values\n df = self.extract_df(latest_league_file_dir)\n return df", "def get_dataframe(self):\n for i, study_id in enumerate(self.studies_to_combine):\n copy = repr(self.original_study_location).strip(\"'\")\n study_location = copy.replace(\"MTBLS1\", study_id)\n\n for maf in self.sort_mafs(study_location, study_id):\n maf_temp = None\n try:\n maf_temp = pandas.read_csv(os.path.join(study_location, maf), sep=\"\\t\", header=0, encoding='unicode_escape')\n except pandas.errors.EmptyDataError as e:\n logger.error(f'EmptyDataError Issue with opening maf file {maf}: {str(e)}')\n self.unopenable_maf_register.append(maf)\n continue\n except Exception as e:\n logger.error(f'Issue with opening maf file {maf}, cause of error unclear: {str(e)}')\n self.unopenable_maf_register.append(maf)\n continue\n\n cleanup_function = getattr(DataFrameUtils, f'{self.method}_maf_cleanup')\n maf_temp = cleanup_function(maf_temp, study_id, maf)\n maf_as_dict = totuples(df=maf_temp, text='dict')['dict']\n\n yield maf_as_dict", "def generate_postprocessed_files():\n get_excel_file = pd.ExcelFile('global_output.xlsx')\n get_sheet_names = get_excel_file.sheet_names\n\n writer = pd.ExcelWriter('master_ouput.xlsx', engine='xlsxwriter')\n for sheet in get_sheet_names:\n try:\n all_data = 
pd.DataFrame()\n sheetID = str(sheet)\n data = pd.read_excel('global_output.xlsx', sheet, dtype={'id': str})\n grouped_data = data.groupby(['Total Producers', 'Correct Producers Ratio', 'Collected Updates Ratio',\n 'Collected Votes Ratio', 'Collected Final Votes Ratio'], as_index=False)[\n 'Total Correct Ln(prod)',\n 'runs', 'Total Correct Ln(vote)',\n 'Runs With All Ln(prod)',\n 'Runs With All Ln(vote)',\n 'Runs With > 50% Correct', 'Runs With = Cn'].sum()\n\n grouped_data['num_correct_producers_Ln_prod'] = grouped_data['Total Correct Ln(prod)'] / grouped_data[\n 'runs']\n grouped_data['num_correct_producers_Ln_vote'] = grouped_data['Total Correct Ln(vote)'] / grouped_data[\n 'runs']\n grouped_data['percentage_for_50_%'] = (grouped_data['Runs With > 50% Correct'] / grouped_data['runs']) * 100\n grouped_data['Percentage Runs With = Cn'] = (grouped_data['Runs With = Cn'] / grouped_data['runs']) * 100\n\n all_data = all_data.append(grouped_data, ignore_index=True)\n\n all_data.to_excel(writer, sheet_name=sheet)\n except KeyError:\n continue\n writer.save()\n print(\"Merged File\")", "def auto_search_write(self, auto_search_result_df, out_csv):\n self.logger.info('Starting auto search and write')\n all_result_ids = auto_search_result_df['RESULT_ID'].unique()\n\n # validation of df structure\n required_col = ['RESULT_ID', 'SERIES_ID', 'RESULT_SERIES_SEQ_ID', 'QUERY_MOL_ID', 'RESULT_MOL_ID',\n 'RESULT_CONTEXT_ID', 'QUERY_FRAG_ID', 'QUERY_MOL_ID', 'QUERY_CONTEXT_ID', 'RESULT_FRAG_ID',\n 'QUERY_ORDER', 'RESULT_MOL_ACTIVITY']\n\n for col in required_col:\n if col not in auto_search_result_df.columns:\n raise Exception(\"Input data table does not have required columns: %s\" % col)\n\n # catch for empty table\n if auto_search_result_df.shape[0] == 0:\n print (\"No results found\")\n return False\n\n iteration = 1\n return_df = None\n\n for result_id in all_result_ids:\n\n self.logger.info(\"Result, series ID %s from table size %s: \" % (result_id, auto_search_result_df.shape[0]))\n\n sub_series_df = auto_search_result_df[auto_search_result_df['RESULT_ID'] == result_id]\n\n # get the original query mol_id_list in it's original query order\n # it can be mis-ordered due to strict_order=False param on the search method\n mol_id_list = list(zip(sub_series_df['QUERY_MOL_ID'].tolist(), sub_series_df['QUERY_ORDER'].tolist()))\n mol_id_list = sorted(mol_id_list, key=lambda xx: xx[1])\n mol_id_list = [x[0] for x in mol_id_list if x[1] > 0]\n\n self.logger.debug('Merging results to CSV frame for iteration %s and dataframe %s' %\n (iteration, str(sub_series_df.shape)))\n\n if iteration == 1:\n return_df = self.return_scored_series_dataframe(mol_id_list, sub_series_df, return_df, append=False)\n self.logger.debug('First iteration, sized at %s' % str(return_df.shape))\n iteration += 1\n else:\n # as above but append=True\n return_df = self.return_scored_series_dataframe(mol_id_list, sub_series_df, return_df, append=True)\n self.logger.debug('Merge operation, sized at %s' % str(return_df.shape))\n iteration += 1\n\n # return_df = self.enumerate_products(return_df, 'QUERY_MOL_CONTEXT', 'NEW_FRAG_R')\n\n return_df.to_csv(out_csv, index=False, float_format='%.3f') # , header=True)\n self.logger.info('Completed write of auto_search results')", "def history_clones(file, ht_df):\n if os.path.isfile(file):\n # if the file exists, we merge\n print(file + ' found, merging')\n df_file = pd.read_csv(file)\n\n ht_df['timestamp'] = pd.to_datetime(ht_df['timestamp']).dt.date\n\n df_file = pd.concat([df_file, ht_df])\n 
df_file['timestamp'] = df_file['timestamp'].astype(str)\n\n df_file.sort_values('timestamp', inplace=True)\n print(df_file.to_string())\n # we can't just drop the first instance: for the first day, we'll loose data.\n # so keep max value per date\n\n #df_file.drop_duplicates(subset=['timestamp'], keep='last', inplace=True)\n df_file = df_file.groupby('timestamp')[['uniques', 'count']].agg(['max']).reset_index()\n\n df_file.columns = df_file.columns.droplevel(level=1)\n #print(df_file.to_string())\n #print(df_file.columns)\n df_file.to_csv(file, index=False)\n\n else:\n # otherwise, just dump the df\n print('There is no file to merge, dumping df to ' + file)\n ht_df.to_csv(file, index=False)", "def merge_all_data(self):\n\n logging.info('***** Starting the merging process merge_all_data')\n\n \"\"\" All possible unique_dates to loop on \"\"\"\n date_times = self.merged_unique_dates\n date_times.sort()\n date_times = np.array(date_times) \n\n \"\"\" List storing the indices of the date_index of the merged dataset \"\"\"\n all_combined_obs , all_combined_head, all_combined_era5fb , combined_indices , combined_date_time, = [] , [] , [] , [] , []\n best_ds_list = [] \n source_files = []\n station_configurations = []\n\n \"\"\" The items contained in the lists in the list below can be removed from the list when the record that was previously stored is removed. \"\"\"\n all_list = [all_combined_obs , all_combined_head, all_combined_era5fb , combined_indices , combined_date_time, best_ds_list, source_files , station_configurations ] # holder of all the above lists\n all_list_name = ['all_combined_obs' , 'all_combined_head', 'all_combined_era5fb' , 'combined_indices' , 'combined_date_time' , 'best_ds_list', 'source_files' ] \n \n removed_record, kept_record = [], []\n \n \"\"\" Dictionary that will contain the merged file. 
\"\"\" \n # rand = datetime.strptime('1981-01-03 12:00:00', '%Y-%m-%d %H:%M:%S') \n #dt_bestds_dic = {} # store the selected best dataset for each dt \n #date_times=date_times[0:30000]\n tot = len(date_times)\n tt=time.time()\n print('*** Merging ' , tot, ' records ***')\n \n early_datasets = True\n \n self.processed_dt = [] \n \n for dt, c in zip(date_times, range(tot) ): # loop over all the possible date_times \n\n if (c+1)%1000==0:\n print('Analize : ', str(c+1) , '/', str(tot) , ' ', dt , ' ',\n now(time.time()),'{:5.3f}'.format(time.time()-tt ))\n\n delete = self.delete_ds(dt) # check if there is a dataset to delete \n \n \"\"\" Finding if this record is the same as the previous one analyzed, according to the given time_shift \"\"\"\n if c == 0:\n is_same_record = False\n else:\n is_same_record = self.is_same_record( time_shift = self.hour_time_delta , dt = dt)\n \n \"\"\" Updating list of processed datetimes \"\"\"\n self.processed_dt.append(dt) # cannot put it before the check_timeshift or it will check itself \n\n \n cleaned_df_container = {} \n all_len = [] # will hold the length of all the obs_tabs \n \n for k in self.dataset_per_dt[dt].keys() : # checking the list of available datasets \n ''' {'era5_2': ['example_stations/0-20000-0-82930_era5_2_harvested_era5.conv._1:82930.gz.nc', \n 'example_stations/0-20000-0-82930_era5_2_harvested_era5.conv._82930.gz.nc']}\n ''' \n for F in self.dataset_per_dt[dt][k]: # checking the list of available files for the dataset\n \n if data[k][F][\"counter\"] %self.slice_size==0 or data[k][F][\"counter\"] == 0: # loading the data only at specific slices \n load = self.load_obstab_feedback_sliced(datetime=dt, dataset=k, file = F)\n \n data[k][F][\"counter\"] = data[k][F][\"counter\"] + 1 \n \n obs_tab, era5fb_tab = self.make_obstab_era5fb_dic(dataset = k , date_time = dt, File = F )\n\n if len(obs_tab['date_time'][:])==0: # go to next file if obs_tab is empty \n #print('ZERO length')\n continue \n\n all_len.append( len(obs_tab['date_time'][:] ) )\n \n if k not in cleaned_df_container.keys():\n cleaned_df_container[k] = {}\n\n cleaned_df_container[k][F] = {}\n cleaned_df_container[k][F]['obs_tab'] = obs_tab # cleaned dataframe \n cleaned_df_container[k][F]['era5fb_tab'] = era5fb_tab # cleaned dataframe \n \n \"\"\" Merging the different records found in the sifferent sources \"\"\"\n if bool(all_len): # skipping empty container dictionary. 
At this point I certainyl have one valid record \n best_ds, combined_obs_tab, combined_era5fb_tab, combined_head_tab, selected_file, best_file = self.combine_record(dt, container = cleaned_df_container)\n \n if is_same_record: # decide what to keep in case of same record\n temporary_previous = all_combined_obs[-1] # keep the temporary previous record \n\n if best_ds in ['era5_1','era5_2']: # best_ds from era5\n if best_ds_list[-1] not in ['era5_1','era5_2']: # remove previous non era5_1 or era5_2 record \n for lista in all_list:\n lista.pop() \n #removed_record.append(temporary_previous)\n #kept_record.append(combined_obs_tab) \n\n elif best_ds_list[-1] in ['era5_1','era5_2']:\n if len(combined_obs_tab) <= len(all_combined_obs[-1] ):\n #kept_record.append(temporary_previous) \n #removed_record.append(combined_obs_tab)\n continue # nothing to do, will keep the previous records -> go to next dt \n \n else: # case where both the current and previous are from era5_1 and era5_2, but the previous has smaller number of data \n for lista in all_list:\n lista.pop() \n #removed_record.append(temporary_previous)\n #kept_record.append(combined_obs_tab) \n \n else: # best_ds not from era5\n if best_ds_list[-1] in ['era5_1','era5_2']:\n #print('This best ds is ' , best_ds , ' but I will keep ' , best_ds_list[-1] )\n #kept_record.append(temporary_previous) \n #removed_record.append(combined_obs_tab) \n continue \n \n else:\n if len(combined_obs_tab) < len(all_combined_obs[-1] ):\n #kept_record.append(temporary_previous) \n #removed_record.append(combined_obs_tab) \n continue # nothing to do, will keep the previous records -> go to next dt \n \n elif len(combined_obs_tab) > len(all_combined_obs[-1] ): # remove previous, keep current \n for lista in all_list:\n lista.pop() \n #kept_record.append(combined_obs_tab) \n #removed_record.append(temporary_previous)\n \n elif len(combined_obs_tab) == len(all_combined_obs[-1] ): # prefer igra2, otherwise\n if best_ds == 'igra2':\n for lista in all_list:\n lista.pop() \n #removed_record.append(temporary_previous)\n #kept_record.append(combined_obs_tab) \n \n else: # case where data source is not important, I keep the previous and do nothing \n #kept_record.append(temporary_previous) \n #removed_record.append(combined_obs_tab) \n continue \n \n else: # not the same record, nothing special to do, keep both previous and current \n pass \n else:\n print(' Found an empty record / time shifted record ')\n continue\n \n\n \"\"\" Fill the best_ds list \"\"\"\n best_ds_list.append(best_ds)\n\n \"\"\" Storing the selected file for the source_configuration \"\"\"\n source_files.append(selected_file)\n \"\"\" Selecting the station_configuration \"\"\"\n station_configurations.append(self.data[best_ds][best_file]['station_configuration'] )\n \n \"\"\" Storing the combined era5fb, header and observations tables\"\"\"\n all_combined_era5fb.append(combined_era5fb_tab)\n all_combined_obs .append(combined_obs_tab)\n \n primary, name = self.data[best_ds][best_file]['station_configuration']['primary_id'][0] , self.data[best_ds][best_file]['station_configuration']['station_name'][0] \n #combined_head_tab['primary_station_id'] = [ primary ] * len( combined_head_tab ) \n #combined_head_tab['station_name'] = [ name ] * len( combined_head_tab ) \n \n combined_head_tab['primary_station_id'] = np.array( [primary] )\n combined_head_tab['station_name'] = np.array( [name] )\n \n all_combined_head .append(combined_head_tab)\n\n \"\"\" Dictionary to fill the best_ds for duplicates \"\"\"\n 
#dt_bestds_dic[dt] = {}\n #dt_bestds_dic[dt]['best_ds'] = best_ds\n #dt_bestds_dic[dt]['len'] = len(combined_obs_tab['date_time'])\n\n \"\"\" New merged recordindex and recordtimestamps indices \"\"\"\n combined_indices.append(len(combined_obs_tab['date_time'])) \n combined_date_time.append(dt)\n\n del cleaned_df_container \n \n \n \n #print(blue + 'Memory used after deleting the cleaned_df_container: ', process.memory_info().rss/1000000000 , cend)\n\n \"\"\" Removing remaining loaded df \"\"\"\n for k in self.datasets_keys:\n for F in self.datasets[k]:\n try:\n del data[k][F]['era5fb_tab']\n print('=== removed era5fb ' , k , F )\n except:\n pass\n try:\n del data[k][F]['observations_table']\n print('=== removed obstab ' , k , F ) \n except:\n pass\n \n \n \"\"\" Saving a numpy dictionary \"\"\"\n print(\" === Saving the numpy dictionary of removed and kept records +++ \")\n #dic_records = { 'kept' : kept_record , 'removed': removed_record }\n #np.save(self.station + '_time_shift_removed_kept.npy',dic_records )\n \n \n \"\"\" Storing the merged date_time values and indices \"\"\"\n di=xr.Dataset()\n combined_date_time = np.array(combined_date_time)\n di['recordtimestamp'] = ( {'recordtimestamp' : combined_date_time.shape } , combined_date_time )\n di['recordtimestamp'].attrs['units']='seconds since 1900-01-01 00:00:00'\n\n \"\"\" Creating the merged indices mi \"\"\"\n mi = [] \n mi.append(0)\n for i in range(len(combined_indices)):\n mi.append( combined_indices[i] + mi[-1] )\n mi.pop()\n pop = np.array(mi) # removing last unecessary index \n di['recordindex'] = ( {'recordindex' : pop.shape } , pop )\n\n\n \"\"\" Creating the combined data \"\"\"\n logging.debug('*** Concatenating the observations_table ' ) \n combined_obs = {}\n #### Writing combined observations_table dic\n logging.info(' ***** Writing the observations_table to the netCDF output ***** ' ) \n for k in all_combined_obs[0].keys(): \n a = np.concatenate([all_combined_obs[i][k][:] for i in range(len(all_combined_obs))])\n if k == 'date_time':\n combined_obs[k]= a \n self.tot_records = len(combined_obs[k])\n self.write_merged(content = 'observations_table', table= {k:a})\n #logging.info('*** Written observations table %s: ', k)\n\n\n #self.tot_records = len(combined_obs['date_time'])\n del all_combined_obs\n print(blue + 'Memory used after deleting all_combined_obs dic: ', process.memory_info().rss/1000000000 , cend )\n \n dateindex = combined_obs['date_time']//86400 \n date_times, indices, counts = np.unique(dateindex, return_counts = True, return_index= True) \n di['dateindex'] = ( {'dateindex' : indices.shape } , indices ) # considers the day only \n del combined_obs\n \n combined_era5fb = {}\n #### Writing combined era5fb_table dic \n for k in all_combined_era5fb[0].keys():\n try:\n #combined_era5fb[k]=np.concatenate([all_combined_era5fb[i][k][:] for i in range(len(all_combined_era5fb))])\n #self.write_merged(content = 'era5fb', table= {k:combined_era5fb[k]})\n \"\"\" try replacing , remove combined_era5fb = {} \"\"\"\n a = np.concatenate([all_combined_era5fb[i][k][:] for i in range(len(all_combined_era5fb))])\n self.write_merged(content = 'era5fb', table= {k:a})\n logging.debug('*** Written era5fb %s: ', k)\n except:\n print(\"FAILED feedback variable \" , k)\n\n del all_combined_era5fb\n print(blue + 'Memory used after deleting era5fb_tab dic: ', process.memory_info().rss/1000000000 , cend)\n\n\n #### Writing combined header_table dic \n for k in all_combined_head[0].keys():\n print('head variable is', k )\n if ( k == 
'comments' or k == 'history'):\n continue\n try:\n tab=np.concatenate([all_combined_head[i][k][:] for i in range(len(all_combined_head))])\n self.write_merged(content = 'header_table', table= {k: tab}) # { key: np.array([])}\n logging.info('*** Written header table %s: ', k)\n except:\n print('FFF FAILED variable in header table', k )\n\n del all_combined_head\n print(blue + 'Memory used after deleting all_merged head_tab dic: ', process.memory_info().rss/1000000000 , cend)\n \n self.write_merged(content = 'recordindex', table = di) \n self.write_merged(content = 'cdm_tables', table= '')\n\n\n source_conf=xr.Dataset()\n source_files = np.array(source_files).astype(dtype='|S70')\n source_conf['source_file'] = ( {'source_file' : source_files.shape } , source_files )\n self.write_merged(content = 'source_configuration', table= source_conf )\n\n print(0)\n\n\n \"\"\" Concatenation of station_configurations \"\"\"\n station_conf = pd.concat( station_configurations ) \n for k in station_conf.columns:\n try:\n a =np.array( station_conf[k])\n self.write_merged(content = 'station_configuration', table= {k:a})\n logging.debug('*** Written station_configuration %s: ', k)\n except:\n print(\" Failed station_configuration \" , k )\n \n return 0", "def get_newest_df(watchfolder, optional_column_names=[], existing_df=None):\n from measurement_directory import run_ids_from_txt, run_ids_from_filenames\n import os\n bc = load_breadboard_client()\n run_ids = []\n files = [filename for filename in os.listdir(watchfolder)]\n files_spe = []\n for file in files:\n if '.spe' in file:\n files_spe.append(file)\n elif 'run_ids.txt' in file:\n run_ids += run_ids_from_txt(\n os.path.abspath(os.path.join(watchfolder, file)))\n if existing_df is None:\n run_ids += run_ids_from_filenames(files_spe)\n df = bc.get_runs_df_from_ids(\n run_ids, optional_column_names=optional_column_names)\n else:\n run_ids = list(set(run_ids_from_filenames(files_spe)).union(set(run_ids)).difference(\n set(list(existing_df['run_id']))))\n if len(run_ids) > 0:\n df = existing_df.append(bc.get_runs_df_from_ids(run_ids,\n optional_column_names=optional_column_names),\n sort=False,\n ignore_index=True)\n else:\n df = existing_df\n\n def custom_sort(df):\n # takes in df and returns same df with user-interaction columns first\n #['run_id','badshot','manual_foo1','manual_foo2', 'listboundvar1', etc.]\n cols = list(df.columns)\n manual_cols = []\n for col in cols:\n if 'manual' in col:\n manual_cols += [col]\n manual_cols = sorted(manual_cols)\n user_interact_cols = ['run_id'] + ['badshot'] + manual_cols\n for col in user_interact_cols:\n cols.remove(col)\n return df[user_interact_cols + cols]\n\n df = custom_sort(df)\n df.sort_values(by='run_id', ascending=False, inplace=True)\n return df", "def combined_df(self) -> pd.DataFrame:\n return pd.concat([self.data, self.latest_data.reset_index()], ignore_index=True)", "def merge_all_data(self):\n \n logging.info('***** Starting the merging process ')\n\n \n \"\"\" All possible unqiue_dates to loop on \"\"\"\n date_times = self.merged_unique_dates\n date_times.sort()\n \n date_times = np.array(date_times) \n \n \"\"\" List storing the indices of the date_index of the merged dataset \"\"\"\n all_merged_obs , all_merged_head, all_merged_fb , merged_indices , merged_date_time, mi= [] , [] , [] , [] , [], []\n \n \"\"\" Dictionary that will contain the merged file. 
\"\"\" \n # rand = datetime.strptime('1981-01-03 12:00:00', '%Y-%m-%d %H:%M:%S') \n #for dt in date_times[3008:3100]: # loop over all the possible date_times \n \n tot = len(date_times)\n for dt, c in zip(date_times[3008:3100], range(tot) ): # loop over all the possible date_times \n #print('Analize : ', str(c) , '/', str(tot) , ' ', dt , ' ', now(time.time()) )\n \n logging.info('Analize : %s %s /', str(c) , str(tot) )\n \n cleaned_df_container = {} \n chunk = ''\n \n for k in self.dataset_per_dt[dt] : # checking the list of available datasets \n \n index, index_up = self.unique_dates[k]['indices'][dt]['low'] , self.unique_dates[k]['indices'][dt]['up'] # extracting the exact chunk of the dataframe where the data of this are stored \n \n chunk = self.data[k]['dataframe'].iloc[index:index_up]\n \n chunk['date_time'] = dt\n chunk = self.clean_dataframe(chunk) # cleaning from wrong or nan values \n \n if len(chunk)==0:\n continue\n \n cleaned_df_container[k] = {} \n cleaned_df_container[k]['df'] = chunk # cleaned dataframe \n\n \n if all(value == 0 for value in cleaned_df_container.values()):\n logging.debug('No data were found! ')\n continue\n \n merged_observations_table, best_ds, duplicates, header = self.merge_record(dt, container = cleaned_df_container)\n \n merged_observations_table['source_id'] = best_ds # adding extra columns i.e. chosen dataset, other dataset with data, number of pressure levels \n merged_observations_table['z_coordinate_type'] = 1 # only pressure inn [Pa] available at the moment. Check z_coordinate_type table for the correpsonding code \n \n \n \"\"\" Extracting the merged feedback, flagging the advanced_observations_feedback flag = 1\"\"\"\n feedback, merged_obs = self.get_reanalysis_feedback( dt, merged_observations_table , reanalysis='era5fb', best_ds= best_ds)\n all_merged_fb.append(feedback) \n all_merged_obs.append(merged_obs)\n \n \"\"\" Setting the correct report_id in the header table \"\"\"\n merged_report_id = merged_obs['report_id'].values[0] # same report_id as calculated in the observation_table \n header['report_id'] = merged_report_id \n all_merged_head.append(header)\n \n #if len(merged_observations_table) != len(header): \n #print('lengths check best ds: ', best_ds , ' obs_merged: ' , len(merged_observations_table), ' feedback:' , len(feedback) , ' header: ' , len(header) )\n #print( len(merged_observations_table), ' ' , len(feedback) )\n\n \"\"\" New merged recordindex and recordtimestamps indices \"\"\"\n merged_indices.append(len(merged_observations_table)) \n merged_date_time.append(dt)\n\n\n \"\"\" Storing the merged date_time values and indices \"\"\"\n di=xr.Dataset()\n merged_date_time = np.array(merged_date_time)\n di['recordtimestamp'] = ( {'recordtimestamp' : merged_date_time.shape } , merged_date_time )\n \n \n \"\"\" Creating the merged indices \"\"\"\n mi.append(0)\n for i,ind in zip(merged_indices[0:], range(len(merged_indices[0:]) ) ) :\n mi.append(mi[ind] + i )\n mi = np.array(mi) \n di['recordindex'] = ( {'recordindex' : mi.shape } , mi )\n self.MergedRecordIndex = di \n \n \n \"\"\" Creating the merged dataframes \"\"\"\n logging.debug('*** Concatenating the observations_table dataframes' ) \n merged_obs = pd.concat (all_merged_obs)\n \n self.MergedObs = merged_obs \n logging.debug('*** Finished concatenating theobservations_table dataframes' ) \n \n logging.debug('*** Concatenating the header_table dataframes' ) \n merged_hd = pd.concat (all_merged_head)\n self.MergedHead = merged_hd \n logging.debug('*** Finished concatenating 
the header_table dataframes' ) \n \n logging.debug('*** Concatenating the feedback dataframes' ) \n merged_fb = pd.concat (all_merged_fb)\n self.MergedFeedback = merged_fb \n logging.debug('*** Finished concatenating the feedback dataframes' ) \n\n return 0", "def main():\n data_dir = \".\\\\excel\\\\data\\\\\"\n archive_dir = \".\\\\excel\\\\archive\\\\\"\n xl_list = glob.glob(data_dir + \"*.xlsx\")\n\n try:\n for xl_file in xl_list:\n workbook = pd.ExcelFile(xl_file)\n\n if fnmatch.fnmatch(xl_file.lower(), \"*base*.xlsx\") == True:\n print(f\"Creating DataFrame for '{xl_file}'...\")\n \n df_base = workbook.parse(0, skiprows=1, header=None)\n df_base.columns = [\"dept\", \n \"category\", \n \"itemDesc\", \n \"itemCode\", \n \"itemSize\", \n \"pvtLblFlag\", \n \"buyerCode\", \n \"invUnitShipped\", \n \"invCaseShipped\", \n \"storeOrdProdQty\", \n \"shortedQty\", \n \"grossSvcLvl\", \n \"netSvcLvl\"]\n df_base[\"itemCode\"] = df_base[\"itemCode\"].map('{:0>6}'.format)\n df_base[\"buyerCode\"] = df_base[\"buyerCode\"] * 10\n df_base[\"itemDesc\"] = df_base[\"itemDesc\"] + \" \" + df_base[\"itemSize\"]\n \n print(f\"'{xl_file}' Successfully processed\\n\") \n elif fnmatch.fnmatch(xl_file.lower(), \"*short*.xlsx\") == True:\n print(f\"Creating DataFrame for '{xl_file}'...\")\n \n df_shorts = workbook.parse(0, skiprows=1, header=None)\n df_shorts.columns = [\"itemDesc\", \n \"itemCode\", \n \"yesterdayOOS\"]\n df_shorts[\"itemCode\"] = df_shorts[\"itemCode\"].map('{:0>6}'.format)\n df_shorts.drop(columns=[\"itemDesc\"], inplace=True)\n \n print(f\"'{xl_file}' Successfully processed\\n\") \n elif fnmatch.fnmatch(xl_file.lower(), \"*reason*.xlsx\") == True:\n print(f\"Creating DataFrame for '{xl_file}'...\")\n \n df_reason = workbook.parse(0, skiprows=2, header=None)\n df_reason.columns = [\"dept\", \n \"category\", \n \"itemDesc\", \n \"itemCode\", \n \"outOfStock\", \n \"manufacIssue\",\n \"disc\",\n \"other\",\n \"newItemIssue\"]\n df_reason[\"itemCode\"] = df_reason[\"itemCode\"].map('{:0>6}'.format)\n df_reason[\"max\"] = df_reason[[df_reason.columns[4], \n df_reason.columns[5], \n df_reason.columns[6], \n df_reason.columns[7], \n df_reason.columns[8]]].max(axis=1)\n df_reason.loc[df_reason[\"max\"] == df_reason[\"outOfStock\"], \"primaryReason\"] = \"Out Of Stock\"\n df_reason.loc[df_reason[\"max\"] == df_reason[\"manufacIssue\"], \"primaryReason\"] = \"Manufacturer Issue\"\n df_reason.loc[df_reason[\"max\"] == df_reason[\"disc\"], \"primaryReason\"] = \"Discontinued\"\n df_reason.loc[df_reason[\"max\"] == df_reason[\"other\"], \"primaryReason\"] = \"Other\"\n df_reason.loc[df_reason[\"max\"] == df_reason[\"newItemIssue\"], \"primaryReason\"] = \"New Item Issue\"\n df_reason.sort_values(by=[\"max\"], ascending=False, inplace=True)\n df_reason.drop(columns=[\"dept\", \n \"category\", \n \"itemDesc\", \n \"outOfStock\", \n \"manufacIssue\", \n \"disc\", \n \"other\", \n \"newItemIssue\", \n \"max\"], inplace=True)\n \n print(f\"'{xl_file}' Successfully processed\\n\") \n elif fnmatch.fnmatch(xl_file.lower(), \"*export*.xlsx\") == True:\n print(f\"Creating DataFrame for '{xl_file}'...\")\n \n to_drop = [\"14:HATFIELD NORTH\", \"1:BRATTLEBORO\"]\n \n df_cs = workbook.parse(0, skiprows=3, skipfooter=20, header=None)\n df_cs = df_cs[~df_cs[7].isin(to_drop)]\n df_cs = df_cs.filter([0, 14, 15, 17, 34])\n df_cs.columns = [\"custCode\", \n \"poDueDate\", \n \"poApptDate\", \n \"inStock\", \n \"daysOOS\"]\n df_cs[\"itemCode\"] = df_cs[\"custCode\"].astype(str).str[9:15]\n 
df_cs.drop(columns=[\"custCode\"], inplace=True)\n df_cs.drop_duplicates(inplace=True)\n\n print(f\"'{xl_file}' Successfully processed\\n\")\n\n for data_file in os.listdir(data_dir):\n if fnmatch.fnmatch(data_file, \"*.xlsx\") == True:\n print(f\"Deleting '{data_file}'...\\n\")\n os.remove(data_dir + data_file)\n\n df_join_1 = df_base.merge(df_reason, how=\"left\", on=\"itemCode\")\n df_join_2 = df_join_1.merge(df_shorts, how=\"left\", on=\"itemCode\")\n df_join_3 = df_join_2.merge(df_cs, how=\"left\", on=\"itemCode\")\n \n print(\"Exporting to Excel...\\n\")\n df_join_3.to_excel(f\".\\\\excel\\\\archive\\\\oos-data-{timestamp()}.xlsx\", index=False)\n\n sys.exit(0)\n except:\n try:\n df_join_1 = df_base.merge(df_reason, how=\"left\", on=\"itemCode\")\n df_join_2 = df_join_1.merge(df_shorts, how=\"left\", on=\"itemCode\")\n\n df_join_2[\"poDueDate\"] = \"NO CS DATA\"\n df_join_2[\"poApptDate\"] = \"NO CS DATA\"\n df_join_2[\"inStock\"] = \"NO CS DATA\"\n df_join_2[\"daysOOS\"] = \"NO CS DATA\"\n \n print(\"Exporting to Excel...\\n\")\n df_join_2.to_excel(f\".\\\\excel\\\\archive\\\\oos-data-{timestamp()}.xlsx\", index=False)\n except:\n if not os.path.exists(archive_dir):\n os.makedirs(archive_dir)\n if not os.path.exists(data_dir):\n os.makedirs(data_dir)\n\n sys.exit(1)", "def aggregate_results(output_files, agg_filename):\n\n print(file_marker + \"STARTING AGGREGATION\")\n feather_files = output_files\n\n results = []\n for i in range(len(feather_files)):\n print(file_marker + str(i))\n x = pd.read_feather(feather_files[i])\n results.append(x)\n \n overall_results = pd.concat(results, ignore_index=True, sort=False)\n opt_diff_results = overall_results\n\n opt_diff_results.reset_index(inplace=True, drop=True) \n # drop=True: column 'index' gets removed\n\n opt_diff_results.to_feather(agg_filename)\n print(file_marker + \"Aggregated results saved to: \" + agg_filename)", "def get_outdoor_data(temp_dir,site):\n if site == 'berk':\n files_od = glob(join(temp_dir,'outdoor','20*.xlsx'))\n elif site == 'bus':\n files_od = glob(join(temp_dir,'outdoor','Busara*.csv'))\n else:\n raise NameError(site)\n\n dfs = []\n for f in files_od:\n if site == 'berk':\n this_df = pd.read_excel(f,sheet_name=0,usecols='B:D',index_col=0,parse_dates=True, header=1)\n elif site == 'bus':\n this_df = pd.read_csv(f,usecols=[0,1,2],index_col=0,parse_dates=True,header=2)\n \n # drop missing values that prevented conversion to float type\n if this_df.iloc[:,0].dtype != np.float64:\n this_df = this_df[this_df.iloc[:,0] != ' ']\n this_df = this_df.astype(np.float64)\n\n # correct for weird timezones in berkeley datalogger\n this_df = correct_tz(this_df,site)\n \n this_df.columns = ['T','RH']\n this_df.index.name = 'time'\n\n # convert to celsius\n this_df['T'] = (this_df['T'] - 32) * 5/9\n dfs.append(this_df)\n \n df_od = pd.concat(dfs)\n\n # drop duplicated measurements\n df_od = df_od[~df_od.index.duplicated(keep='last')].sort_index()\n \n # separate out into daily min,mean,max\n groups = df_od.groupby(df_od.index.date)\n dfs_od = {'all':df_od,\n 'min': groups.min(),\n 'mean': groups.mean(),\n 'max': groups.max()}\n \n for i in ['min','mean','max']:\n # remove first and last day to ignore days where we did not get full recording\n dfs_od[i] = dfs_od[i].iloc[1:-1,:]\n \n # name index so that we can merge onto multiIndex'd dataframe\n dfs_od[i].index.name = 'date'\n \n return dfs_od", "def combine_excel_files(end_producer, step_producer, spec):\n glob.glob(\"excel/*.xlsx\")\n timestr = get_time()\n start_producer = 
spec['num_of_producers']\n try:\n if not os.listdir('merged-excel-docs'):\n print('Folder empty no need to remove files')\n os.mkdir('merged-excel-docs')\n except FileNotFoundError:\n os.mkdir('merged-excel-docs')\n\n writer = pd.ExcelWriter('merged-excel-docs/combined-result' + timestr + '.xlsx', engine='xlsxwriter')\n for ind_p in range(start_producer, end_producer, step_producer):\n all_data = pd.DataFrame()\n sheetID = str(ind_p)\n for f in glob.glob(\"excel/*.xlsx\"):\n df = pd.read_excel(f, \"P_\" + sheetID)\n all_data = all_data.append(df, ignore_index=True)\n all_data.to_excel(writer, sheet_name=\"P_\" + sheetID)\n writer.save()", "def extract_next_day_items(filename, ids_df, date_fields=[]):\n # An empty data frame to return\n new_items_df = pd.DataFrame()\n\n next_df = pd.DataFrame()\n try:\n if date_fields:\n next_df = pd.read_csv(filename, parse_dates=date_fields,\n converters={'FLIGHT_ID': lambda x: UUID(x)},\n memory_map=True)\n else:\n next_df = pd.read_csv(filename,\n converters={'FLIGHT_ID': lambda x: UUID(x)},\n memory_map=True)\n log.info('%s read ok', filename)\n except EnvironmentError:\n log.error('could not read file: %s', filename)\n return new_items_df # return empty DataFrame\n\n # Create a new dataframe WITHOUT any items that are in ids_df\n new_next_df = next_df[(~next_df['FLIGHT_ID'].isin(ids_df.index))]\n\n # Output the new next items\n new_next_filename = 'new_' + filename\n try:\n is_bz2 = has_bz2_extension(filename)\n if is_bz2:\n new_next_filename = new_next_filename[:-BZ2_LENGTH]\n\n new_next_df.to_csv(new_next_filename, index=False,\n date_format=ISO8601_DATETIME_FORMAT)\n log.info('written file: %s', new_next_filename)\n except EnvironmentError:\n log.error('could not write file: %s', new_next_filename)\n return new_items_df # return empty DataFrame\n\n # get the new items from the next DataFrame\n new_items_df = pd.merge(ids_df, next_df, left_index=True, right_on='FLIGHT_ID')\n replace_old_flight_ids(new_items_df)\n\n return new_items_df # return new items", "def looper(path2mdbs, tablename, csv=False):\n containing_folder = path2mdbs\n contained_files = os.listdir(containing_folder)\n df_dictionary={}\n\n count = 1\n basestring = 'file_'\n for i in contained_files:\n if os.path.splitext(os.path.join(containing_folder,i))[1]=='.mdb' or os.path.splitext(os.path.join(containing_folder,i))[1]=='.accdb':\n countup = basestring+str(count)\n # df creation/manipulation starts here\n print(i)\n df = main_translate(tablename,os.path.join(containing_folder,i))\n if df is not None:\n if 'DateLoadedInDB' in df.columns:\n df['DateLoadedInDB']=df['DateLoadedInDB'].astype('datetime64')\n df['DateLoadedInDB'] = datetime.now().strftime(\"%d-%m-%Y %H:%M:%S\")\n else:\n df['DateLoadedInDB'] = datetime.now().strftime(\"%d-%m-%Y %H:%M:%S\")\n\n df['DBKey'] = os.path.split(os.path.splitext(i)[0])[1].replace(\" \",\"\")\n # df add to dictionary list\n df_dictionary[countup] = df.copy()\n else:\n pass\n count+=1\n final_df = pd.concat([j for i,j in df_dictionary.items()], ignore_index=True).drop_duplicates()\n\n return final_df if csv==False else final_df.to_csv(os.path.join(containing_folder,tablename+'.csv'))", "def merge_physdfs2(files):\n\n temp_df = pd.read_csv(files[0], index_col=False)\n columns = temp_df.columns.tolist()\n merged_df = pd.DataFrame([], columns=columns)\n\n for file in files:\n df = pd.read_csv(file, index_col=False)\n\n # add 'rat_data' column to the merged df\n root_name = file.split('/')[-1]\n df = df.assign(raw_data=root_name)\n\n # add 'exp_label' 
column to the merged df\n cell_num = ''.join(re.findall(\"cell\\d{2}\", file))\n exp = file.split('_')[1]\n exp = ''.join(re.findall(\"[a-zA-Z]+\", exp))\n\n df = df.assign(exp_label=exp)\n df = df.assign(cell_num=cell_num)\n\n merged_df = pd.concat([merged_df, df], sort=True, ignore_index=True)\n\n return merged_df", "def merge_dfs(userdf, filtered_apidf):\n userdf['SOURCE']='USER'\n filtered_apidf['SOURCE']='API'\n filtered_apidf.rename(columns={'_id': 'bids_name'}, inplace=True)\n\n merged_df = pd.concat([userdf,filtered_apidf], sort=True).fillna(0)\n # merged_df['_INDEX']=merged_df.index\n\n # merged_df_with_index = pd.DataFrame(index = merged_df.index, data= merged_df)\n return merged_df", "def exptdf(self, exptdate, **kwargs):\n if 'master_index_df' in kwargs:\n master_idx = kwargs['master_index_df']\n else:\n master_idx = self.master_idx_by_date(exptdate) \n\n sampledfs = []\n # Read in data and add identifying information\n # based on master index\n print(f'Found master index with {len(master_idx)}')\n for idx in master_idx.index:\n row = master_idx.loc[idx, :]\n print(f'Looking for data at {row.filepath}')\n\n if os.path.exists(row.filepath):\n print(f'Found data')\n sampledf = FCMeasurement(ID=f'{row.strain}-{row.clone}', datafile=row.filepath).data\n print(f'Found {len(sampledf)} measurements in this file')\n # Annotate sample df\n for col in row.index:\n sampledf.loc[:, col] = row.loc[col]\n sampledfs.append(sampledf)\n else:\n print(f'No data found')\n\n if len(sampledfs) > 0:\n exptdf = pd.concat(sampledfs, ignore_index=True)\n else:\n exptdf = None\n print(f'No data found for exptdate {exptdate}')\n\n return exptdf", "def _make_current_jfiles(self):\n res = self._db.Query(\"\"\"SELECT *\n FROM report_data_set_instance\n WHERE\n `element_id`=%s\n AND segment_value_id = %s\n ORDER BY measurement_time DESC\n LIMIT 0, 1\"\"\",(self._id, self._segment_value_id))\n if res:\n last_data_set_instance = self._db.record[0]\n last_data_set_instance_id = last_data_set_instance['report_data_set_instance_id']\n \n self._jfile.make_current_data_set(last_data_set_instance_id)\n self._jfile.make_current_saved_data_set(last_data_set_instance_id)\n\n for pivot in self._pivots:\n self._jfile.make_current_pivot_set(pivot['report_data_set_pivot_id'], last_data_set_instance_id)\n\n for chart in self._charts:\n self._jfile.make_current_chart_set(chart['report_data_set_chart_id'], last_data_set_instance_id)", "def reindex_hfd5(self):\n dfs = []\n objectpath = os.path.join(self.rootpath, self.OBJECTPATH)\n for root, dirs, files in os.walk(objectpath, topdown=False):\n for name in files:\n blob_uuid = name\n dfs.append(self.load_blob_metadata_value_df(blob_uuid))\n df = pd.concat(dfs)\n self.index.df = df\n self.index.to_hdf5(os.path.join(self.rootpath, self.INDEXFILENAME))\n return df", "def parse_directory_of_series_files(self):\n if self.series_base_dir is None or len(self.series_file_list) < 1:\n self.logger.warn('Fatal: Base Directory not set %s')\n raise Exception('Error Base Directory not set')\n\n self.logger.info('Parsing dir of files from %s' % self.series_base_dir)\n\n self.ref_series_df = pd.DataFrame([], columns=['SERIES_ID', 'SERIES_SEQ_ID', 'CONTEXT',\n 'FRAG', 'MOL_ID', 'ACTIVITY'])\n\n required_col = ['SERIES_ID', 'SERIES_SEQ_ID', 'CONTEXT', 'FRAG', 'MOL_ID', 'ACTIVITY']\n max_series_id = 0\n\n for series_file in self.series_file_list:\n\n # print series_file\n temp_df = pd.read_csv(series_file) # , index_col=False)\n # print temp_df.columns\n\n # sanity check the data table for the 
columns we need\n for col in required_col:\n if col not in temp_df.columns:\n raise Exception(\"Input CSV %s does not have required columns: %s\" % (series_file, col))\n\n # re-sequence the series ID's\n if max_series_id == 0:\n max_series_id = temp_df['SERIES_ID'].max()\n else:\n max_series_id = self.ref_series_df['SERIES_ID'].max()\n # print max_series_id\n\n temp_df['SERIES_ID'] = temp_df['SERIES_ID'] + max_series_id\n temp_df['SOURCE_FILE'] = os.path.basename(series_file)\n\n # py2>3 explicit sort=False added\n self.ref_series_df = self.ref_series_df.append(temp_df, sort=False)\n self.logger.info('Appended dataframe shape %s to master dataframe %s' %\n (str(temp_df.shape), str(self.ref_series_df.shape)))\n # print ('Appended dataframe shape %s to master dataframe %s' % (str(temp_df.shape),\n # str(self.ref_series_df.shape)))\n # print self.ref_series_df['SERIES_ID'].max()\n\n self.series_comparison_df = self.ref_series_df", "def merge_df_rows(dlist):\n\n # Create Dataframe from the dlist files\n dframe = concat(dlist, axis=0, join='outer', sort=False)\n\n # Sort the df based on the datetime index\n dframe.sort_values(by='Dates', inplace=True)\n\n # Setting Dates as the dataframe index\n dframe.set_index(['Dates'], drop=True, inplace=True)\n\n # Dropiing duplicated time points that may exist in the data\n dframe = dframe[~dframe.index.duplicated()]\n\n return dframe", "def append_score():\n score_frame = fu.read_file_to_df(working_file_url, u'企业评分')\n score_frame = score_frame.set_index(u'企业编号'.encode('utf-8'))\n\n for file_n in annual_report_indexes:\n print file_n\n\n data_frame = fu.read_file_to_df(corporation_index_file_url, file_n + '_index')\n data_frame = data_frame.set_index('Unnamed: 0')\n\n data_frame = data_frame.join(score_frame)\n\n fu.write_file(data_frame, corporation_index_file_url, file_n + '_index', index=True)\n return", "def merge(df_list):\n df_final = pd.read_csv(df_list[0])\n for ind, df in enumerate(df_list):\n if ind >= 1:\n temp_df = pd.read_csv(df_list[ind])\n temp_df = temp_df.drop(['lbl'], axis=1)\n df_final = pd.merge(df_final, temp_df, on=['author_id'])\n final_path = os.path.join(os.path.expanduser(\"~/Desktop/Age-Detection\"), \"merged-feature-collection.csv\")\n df_final.to_csv(final_path, sep=',', index=False)\n return final_path", "def refresh_final_acc_df(self, report_peak_acc=False):\n\n # build case -> group dict\n group_dict = dict()\n with open(os.path.join(self.data_dir, \"net_configs.json\"), \"r\") as json_file:\n net_configs = json.load(json_file)\n\n for g in net_configs.keys():\n cases = net_configs[g]\n case_names = cases.keys()\n \n for c in case_names:\n\n group_dict[c] = g\n\n # load current df if exists\n df_name = \"final_acc_df.csv\"\n # curr_df = pd.read_csv(os.path.join(self.df_sub_dir, df_name))\n # curr_df.drop(columns=\"Unnamed: 0\", inplace=True)\n\n acc_arr = []\n case_dict = dict()\n with open(os.path.join(self.df_sub_dir, \"case_dict.json\"), \"r\") as json_file:\n case_dict = json.load(json_file)\n\n # walk dir looking for saved net stats\n net_dir = os.path.join(self.data_dir, f\"nets\")\n for root, _, files in os.walk(net_dir):\n \n # only interested in locations files are saved\n if len(files) <= 0:\n continue\n \n slugs = root.split(\"/\")\n\n # exclude some dirs...\n if any(self.exclude_slug in slug for slug in slugs):\n continue\n\n # only latest results\n if not \"adam_lravg_nosplit\" in slugs:\n continue\n\n # consider all files...\n for filename in files:\n\n # ...as long as they are perf_stats\n if not 
\"perf_stats\" in filename:\n continue\n \n filepath = os.path.join(root, filename)\n stats_dict = np.load(filepath, allow_pickle=True).item()\n \n # extract data\n dataset = stats_dict.get(\"dataset\") if stats_dict.get(\"dataset\") is not None else \"imagenette2\"\n net_name = stats_dict.get(\"net_name\")\n train_scheme = stats_dict.get(\"train_scheme\") if stats_dict.get(\"train_scheme\") is not None else \"sgd\"\n initial_lr = stats_dict.get(\"initial_lr\") if stats_dict.get(\"initial_lr\") is not None else -1\n case = stats_dict.get(\"case\")\n sample = stats_dict.get(\"sample\")\n group = stats_dict.get(\"group\")\n if group is None:\n group = group_dict.get(case)\n modified_layers = stats_dict.get(\"modified_layers\")\n if modified_layers is not None:\n case_dict[case] = {\n \"act_fns\": modified_layers.get(\"act_fns\"),\n \"act_fn_params\": modified_layers.get(\"act_fn_params\")\n }\n\n # array containing acc/loss\n perf_stats = np.array([s for s in stats_dict.get(\"perf_stats\") if s is not None])\n if len(perf_stats) == 0:\n continue\n\n # find peak accuracy?\n try:\n\n if report_peak_acc:\n i_acc = np.argmax(perf_stats[:,0])\n else:\n i_acc = -1\n (val_acc, val_loss, train_acc, train_loss) = perf_stats[i_acc]\n\n # for learning speed\n pct_acc = (self.pct / 100.) * val_acc\n i_first = next(x for x, val in enumerate(perf_stats[:,0]) if val > pct_acc)\n \n test_acc = stats_dict.get(\"test_acc\")\n\n acc_arr.append([dataset, net_name, train_scheme, group, case, i_acc, sample, val_acc, test_acc, i_first, initial_lr])\n\n # by epoch\n n_epoch_samples = 31\n epochs = [10*i for i in range(n_epoch_samples)]\n epochs = epochs[:-1] + [int(x) for x in np.linspace(epochs[-1], len(perf_stats)-1, 5)]\n epochs = list(set(epochs))\n for epoch in epochs:\n \n try:\n (val_acc, val_loss, train_acc, train_loss) = perf_stats[epoch]\n acc_arr.append([dataset, net_name, train_scheme, group, case, epoch, sample, val_acc, None, None, initial_lr])\n except IndexError:\n break\n\n except ValueError:\n print(f\"Max entry in {case} {sample} perf_stats did not match expectations.\")\n continue\n\n # make dataframe\n acc_df = pd.DataFrame(acc_arr, columns=self.net_idx_cols+[\"val_acc\", \"test_acc\", \"epochs_past\", \"initial_lr\"])\n\n # process\n # 1. mark mixed nets\n acc_df[\"is_mixed\"] = [len(case_dict[c][\"act_fns\"]) > 1 if case_dict.get(c) is not None else False for c in acc_df[\"case\"]]\n acc_df[\"cross_fam\"] = [len(case_dict[c][\"act_fns\"]) == len(set(case_dict[c][\"act_fns\"])) if case_dict.get(c) is not None else False for c in acc_df[\"case\"]]\n\n # 2. add columns for predictions\n acc_df[\"max_pred_val_acc\"] = np.nan\n acc_df[\"linear_pred_val_acc\"] = np.nan\n acc_df[\"max_pred_val_acc_p_val\"] = np.nan\n acc_df[\"linear_pred_val_acc_p_val\"] = np.nan\n\n acc_df[\"max_pred_test_acc\"] = np.nan\n acc_df[\"linear_pred_test_acc\"] = np.nan\n acc_df[\"max_pred_test_acc_p_val\"] = np.nan\n acc_df[\"linear_pred_test_acc_p_val\"] = np.nan\n\n # index new and old without group\n # idx_no_group = list(self.net_idx_cols + [\"epoch\"])\n # idx_no_group.remove(\"group\")\n # curr_df.set_index(idx_no_group, inplace=True)\n # acc_df.set_index(idx_no_group, inplace=True)\n\n # merge new and old, preferring new\n # ndf = pd.concat([curr_df[~curr_df.index.isin(acc_df.index)], acc_df])\n\n # port over group from old df where appropriate\n # ndf[ndf.index.isin(curr_df.index)][\"group\"] = curr_df[\"group\"]\n\n # 2.9. 
index with group\n ndf = acc_df\n ndf.reset_index(drop=False, inplace=True)\n ndf.set_index(self.net_idx_cols, inplace=True)\n\n # 3. predictions for mixed cases\n mixed_df = ndf.query(\"is_mixed == True\")\n for epoch in mixed_df.index.unique(level=5):\n\n for midx in mixed_df.query(f\"epoch == {epoch}\").index.values:\n\n # break up multi-index\n d, n, sch, g, c, e, s = midx\n \n # skip if already predicted\n try:\n prediction = ndf.at[midx, \"max_pred_val_acc\"]\n if not math.isnan(prediction):\n continue\n except:\n print(f\"Prediction did not match expectations at: {midx} - {prediction}\")\n continue\n\n # get rows in this mixed case\n mixed_case_rows = ndf.loc[(d, n, sch, g, c, e)]\n \n # get component case rows\n component_cases = get_component_cases(case_dict, c)\n component_rows = ndf.query(f\"is_mixed == False\") \\\n .query(f\"dataset == '{d}'\") \\\n .query(f\"net_name == '{n}'\") \\\n .query(f\"train_scheme == '{sch}'\") \\\n .query(f\"case in {component_cases}\") \\\n .query(f\"epoch == {e}\")\n\n # flag to indicate whether row used in prediction yet\n component_rows[\"used\"] = False\n\n # make a prediction for each sample in this mixed case\n for i in range(len(mixed_case_rows)):\n mixed_case_row = mixed_case_rows.iloc[i]\n\n # choose component row accs/learning epochs\n c_accs = []\n c_accs_test = []\n # c_epochs = []\n for cc in component_cases:\n c_row = component_rows \\\n .query(f\"case == '{cc}'\") \\\n .query(f\"used == False\")\n \n if len(c_row) == 0:\n break\n c_row = c_row.sample()\n c_accs.append(c_row.val_acc.values[0])\n c_accs_test.append(c_row.test_acc.values[0])\n # c_epochs.append(c_row.epochs_past.values[0])\n\n # mark component row as used in prediction\n component_rows.at[c_row.index.values[0], \"used\"] = True\n\n if len(c_accs) == 0:\n break\n\n max_pred = np.max(c_accs)\n lin_pred = np.mean(c_accs)\n\n ndf.at[(d, n, sch, g, c, e, mixed_case_row.name), \"max_pred_val_acc\"] = max_pred\n ndf.at[(d, n, sch, g, c, e, mixed_case_row.name), \"linear_pred_val_acc\"] = lin_pred\n \n if len(c_accs_test) == 0:\n continue\n\n ndf.at[(d, n, sch, g, c, e, mixed_case_row.name), \"max_pred_test_acc\"] = np.max(c_accs_test)\n ndf.at[(d, n, sch, g, c, e, mixed_case_row.name), \"linear_pred_test_acc\"] = np.mean(c_accs_test)\n\n # significance\n upper_dists = [\"val_acc\", \"val_acc\", \"test_acc\", \"test_acc\"]\n lower_dists = [\"max_pred_val_acc\", \"linear_pred_val_acc\", \"max_pred_test_acc\", \"linear_pred_test_acc\"]\n cols = [\"max_pred_val_acc\", \"linear_pred_val_acc\", \"max_pred_test_acc\", \"linear_pred_test_acc\"]\n for upper, lower, col in zip(upper_dists, lower_dists, cols):\n\n t, p = ttest_ind(ndf.at[(d, n, sch, g, c, e), upper], ndf.at[(d, n, sch, g, c, e), lower])\n if t < 0:\n p = 1. 
- p / 2.\n else:\n p = p / 2.\n ndf.loc[(d, n, sch, g, c, e), f\"{col}_p_val\"] = p\n\n # save things\n self.save_df(df_name, ndf)\n\n # TODO: separate the refresh code for this from final_acc_df???\n self.save_json(\"case_dict.json\", case_dict)", "def read_combine_elia_activated_energy(path,status):\r\n #loop, read in and combine all data files into one \"combined_data\"\r\n i=0\r\n dfsprice = []\r\n dfsvol = []\r\n data_files_price = glob.glob(path + 'ActivatedEnergyPrices*')\r\n data_files_volume = glob.glob(path + 'ActivatedEnergyVolumes*')\r\n print(str(datetime.datetime.utcnow()) + \" amount of files to combine: \" + str(len(data_files_volume)+len(data_files_price)))\r\n \r\n for file1 in data_files_price:\r\n i=i+1\r\n print(str(datetime.datetime.utcnow()) + \" processing file number: \"+ str(i))\r\n df1 = read_elia_activated_energy_prices(file1,status)\r\n dfsprice.append(df1)\r\n combined_data_price = pd.concat(dfsprice, axis = 0)\r\n \r\n #remove \"NRV in MW\" column, because it is duplicate \r\n combined_data_price = combined_data_price.drop(combined_data_price.columns[7], axis=1)\r\n \r\n for file2 in data_files_volume:\r\n i=i+1\r\n print(str(datetime.datetime.utcnow()) + \" processing file number: \"+ str(i))\r\n df2 = read_elia_activated_energy_volumes(file2,status)\r\n dfsvol.append(df2)\r\n combined_data_vol = pd.concat(dfsvol, axis = 0)\r\n \r\n result = pd.concat([combined_data_price, combined_data_vol], axis=1)\r\n result.reset_index(inplace=True)\r\n result[\"Timestamp\"]=pd.to_datetime(result[\"Timestamp\"],format=(\"%d/%m/%Y %H:%M\"))\r\n result=result.set_index(\"Timestamp\")\r\n print(str(datetime.datetime.utcnow()) + \" finished\")\r\n return result", "def fetch(index, outfile):\n populate_index(index, outfile=outfile)", "def tidy_param_df(sample_id, df_param_indexed, out_filename):\n \n df_param_indexed = df_param_indexed.copy()\n# add link to each sheet in excel on paramters sheet, goes to label cell B2 20180111\n d2 = {1 : pd.Series(\n '=HYPERLINK(\"[{}]\\'{}\\'!B2\", \"Sheet\")'.format(\n out_filename, sample_id), index=['Link'])}\n df_param_link = pd.DataFrame(d2)\n df = df_param_link.append(\n df_param_indexed).transpose()\n df = df.drop(columns='Sample Number')\n\n return df", "def __get_latest_data(table_name='derivatives_economicindicatorstandard'):\n # create query and get data\n query = 'SELECT * FROM ' + table_name\n df = AccessDB().run_read_query(query)\n\n if table_name == 'derivatives_economicindicatorstandard':\n df = pd.DataFrame(df.groupby(['dbcode', 'indicator', 'country', 'freq', 'flow'])['date'].max())\n else:\n df = pd.DataFrame(df.groupby(['dbcode', 'indicator', 'country', 'freq', 'counter_party'])['date'].max())\n df.reset_index(inplace=True)\n return df", "async def report_by_id(idx: str):\n return DF.iloc[int(idx)].to_dict()", "def main(file_list):\n data_store = {}\n \n for file in file_list:\n sample_id = get_sample_id(file)\n data_store[sample_id] = {}\n data_store[sample_id][\"sample_type\"], data_store[sample_id][\"out_filename\"], data_store[sample_id][\"out_location\"] = check_name(file, sample_id)\n data_store[sample_id][\"df_parameters\"], data_store[sample_id][\"df_values\"], data_store[sample_id][\"df_parameters_for_values\"] = data_in(file, sample_id)\n if data_store[sample_id][\"sample_type\"] == \"EFC\":\n binder_mass = efc_calcs(data_store[sample_id][\"df_parameters\"])\n elif data_store[sample_id][\"sample_type\"] == \"OPC\":\n binder_mass = opc_calcs(data_store[sample_id][\"df_parameters\"])\n 
data_store[sample_id][\"df_values\"] = tidy_val_df(data_store[sample_id][\"df_values\"], binder_mass)\n data_store[sample_id][\"df_parameters\"] = tidy_param_df(sample_id, data_store[sample_id][\"df_parameters\"], data_store[sample_id][\"out_filename\"])\n for key, value in data_store.items():\n write_to_excel(key, value[\"df_parameters\"], value[\"df_values\"], value[\"df_parameters_for_values\"], value[\"out_location\"])", "def combine_storm_reports(start_date, end_date, out_dir, report_type):\n\n dates = pd.date_range(start_date, end_date)\n file_list = []\n for date in dates:\n if not isfile(join(out_dir, f\"{date.strftime('%y%m%d')}_{report_type}.csv\")):\n continue\n filename = join(out_dir, f\"{date.strftime('%y%m%d')}_{report_type}.csv\")\n f = pd.read_csv(filename)\n f['Report_Date'] = pd.to_datetime(date.strftime('%Y%m%d'))\n f['Actual_Date'] = f['Report_Date']\n f.loc[f['Time'] < 1200, 'Actual_Date'] += pd.Timedelta('1D')\n file_list.append(f)\n df = pd.concat(file_list)\n hours = df.loc[:, 'Time'].apply(lambda x: str(x)[:-2] if len(str(x)) >= 3 else '0').astype(int) + 1\n df['Actual_Date'] = df['Actual_Date'] + pd.to_timedelta(hours, unit='h')\n return df", "def get_anes_data(rawdata_dir, targetdir, targetdir_codes, targetdir_verbatims, optional_ds=False):\n # setup preliminaries\n input_path = rawdata_dir + \"anes_timeseries_2008_openends_redacted_Dec2012Revision.xls\"\n\n # 1. make new directory in the project root where we want to put the matched verbatim-codes datasets\n # 2. and a directory where we want to put the extracted verbatims\n # 3. and a directory where we want to put the extracted codes for the verbatims-files\n folders = [targetdir, targetdir_verbatims, targetdir_codes]\n for folder in folders:\n # create the mentioned folders if they do not exist\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n # import verbatim excel-file for splitting\n xlsfile = pd.ExcelFile(input_path)\n sheet_names = xlsfile.sheet_names\n\n # split the verbatims in the openends_redacted-file into individual datasets and save\n # from that always extract first and last column, slap header (from the names) above,\n # save as new datasets (just relevant text), leave out the overview sheet up front\n counter = 0\n sheet_df_map = {}\n for current_name in sheet_names[1:]:\n # print status\n counter += 1\n print(\"Processing sheet (\", counter, \") :\", current_name)\n\n # load file, get variable names, select first and last variable (caseID and verbatim - question)\n sheet_df = pd.read_excel(input_path, sheet_name=current_name)\n colnames = sheet_df.columns.values\n select_vars = colnames[::len(colnames) - 1]\n print(\"Selected vars: \", select_vars)\n\n # save individual sheet to csv for documentation purposes\n sheet_df.to_csv(targetdir + current_name + \".csv\", index=False, header=False, encoding='utf-8')\n\n # always extract the first and third column for our text learning purposes by sheet\n # drop the first two rows from the excels (descriptions, big header, etc)\n verbatim_extract_df = pd.DataFrame(sheet_df, columns=select_vars)\n verbatim_extract_df = verbatim_extract_df.iloc[1:]\n # and save (for manual inspection!)\n verbatim_extract_df.to_csv(targetdir_verbatims + current_name + \"_verbatim.csv\",\n index=False,\n header=True,\n encoding='utf-8')\n\n # add the df to the df map for later convenient use\n sheet_df_map[current_name] = verbatim_extract_df\n\n # now load these verbatims one by one, merge with codes, save to thematically split question-answer groups\n # (to keep 
comparable to Card/Smith) and collect them into a dataframe map (initialized here)\n return_df_map = {}\n\n ######################################### Dataset 1: General Election ##############################################\n # Reasons why McCain lost the general election (MCGEN_Code1 to MCGEN_Code13), Sheet: WhyElectLose\n # Reasons why Obama won the general election (OBGEN_Code1 to OBGEN_Code13), Sheet: WhyElectWin\n\n # get input data filename\n elect_outcome_codes = rawdata_dir + \"anes_timeseries_2008_election_outcomes/\" + \\\n \"anes_timeseries_2008_election_outcomes_all_codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x46 vector\n # (46 because we have 46 unique codes used in the dataset, see word coument on codes). Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here (should be valid for all 4 question-answer encodings):\n gen_elect_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"5\": 4, \"7\": 5, \"8\": 6, \"9\": 7, \"10\": 8, \"11\": 9, \"12\": 10, \"13\": 11,\n \"14\": 12, \"15\": 13, \"16\": 14, \"17\": 15, \"18\": 16, \"19\": 17, \"20\": 18, \"21\": 19, \"22\": 20,\n \"23\": 21, \"24\": 22, \"25\": 23, \"26\": 24, \"27\": 25, \"28\": 26, \"29\": 27, \"30\": 28, \"31\": 29,\n \"32\": 30, \"33\": 31, \"34\": 32, \"35\": 33, \"36\": 34, \"37\": 35, \"38\": 36, \"39\": 37, \"40\": 38,\n \"41\": 39, \"42\": 40, \"94\": 41, \"95\": 42, \"96\": 43, \"97\": 44, \"98\": 45, \"99\": 46}\n # unknown codes (not mentioned in code overview): 3, 18, 11\n # 1. Reasons why McCain lost the general election (MCGEN_Code1 to MCGEN_Code13)\n\n # setup desired output path\n mccain_output_path = targetdir_codes + \"/mccain_eleclost.csv\"\n # generate variable names for data extraction according to the ones mentioned in the respective dataset\n # coding report\n mccain_select_vars = varname_generator(\"ID\", \"MCGEN_Code\", 13)\n # transform to binarized code dataframe and save\n mccain_elec_df = transformer_function(path_to_df=elect_outcome_codes,\n list_of_vars_selection=mccain_select_vars,\n mappings_dictionary=gen_elect_dict,\n num_code_vars=46,\n path_to_output=mccain_output_path)\n print(\"McCain orig ds rowcount (codes): \", mccain_elec_df.shape[0])\n # match with verbatims and save\n vmccain_output_path = targetdir + \"/mccain_verbatim_code_eleclost.csv\"\n mccain_eleclost_df = verbatim_code_merger(verbatim_df=sheet_df_map['WhyElectLose'],\n binarized_codes_df=mccain_elec_df,\n output_path=vmccain_output_path)\n print(\"McCain merged ds rowcount (codes): \", mccain_eleclost_df.shape[0])\n print(\"Finished WhyElectLose McCain\")\n # 2. 
Reasons why Obama won the general election (OBGEN_Code1 to OBGEN_Code13)\n\n # setup desired output path\n obama_output_path = targetdir_codes + \"/obama_elecwin.csv\"\n\n # generate variable names for data extraction according to the ones mentioned in the respective\n # dataset coding report\n obama_select_vars = varname_generator(\"ID\", \"OBGEN_Code\", 13)\n\n # transform to binarized code dataframe and save\n obama_elec_df = transformer_function(path_to_df=elect_outcome_codes,\n list_of_vars_selection=obama_select_vars,\n mappings_dictionary=gen_elect_dict,\n num_code_vars=46,\n path_to_output=obama_output_path)\n print(\"Obama orig ds rowcount (codes): \", obama_elec_df.shape[0])\n # match with verbatims and save\n vobama_output_path = targetdir + \"/obama_verbatim_code_elecwin.csv\"\n obama_win_df = verbatim_code_merger(verbatim_df=sheet_df_map['WhyElectWin'],\n binarized_codes_df=obama_elec_df,\n output_path=vobama_output_path)\n print(\"Obama merged ds rowcount (codes): \", obama_win_df.shape[0])\n print(\"Finished WhyElectWin Obama\")\n\n # 3. Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n # (following Card/Smith)\n pieces = (obama_win_df, mccain_eleclost_df)\n general_election = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_1'] = general_election # save to df map (same name as in Card/Smith)\n\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = obama_win_df.shape[0] + mccain_eleclost_df.shape[0]\n print(\"(Dataset 1) Sanity check on row sum: \",\n rows_sum == general_election.shape[0],\n \"(Sum: \", general_election.shape[0],\n \")\")\n genlec_output_path = targetdir + \"/Elecwinlost_verbatim_codes.csv\"\n general_election.to_csv(genlec_output_path, index=False, header=True, encoding='utf-8')\n\n ######################################### Dataset 2: Primary Election ##############################################\n # Reasons why Clinton lost the Democratic nomination (CLPRIM_Code1 to CLPRIM_Code13), Sheet: WhyNomLose\n # Reasons why Obama won the Democratic nomination (OBPRIM_Code1 to OBPRIM_Code13), Sheet: WhyNomWin\n\n # get input data filename\n nom_outcome_codes = rawdata_dir + \"anes_timeseries_2008_election_outcomes/\" + \\\n \"anes_timeseries_2008_election_outcomes_all_codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x46 vector\n # (46 because we have 46 unique codes used in the dataset, see word coument on codes). Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here (should be valid for all 4 question-answer encodings):\n noutcomes_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"19\": 18, \"20\": 19, \"21\": 20,\n \"22\": 21,\n \"23\": 22, \"24\": 23, \"25\": 24, \"26\": 25, \"27\": 26, \"28\": 27, \"29\": 28, \"30\": 29, \"31\": 30,\n \"32\": 31,\n \"33\": 32, \"34\": 33, \"35\": 34, \"36\": 35, \"37\": 36, \"38\": 37, \"39\": 38, \"40\": 39, \"41\": 40,\n \"42\": 41,\n \"94\": 42, \"95\": 43, \"96\": 44, \"97\": 45, \"98\": 46, \"99\": 47}\n # unknown codes (not mentioned in code overview): 3,4,11\n # 1. Why do you think Barack Obama won the Democratic nomination? 
(OBPRIM_Code1 to OBPRIM_Code13)\n\n # setup desired output path\n nobama_output_path = targetdir_codes + \"/obama_nomwin.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n nobama_select_vars = varname_generator(\"ID\", \"OBPRIM_Code\", 13)\n # transform to binarized code dataframe and save\n nobama_df = transformer_function(path_to_df=nom_outcome_codes,\n list_of_vars_selection=nobama_select_vars,\n mappings_dictionary=noutcomes_dict,\n num_code_vars=47,\n path_to_output=nobama_output_path)\n\n # match with verbatims and save\n vnobama_output_path = targetdir + \"/obama_verbatim_code_nomwin.csv\"\n obama_nomwin_df = verbatim_code_merger(verbatim_df=sheet_df_map['WhyNomWin'],\n binarized_codes_df=nobama_df,\n output_path=vnobama_output_path)\n print(\"Finished WhyNomWin Obama\")\n\n # 2. Why do you think Hillary Clinton lost the Democratic nomination? (CLPRIM_Code1 to CLPRIM_Code13)\n\n # setup desired output path\n nclinton_output_path = targetdir_codes + \"/clinton_nomlost.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n nclinton_select_vars = varname_generator(\"ID\", \"CLPRIM_Code\", 13)\n # transform to binarized code dataframe and save\n nclinton_df = transformer_function(path_to_df=nom_outcome_codes,\n list_of_vars_selection=nclinton_select_vars,\n mappings_dictionary=noutcomes_dict,\n num_code_vars=47,\n path_to_output=nclinton_output_path)\n\n # match with verbatims and save\n vnclinton_output_path = targetdir + \"/clinton_verbatim_code_nomlost.csv\"\n clinton_nomlost_df = verbatim_code_merger(verbatim_df=sheet_df_map['WhyNomLose'],\n binarized_codes_df=nclinton_df,\n output_path=vnclinton_output_path)\n print(\"Finished WhyNomLose Clinton\")\n\n # 3. Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n # (following Card/Smith)\n pieces = (obama_nomwin_df, clinton_nomlost_df)\n primary_election = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_2'] = primary_election # save to df map (same name as in Card/Smith)\n\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = obama_nomwin_df.shape[0] + clinton_nomlost_df.shape[0]\n print(\"(Dataset 2) Sanity check on row sum: \",\n rows_sum == primary_election.shape[0],\n \"(Sum: \", primary_election.shape[0],\n \")\")\n pelec_output_path = targetdir + \"/Nomwinlost_verbatim_codes.csv\"\n primary_election.to_csv(pelec_output_path, index=False, header=True, encoding='utf-8')\n\n ######################################### Dataset 3: Party (Dis)likes ##############################################\n # Dislikes about the Republican Party (REPDL_Code1 to REPDL_Code13), Sheet: RptyDislik\n # Likes about the Republican Party (REPLI_Code1 to REPLI_Code13), Sheet: RptyLike\n # Dislikes about the Democratic Party (DEMDL_Code1 to DEMDL_Code13), Sheet: DptyDislik\n # Likes about the Democratic Party (DEMLI_Code1 to DEMLI_Code13), Sheet: DptyLike\n\n # get input data filename\n party_dlikes_codes = rawdata_dir + \"anes_timeseries_2008_party_likes_and_dislikes/\" + \\\n \"anes_timeseries_2008_party_likes_and_dislikes_all_codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x 41 vector\n # (41 because we have 41 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here (should be valid for all 4 question-answer encodings):\n pdlikes_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20, \"21\": 21,\n \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"26\": 26, \"27\": 27, \"28\": 28, \"29\": 29, \"30\": 30, \"31\": 31,\n \"32\": 32, \"44\": 33, \"45\": 34, \"46\": 35, \"94\": 36, \"95\": 37, \"96\": 38, \"97\": 39, \"98\": 40, \"99\": 41}\n # unknown codes (not mentioned in code overview): 19, 2, 9, 8, 17, 7\n # 1. Dislikes about the Republican Party (REPDL_Code1 to REPDL_Code13)\n\n # setup desired output path\n disl_output_path = targetdir_codes + \"/repparty_dislikes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n disl_select_vars = varname_generator(\"ID\", \"REPDL_Code\", 13)\n # transform to binarized code dataframe and save\n rep_disl_df = transformer_function(path_to_df=party_dlikes_codes,\n list_of_vars_selection=disl_select_vars,\n mappings_dictionary=pdlikes_dict,\n num_code_vars=41,\n path_to_output=disl_output_path)\n\n # match with verbatims and save\n vrep_disl_output_path = targetdir + \"/repparty_verbatim_dislikes.csv\"\n rep_dlikes_df = verbatim_code_merger(verbatim_df=sheet_df_map['RptyDislik'],\n binarized_codes_df=rep_disl_df,\n output_path=vrep_disl_output_path)\n\n print(\"Finished RptyDislik: Republican Party dislikes\")\n\n # 2. Likes about the Republican Party (REPLI_Code1 to REPLI_Code13) RptyLike\n\n # setup desired output path\n like_output_path = targetdir_codes + \"/repparty_likes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n like_select_vars = varname_generator(\"ID\", \"REPLI_Code\", 13)\n # transform to binarized code dataframe and save\n rep_like_df = transformer_function(path_to_df=party_dlikes_codes,\n list_of_vars_selection=like_select_vars,\n mappings_dictionary=pdlikes_dict,\n num_code_vars=41,\n path_to_output=like_output_path)\n\n # match with verbatims and save\n vrep_like_output_path = targetdir + \"/repparty_verbatim_likes.csv\"\n rep_likes_df = verbatim_code_merger(verbatim_df=sheet_df_map['RptyLike'],\n binarized_codes_df=rep_like_df,\n output_path=vrep_like_output_path)\n\n print(\"Finished RptyLike: Republican Party likes\")\n\n # 3. 
Dislikes about the Democratic Party (DEMDL_Code1 to DEMDL_Code13) DptyDislik\n\n # setup desired output path\n ddislik_output_path = targetdir_codes + \"/demparty_dislikes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n ddislik_select_vars = varname_generator(\"ID\", \"DEMDL_Code\", 13)\n # transform to binarized code dataframe and save\n dem_dislik_df = transformer_function(path_to_df=party_dlikes_codes,\n list_of_vars_selection=ddislik_select_vars,\n mappings_dictionary=pdlikes_dict,\n num_code_vars=41,\n path_to_output=ddislik_output_path)\n\n # match with verbatims and save\n vdem_dislik_output_path = targetdir + \"/demparty_verbatim_dislikes.csv\"\n dem_dlikes_df = verbatim_code_merger(verbatim_df=sheet_df_map['DptyDislik'],\n binarized_codes_df=dem_dislik_df,\n output_path=vdem_dislik_output_path)\n\n print(\"Finished DptyDislik: Democratic Party dislikes\")\n\n # 4. Likes about the Democratic Party (DEMLI_Code1 to DEMLI_Code13) DptyLike\n\n # setup desired output path\n dem_like_output_path = targetdir_codes + \"/demparty_likes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n dem_like_select_vars = varname_generator(\"ID\", \"DEMLI_Code\", 13)\n # transform to binarized code dataframe and save\n dem_like_df = transformer_function(path_to_df=party_dlikes_codes,\n list_of_vars_selection=dem_like_select_vars,\n mappings_dictionary=pdlikes_dict,\n num_code_vars=41,\n path_to_output=dem_like_output_path)\n\n # match with verbatims and save\n vdemlike_output_path = targetdir + \"/demparty_verbatim_likes.csv\"\n dem_likes_df = verbatim_code_merger(verbatim_df=sheet_df_map['DptyLike'],\n binarized_codes_df=dem_like_df,\n output_path=vdemlike_output_path)\n\n print(\"Finished DptyLike: Democratic Party likes\")\n\n # 5. Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n # (following Card/Smith)\n pieces = (dem_likes_df, rep_likes_df, dem_dlikes_df, rep_dlikes_df)\n party_dlikes = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_3'] = party_dlikes # save to df map (same name as in Card/Smith)\n\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = dem_likes_df.shape[0] + rep_likes_df.shape[0] + dem_dlikes_df.shape[0] + rep_dlikes_df.shape[0]\n print(\"(Dataset 3) Sanity check on row sum: \",\n rows_sum == party_dlikes.shape[0],\n \"(Sum: \", party_dlikes.shape[0],\n \")\")\n output_path = targetdir + \"/Party_dlikes_verbatim_codes.csv\"\n party_dlikes.to_csv(output_path, index=False, header=True, encoding='utf-8')\n\n ######################################### Dataset 4: Person (Dis)likes #############################################\n # Reasons to vote against John McCain (MCCDL_Code1 to MCCDL_Code21), Sheet: RcandDislik\n # Reasons to vote for John McCain (MCCLI_Code1 to MCCLI_Code21), Sheet: RcandLike\n # Reasons to vote against Barack Obama (OBADL_Code1 to OBADL_Code21), Sheet: DcandDislik\n # Reasons to vote for Barack Obama (OBALI_Code1 to OBALI_Code21), Sheet: DcandLike\n\n # get input data filename\n lkdk_cand_codes = rawdata_dir + \"anes_timeseries_2008_candidate_likes_and_dislikes/\" + \\\n \"anes_timeseries_2008_candidate_likes_and_dislikes_all_codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x39 vector\n # (39 because we have 39 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here (should be valid for all 4 question-answer encodings):\n cand_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20, \"21\": 21,\n \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"26\": 26, \"27\": 27, \"28\": 28, \"29\": 29, \"30\": 30, \"31\": 31,\n \"32\": 32, \"33\": 33, \"43\": 34, \"94\": 35, \"95\": 36, \"96\": 37, \"97\": 38, \"98\": 39, \"99\": 40}\n\n # 1. DcandLike: Reasons to vote for Barack Obama (OBALI_Code1 to OBALI_Code21)\n\n # setup desired output path\n fobama_output_path = targetdir_codes + \"/obama_likes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n fobama_select_vars = varname_generator(\"ID\", \"OBALI_Code\", 21)\n # transform to binarized code dataframe and save\n vfor_obama_df = transformer_function(path_to_df=lkdk_cand_codes,\n list_of_vars_selection=fobama_select_vars,\n mappings_dictionary=cand_dict,\n num_code_vars=40,\n path_to_output=fobama_output_path)\n\n # match with verbatims and save\n vfobama_output_path = targetdir + \"/obama_verbatim_code_likes.csv\"\n obama_likes_df = verbatim_code_merger(verbatim_df=sheet_df_map['DcandLike'],\n binarized_codes_df=vfor_obama_df,\n output_path=vfobama_output_path)\n print(\"Finished DcandLike Obama\")\n\n # 2. DcanddLike: Reasons to vote against Barack Obama (OBADL_Code1 to OBADL_Code21)\n\n # setup desired output path\n vaobama_output_path = targetdir_codes + \"/obama_dislikes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n vaobama_select_vars = varname_generator(\"ID\", \"OBADL_Code\", 21)\n # transform to binarized code dataframe and save\n vagainst_obama_df = transformer_function(path_to_df=lkdk_cand_codes,\n list_of_vars_selection=vaobama_select_vars,\n mappings_dictionary=cand_dict,\n num_code_vars=40,\n path_to_output=vaobama_output_path)\n # match with verbatims and save\n vaobama_output_path = targetdir + \"/obama_verbatim_code_dislikes.csv\"\n obama_dislikes_df = verbatim_code_merger(verbatim_df=sheet_df_map['DcandDislik'],\n binarized_codes_df=vagainst_obama_df,\n output_path=vaobama_output_path)\n print(\"Finished DcandDislik Obama\")\n\n # 3. Rcandlike: Reasons to vote for John McCain (MCCLI_Code1 to MCCLI_Code21)\n\n # setup desired output path\n fmccain_output_path = targetdir_codes + \"/mccain_likes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n fmccain_select_vars = varname_generator(\"ID\", \"MCCLI_Code\", 21)\n # transform to binarized code dataframe and save\n vfor_maccain_df = transformer_function(path_to_df=lkdk_cand_codes,\n list_of_vars_selection=fmccain_select_vars,\n mappings_dictionary=cand_dict,\n num_code_vars=40,\n path_to_output=fmccain_output_path)\n # match with verbatims and save\n vformccain_output_path = targetdir + \"/mccain_verbatim_code_likes.csv\"\n mccain_likes_df = verbatim_code_merger(verbatim_df=sheet_df_map['RcandLike'],\n binarized_codes_df=vfor_maccain_df,\n output_path=vformccain_output_path)\n print(\"Finished RcandLike McCain\")\n\n # 4. 
Rcanddlike: Reasons to vote against John McCain (MCCDL_Code1 to MCCDL_Code21)\n\n # setup desired output path\n vamccain_output_path = targetdir_codes + \"/mccain_dislikes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n vamccain_select_vars = varname_generator(\"ID\", \"MCCDL_Code\", 21)\n # transform to binarized code dataframe and save\n vagainst_mccain_df = transformer_function(path_to_df=lkdk_cand_codes,\n list_of_vars_selection=vamccain_select_vars,\n mappings_dictionary=cand_dict,\n num_code_vars=40,\n path_to_output=vamccain_output_path)\n # match with verbatims and save\n vamccain_output_path = targetdir + \"/mccain_verbatim_dislikes.csv\"\n mccain_dislikes_df = verbatim_code_merger(verbatim_df=sheet_df_map['RcandDislik'],\n binarized_codes_df=vagainst_mccain_df,\n output_path=vamccain_output_path)\n print(\"Finished RcandDislik McCain\")\n\n # 5. Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n # (following Card/Smith)\n pieces = (obama_likes_df, obama_dislikes_df, mccain_likes_df, mccain_dislikes_df)\n person_dlikes_df = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_4'] = person_dlikes_df # save to df map (same name as in Card/Smith)\n\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = mccain_likes_df.shape[0] + obama_likes_df.shape[0] + \\\n mccain_dislikes_df.shape[0] + obama_dislikes_df.shape[0]\n print(\"(Dataset 4) Sanity check on row sum: \",\n rows_sum == person_dlikes_df.shape[0],\n \"(Sum: \",\n person_dlikes_df.shape[0],\n \")\")\n output_path = targetdir + \"/Cand_like_dislike_verbatim_codes.csv\"\n person_dlikes_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n\n ############################################## Dataset 5: Terrorists ###############################################\n # Terrorists’ attacks (Terr1 to Terr11), Sheet: DHSsept11\n\n # get input data filename\n terrorists_codes = rawdata_dir + \"anes_timeseries_2008_terrorists/\" + \\\n \"Terrorists - All codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x30 vector\n # (30 because we have 30 unique codes used in the dataset, see word coument on codes). Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n terrorist_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20,\n \"21\": 21, \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"91\": 26, \"96\": 27, \"97\": 28, \"98\": 29,\n \"99\": 30}\n\n # unknown codes (not mentioned in the code overview): none\n\n # also the master code variable description is wrong! 
it is not: Terrorists’ attacks (Terr1 to Terr11) BUT:\n # TER_Code1;TER_Code2;TER_Code3;TER_Code4;TER_Code5;TER_Code6;TER_Code7;TER_Code8;TER_Code9;TER_Code10;TER_Code11;\n\n # setup desired output path\n tcodes_output_path = targetdir_codes + \"/terrorists.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n terr_select_vars = varname_generator(\"ID\", \"TER_Code\", 11)\n # transform to binarized code dataframe and save\n terr_df = transformer_function(path_to_df=terrorists_codes,\n list_of_vars_selection=terr_select_vars,\n mappings_dictionary=terrorist_dict,\n num_code_vars=30,\n path_to_output=tcodes_output_path)\n # match with verbatims and save\n vterr_output_path = targetdir + \"/terrrorists_verbatim_code.csv\"\n terrorists_df = verbatim_code_merger(verbatim_df=sheet_df_map['DHSsept11'],\n binarized_codes_df=terr_df,\n output_path=vterr_output_path)\n\n print(\"Finished DHSsept11: Terrorists question on 9/11\")\n\n # Collect the dataset into the final dataset map and save into a dataset (following Card/Smith)\n return_df_map['Dataset_5'] = terrorists_df # save to df map (same name as in Card/Smith)\n\n # sanity check: is total of original codes-df row numbers equal final df row number?\n checktotal_orig_df = pd.read_csv(terrorists_codes, sep=\";\")\n rows_sum = checktotal_orig_df.shape[0]\n print(\"(Dataset 5) Sanity check on row sum: \",\n rows_sum == terrorists_df.shape[0],\n \"(Sum: \",\n terrorists_df.shape[0],\n \")\")\n output_path = targetdir + \"/terrorists_verbatim_codes.csv\"\n terrorists_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n\n ############################################## Dataset 6: Important Issues #########################################\n # Most Important Political Problem:\n #\tMIPPOL1_Code1 to MIPPOL1_Code8\n #\tMIPPOL1_Substantive1 to MIPPOL1_Substantive8, Sheet: MIPpolit1\n # Second Most Important Political Problem:\n #\tMIPPOL2_Code1 to MIPPOL2_Code8\n #\tMIPPOL2_Substantive1 to MIPPOL2_Substantive8, Sheet: MIPpolit2\n # Most Important Election Issue:\n #\tMIIELE1_Code1 to MIIELE1_Code8\n #\tMIIELE1_Substantive1 to MIIELE1_Substantive8, Sheet: MIPpers1\n # Second Most Important Election Issue:\n #\tMIIELE2_Code1 to MIIELE2_Code8\n #\tMIIELE2_Substantive1 to MIIELE2_Substantive8, Sheet: MIPpers2\n\n # get input data filename\n important_codes = rawdata_dir + \"anes_timeseries_2008_most_important_problem/\" + \\\n \"Most Important Problem - All codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1x77 vector\n # (77 because we have 77 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n mip_issues_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20,\n \"21\": 21, \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"26\": 26, \"27\": 27, \"28\": 28, \"29\": 29,\n \"30\": 30, \"31\": 31, \"32\": 32, \"33\": 33, \"34\": 34, \"35\": 35, \"36\": 36, \"37\": 37, \"38\": 38,\n \"39\": 39, \"40\": 40, \"41\": 41, \"42\": 42, \"43\": 43, \"44\": 44, \"45\": 45, \"46\": 46, \"47\": 47,\n \"48\": 48, \"49\": 49, \"50\": 50, \"51\": 51, \"52\": 52, \"53\": 53, \"54\": 54, \"55\": 55, \"56\": 56,\n \"57\": 57, \"58\": 58, \"59\": 59, \"60\": 60, \"61\": 61, \"62\": 62, \"63\": 63, \"64\": 64, \"65\": 65,\n \"66\": 66, \"67\": 67, \"68\": 68, \"69\": 69, \"70\": 70, \"71\": 71, \"91\": 72, \"95\": 73, \"96\": 74,\n \"97\": 75, \"98\": 76, \"99\": 77}\n\n # unknown codes (not mentioned in the code overview): none\n # 1. Most Important Political Problem MIPPOL1_Code1 to MIPPOL1_Code8 and\n # MIPPOL1_Substantive1 to MIPPOL1_Substantive8\n\n # setup desired output path\n mip1_output_path = targetdir_codes + \"/mip1_political.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n mip1_first_select_vars = varname_generator(\"ID\", \"MIPPOL1_Code\", 8)\n mip1_second_select_vars = varname_generator(\"ID\", \"MIPPOL1_Substantive\", 8)\n mip1_second_select_vars = mip1_second_select_vars[1:] # throw away the second \"ID\" variable reference\n mip1_vars_selected_overall = mip1_first_select_vars + mip1_second_select_vars # concat the two lists\n # transform to binarized code dataframe and save\n mip1_df = transformer_function(path_to_df=important_codes,\n list_of_vars_selection=mip1_vars_selected_overall,\n mappings_dictionary=mip_issues_dict,\n num_code_vars=77,\n path_to_output=mip1_output_path)\n # match with verbatims and save\n vmip1_output_path = targetdir + \"/mip1_political_verbatim_code.csv\"\n mip1_political = verbatim_code_merger(verbatim_df=sheet_df_map['MIPpolit1'],\n binarized_codes_df=mip1_df,\n output_path=vmip1_output_path)\n print(\"Finished MIPpolit1: Most important political issue question\")\n # 2. 
Second Most Important Political Problem: MIPPOL2_Code1 to MIPPOL2_Code8 and MIPPOL2_Substantive1 to\n # MIPPOL2_Substantive8\n\n # setup desired output path\n mip2_output_path = targetdir_codes + \"/mip2_political.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n mip2_first_select_vars = varname_generator(\"ID\", \"MIPPOL2_Code\", 8)\n mip2_second_select_vars = varname_generator(\"ID\", \"MIPPOL2_Substantive\", 8)\n mip2_second_select_vars = mip2_second_select_vars[1:] # throw away the second \"ID\" variable reference\n mip2_vars_selected_overall = mip2_first_select_vars + mip2_second_select_vars # concat the two lists\n # transform to binarized code dataframe and save\n mip2_df = transformer_function(path_to_df=important_codes,\n list_of_vars_selection=mip2_vars_selected_overall,\n mappings_dictionary=mip_issues_dict,\n num_code_vars=77,\n path_to_output=mip2_output_path)\n # match with verbatims and save\n vmip2_output_path = targetdir + \"/mip2_political_verbatim_code.csv\"\n mip2_political = verbatim_code_merger(verbatim_df=sheet_df_map['MIPpolit2'],\n binarized_codes_df=mip2_df,\n output_path=vmip2_output_path)\n print(\"Finished MIPpolit2: Second most important political issue question\")\n # 3. Most Important Election Issue: MIIELE1_Code1 to MIIELE1_Code8 and MIIELE1_Substantive1 to MIIELE1_Substantive8\n\n # setup desired output path\n pip1_output_path = targetdir_codes + \"/mielect1_political.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n pip1_first_select_vars = varname_generator(\"ID\", \"MIIELE1_Code\", 8)\n pip1_second_select_vars = varname_generator(\"ID\", \"MIIELE1_Substantive\", 8)\n pip1_second_select_vars = pip1_second_select_vars[1:] # throw away the second \"ID\" variable reference\n pip1_vars_selected_overall = pip1_first_select_vars + pip1_second_select_vars # concat the two lists\n # transform to binarized code dataframe and save\n pip1_df = transformer_function(path_to_df=important_codes,\n list_of_vars_selection=pip1_vars_selected_overall,\n mappings_dictionary=mip_issues_dict,\n num_code_vars=77,\n path_to_output=pip1_output_path)\n # match with verbatims and save\n vpip1_output_path = targetdir + \"/mielect1_political_verbatim_code.csv\"\n mielect1_political = verbatim_code_merger(verbatim_df=sheet_df_map['MIPpers1'],\n binarized_codes_df=pip1_df,\n output_path=vpip1_output_path)\n print(\"Finished MIPpers1: Most important personal topic in this election\")\n # 4. 
Second Most Important Election Issue: MIIELE2_Code1 to MIIELE2_Code8 and MIIELE2_Substantive1 to\n # MIIELE2_Substantive8\n\n # setup desired output path\n pip2_output_path = targetdir_codes + \"/mielect2_political.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n pip2_first_select_vars = varname_generator(\"ID\", \"MIIELE2_Code\", 8)\n pip2_second_select_vars = varname_generator(\"ID\", \"MIIELE2_Substantive\", 8)\n pip2_second_select_vars = pip2_second_select_vars[1:] # throw away the second \"ID\" variable reference\n pip2_vars_selected_overall = pip2_first_select_vars + pip2_second_select_vars # concat the two lists\n # transform to binarized code dataframe and save\n pip2_df = transformer_function(path_to_df=important_codes,\n list_of_vars_selection=pip2_vars_selected_overall,\n mappings_dictionary=mip_issues_dict,\n num_code_vars=77,\n path_to_output=output_path)\n # match with verbatims and save\n vpip2_output_path = targetdir + \"/mielect2_political_verbatim_code.csv\"\n mielect2_political = verbatim_code_merger(verbatim_df=sheet_df_map['MIPpers2'],\n binarized_codes_df=pip2_df,\n output_path=vpip2_output_path)\n\n print(\"Finished MIPpers2: Second most important personal topic in this election\")\n # 5. Match the codes with the verbatims and stack the verbatims and codes for both\n # into a dataset (following Card/Smith)\n\n pieces = (mielect1_political, mielect2_political, mip1_political, mip2_political)\n important_issues_df = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_6'] = important_issues_df # save to df map (same name as in Card/Smith)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = mielect1_political.shape[0] + mielect2_political.shape[0] + \\\n mip1_political.shape[0] + mip2_political.shape[0]\n print(\"(Dataset 6) Sanity check on row sum: \",\n rows_sum == important_issues_df.shape[0],\n \"(Sum: \",\n important_issues_df.shape[0],\n \")\")\n output_path = targetdir + \"/Important_Issues_verbatim_codes.csv\"\n important_issues_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n\n ###################################### Dataset 7: Political Knowledge: Brown #######################################\n # Gordon Brown Office Recognition Codes (BROWN_Code1 to BROWN_Code5), Sheet: OfcBrown\n # get input data filename\n brown_ofrc_codes = rawdata_dir + \"ANES2008TS_OfficeRecognition/\" + \\\n \"Political knowledge - All codes2.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 16 vector\n # (16 because we have 16 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n brown_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"11\": 5, \"12\": 6, \"13\": 7, \"14\": 8, \"21\": 9, \"23\": 10, \"24\": 11,\n \"95\": 12, \"96\": 13, \"97\": 14, \"98\": 15, \"99\": 16}\n # unknown codes (not mentioned in the code overview): none\n\n # setup desired output path\n brown_output_path = targetdir_codes + \"/brown_ofrc.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n brown_select_vars = varname_generator(\"ID\", \"BROWN_Code\", 5)\n # transform to binarized code dataframe and save\n brown_df = transformer_function(path_to_df=brown_ofrc_codes,\n list_of_vars_selection=brown_select_vars,\n mappings_dictionary=brown_dict,\n num_code_vars=16,\n path_to_output=brown_output_path)\n # match with verbatims and save\n vbrown_output_path = targetdir + \"/brown_ofrc_verbatim_code.csv\"\n brown_ofrc_df = verbatim_code_merger(verbatim_df=sheet_df_map['OfcBrown'],\n binarized_codes_df=brown_df,\n output_path=vbrown_output_path)\n # Collect the dataset into the final dataset map and save into a dataset (following Card/Smith)\n\n brown_checktotal_orig = pd.read_csv(brown_ofrc_codes, sep=\";\")\n return_df_map['Dataset_7'] = brown_ofrc_df # save to df map (same name as in Card/Smith)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = brown_checktotal_orig.shape[0]\n print(\"(Dataset 7) Sanity check on row sum: \",\n rows_sum == brown_ofrc_df.shape[0],\n \"(Sum: \",\n brown_ofrc_df.shape[0],\n \")\")\n output_path = targetdir + \"/brown_ofrc_verbatim_codes.csv\"\n brown_ofrc_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished OfcBrown: Office recognition question Gordon Brown\")\n\n ###################################### Dataset 8: Political Knowledge: Cheney ######################################\n # Dick Cheney Office Recognition Codes (CHENEY_Code1 to CHENEY_Code5), Sheet: OfcCheney\n\n # get input data filename\n cheney_ofrc_codes = rawdata_dir + \"ANES2008TS_OfficeRecognition/\" + \\\n \"Political knowledge - All codes2.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 14 vector\n # (14 because we have 14 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n cheney_dict = {\"1\": 1, \"3\": 2, \"4\": 3, \"11\": 4, \"12\": 5, \"21\": 6, \"22\": 7, \"23\": 8, \"24\": 9, \"95\": 10, \"96\": 11,\n \"97\": 12, \"98\": 13, \"99\": 14}\n\n # unknown codes (not mentioned in the code overview): none\n\n # setup desired output path\n chen_output_path = targetdir_codes + \"/cheney_ofrc.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n chen_select_vars = varname_generator(\"ID\", \"CHENEY_Code\", 5)\n # transform to binarized code dataframe and save\n cheney_df = transformer_function(path_to_df=cheney_ofrc_codes,\n list_of_vars_selection=chen_select_vars,\n mappings_dictionary=cheney_dict,\n num_code_vars=14,\n path_to_output=chen_output_path)\n # match with verbatims and save\n vcheney_output_path = targetdir + \"/cheney_ofrc_verbatim_code.csv\"\n cheney_ofrc_df = verbatim_code_merger(verbatim_df=sheet_df_map['OfcCheney'],\n binarized_codes_df=cheney_df,\n output_path=vcheney_output_path)\n # Collect the dataset into the final dataset map and save into a dataset (following Card/Smith)\n\n chen_checktotal_orig = pd.read_csv(cheney_ofrc_codes, sep=\";\")\n return_df_map['Dataset_8'] = cheney_ofrc_df # save to df map (same name as in Card/Smith)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = chen_checktotal_orig.shape[0]\n print(\"(Dataset 8) Sanity check on row sum: \",\n rows_sum == cheney_ofrc_df.shape[0],\n \"(Sum: \",\n cheney_ofrc_df.shape[0],\n \")\")\n output_path = targetdir_codes + \"/cheney_ofrc_verbatim_codes.csv\"\n cheney_ofrc_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished OfcCheney: Office recognition question Dick Cheney\")\n\n ###################################### Dataset 9: Political Knowledge: Pelosi ######################################\n # Nancy Pelosi Office Recognition Codes (PELOSI_Code1 to PELOSI_Code5), Sheet: OfcPelosi\n\n # get input data filename\n pelosi_ofrc_codes = rawdata_dir + \"ANES2008TS_OfficeRecognition/\" + \\\n \"Political knowledge - All codes2.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 17 vector\n # (17 because we have 17 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n pelosi_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"11\": 5, \"12\": 6, \"15\": 7, \"16\": 8, \"21\": 9, \"22\": 10, \"23\": 11,\n \"24\": 12, \"95\": 13, \"96\": 14, \"97\": 15, \"98\": 16, \"99\": 17}\n # unknown codes (not mentioned in the code overview): none\n\n # setup desired output path\n pel_output_path = targetdir_codes + \"/pelosi_ofrc.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n pel_select_vars = varname_generator(\"ID\", \"PELOSI_Code\", 5)\n # transform to binarized code dataframe and save\n pelosi_df = transformer_function(path_to_df=pelosi_ofrc_codes,\n list_of_vars_selection=pel_select_vars,\n mappings_dictionary=pelosi_dict,\n num_code_vars=17,\n path_to_output=pel_output_path)\n # match with verbatims and save\n vpel_output_path = targetdir + \"/pelosi_ofrc_verbatim_code.csv\"\n pelosi_ofrc_df = verbatim_code_merger(verbatim_df=sheet_df_map['OfcPelosi'],\n binarized_codes_df=pelosi_df,\n output_path=vpel_output_path)\n # Collect the dataset into the final dataset map and save into a dataset (following Card/Smith)\n\n pel_checktotal_orig = pd.read_csv(pelosi_ofrc_codes, sep=\";\")\n return_df_map['Dataset_9'] = pelosi_ofrc_df # save to df map (same name as in Card/Smith)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = pel_checktotal_orig.shape[0]\n print(\"(Dataset 9) Sanity check on row sum: \",\n rows_sum == pelosi_ofrc_df.shape[0],\n \"(Sum: \",\n pelosi_ofrc_df.shape[0],\n \")\")\n output_path = targetdir + \"/pelosi_ofrc_verbatim_codes.csv\"\n pelosi_ofrc_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished OfcPelosi: Office recognition question Nancy Pelosi\")\n\n ###################################### Dataset 10: Political Knowledge: Roberts ####################################\n # John Roberts Office Recognition Codes (ROBERTS_Code1 to ROBERTS_Code5), Sheet: OfcRoberts\n\n # get input data filename\n roberts_ofrc_codes = rawdata_dir + \"ANES2008TS_OfficeRecognition/\" + \\\n \"Political knowledge - All codes2.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 17 vector\n # (17 because we have 17 unique codes used in the dataset, see word coument on codes). 
Finally, we can then merge\n # them into one big dataset where all verbatims by question are stacked under each other\n\n # setup the code to position mapping dict here:\n roberts_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"11\": 5, \"12\": 6, \"13\": 7, \"14\": 8, \"21\": 9, \"23\": 10, \"24\": 11,\n \"95\": 12, \"96\": 13, \"97\": 14, \"98\": 15, \"99\": 16}\n\n # unknown codes (not mentioned in the code overview): none\n\n # setup desired output path\n rob_output_path = targetdir_codes + \"/roberts_ofrc.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n rob_select_vars = varname_generator(\"ID\", \"ROBERTS_Code\", 5)\n # transform to binarized code dataframe and save\n roberts_df = transformer_function(path_to_df=roberts_ofrc_codes,\n list_of_vars_selection=rob_select_vars,\n mappings_dictionary=roberts_dict,\n num_code_vars=16,\n path_to_output=rob_output_path)\n # match with verbatims and save\n vrob_output_path = targetdir + \"/roberts_ofrc_verbatim_code.csv\"\n roberts_ofrc_df = verbatim_code_merger(verbatim_df=sheet_df_map['OfcRoberts'],\n binarized_codes_df=roberts_df,\n output_path=vrob_output_path)\n\n # Collect the dataset into the final dataset map and save into a dataset (following Card/Smith)\n rob_checktotal_orig = pd.read_csv(roberts_ofrc_codes, sep=\";\")\n return_df_map['Dataset_10'] = roberts_ofrc_df # save to df map (same name as in Card/Smith)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = rob_checktotal_orig.shape[0]\n print(\"(Dataset 10) Sanity check on row sum: \",\n rows_sum == roberts_ofrc_df.shape[0],\n \"(Sum: \",\n roberts_ofrc_df.shape[0],\n \")\")\n output_path = targetdir + \"/roberts_ofrc_verbatim_codes.csv\"\n roberts_ofrc_df.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished OfcRoberts: Office recognition question John Roberts\")\n\n ########################################### DATASETS CARD/ SMITH DONE ##############################################\n # Additionally: convert and merge the datasets for occupation and industry (for checks by interested persons or\n # raw data providers)\n if optional_ds == True:\n print(\"Generating optional datasets: occupation and industry\")\n # A. 
Occupation codes and verbatims (past/present)\n\n # get input data filename\n occupation_codes = rawdata_dir + \"anes_timeseries_2008_occupation_industry/\" + \\\n \"Occupation and Industry codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 101\n # vector (101 because we have 101 unique codes used in the dataset, see word coument on codes).\n # Finally, we can then merge them into one big dataset where all verbatims by question are stacked\n # under each other\n\n # setup the code to position mapping dict here:\n occupations_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20,\n \"21\": 21, \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"26\": 26, \"27\": 27, \"28\": 28, \"29\": 29,\n \"30\": 30, \"31\": 31, \"32\": 32, \"33\": 33, \"34\": 34, \"35\": 35, \"36\": 36, \"37\": 37, \"38\": 38,\n \"39\": 39, \"40\": 40, \"41\": 41, \"42\": 42, \"43\": 43, \"44\": 44, \"45\": 45, \"46\": 46, \"47\": 47,\n \"48\": 48, \"49\": 49, \"50\": 50, \"51\": 51, \"52\": 52, \"53\": 53, \"54\": 54, \"55\": 55, \"56\": 56,\n \"57\": 57, \"58\": 58, \"59\": 59, \"60\": 60, \"61\": 61, \"62\": 62, \"63\": 63, \"64\": 64, \"65\": 65,\n \"66\": 66, \"67\": 67, \"68\": 68, \"69\": 69, \"70\": 70, \"71\": 71, \"72\": 72, \"73\": 73, \"74\": 74,\n \"75\": 75, \"76\": 76, \"77\": 77, \"78\": 78, \"79\": 79, \"80\": 80, \"81\": 81, \"82\": 82, \"83\": 83,\n \"84\": 84, \"85\": 85, \"86\": 86, \"87\": 87, \"88\": 88, \"89\": 89, \"90\": 90, \"91\": 91, \"92\": 92,\n \"93\": 93, \"94\": 94, \"95\": 95, \"96\": 96, \"97\": 97, \"996\": 98, \"997\": 99, \"998\": 100,\n \"999\": 101}\n # unknown codes (not mentioned in the code overview): none\n # 1. Current occupation\n\n # setup desired output path\n cocc_output_path = targetdir_codes + \"/current_occupation_codes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n cocc_select_vars = [\"ID\", \"COCode\"]\n # transform to binarized code dataframe and save\n cocc_df = transformer_function(path_to_df=occupation_codes,\n list_of_vars_selection=cocc_select_vars,\n mappings_dictionary=occupations_dict,\n num_code_vars=101,\n path_to_output=cocc_output_path)\n # match with verbatims and save\n vcocc_output_path = targetdir + \"/current_occupation_verbatim_code.csv\"\n current_occupation = verbatim_code_merger(verbatim_df=sheet_df_map['OccNow'],\n binarized_codes_df=cocc_df,\n output_path=vcocc_output_path)\n # 2. Past occupation\n\n # setup desired output path\n pocc_output_path = targetdir_codes + \"/past_occupation_codes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n pocc_select_vars = (\"ID\", \"POCode\")\n # transform to binarized code dataframe and save\n pocc_df = transformer_function(path_to_df=occupation_codes,\n list_of_vars_selection=pocc_select_vars,\n mappings_dictionary=occupations_dict,\n num_code_vars=101,\n path_to_output=pocc_output_path)\n # match with verbatims and save\n vpocc_output_path = targetdir + \"/past_occupation_verbatim_code.csv\"\n past_occupation = verbatim_code_merger(verbatim_df=sheet_df_map['OccPast'],\n binarized_codes_df=pocc_df,\n output_path=vpocc_output_path)\n # 3. 
Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n\n pieces = (past_occupation, current_occupation) # order in verbatims excel-file\n occ_df_final = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_opt_1_occupation'] = occ_df_final # save to df map (additional dataset with new name)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = current_occupation.shape[0] + past_occupation.shape[0]\n print(\"(Optional dataset 1: occupation) Sanity check on row sum: \",\n rows_sum == occ_df_final.shape[0],\n \"(Sum: \",\n occ_df_final.shape[0],\n \")\")\n output_path = targetdir + \"/occupation_complete_verbatim_codes.csv\"\n occ_df_final.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished Occupation (past/present): Occupation questions\")\n\n # B. Industry codes and verbatims (past/present)\n # get input data filename\n industry_codes = rawdata_dir + \"anes_timeseries_2008_occupation_industry/\" + \\\n \"Occupation and Industry codes.csv\"\n\n # we have to go through each dataset individually, isolate the codings, transform them into a binary 1 x 101\n # vector (101 because we have 101 unique codes used in the dataset, see word coument on codes).\n # Finally, we can then merge them into one big dataset where all verbatims by question are stacked\n # under each other\n\n # setup the code to position mapping dict here:\n industry_dict = {\"1\": 1, \"2\": 2, \"3\": 3, \"4\": 4, \"5\": 5, \"6\": 6, \"7\": 7, \"8\": 8, \"9\": 9, \"10\": 10, \"11\": 11,\n \"12\": 12, \"13\": 13, \"14\": 14, \"15\": 15, \"16\": 16, \"17\": 17, \"18\": 18, \"19\": 19, \"20\": 20,\n \"21\": 21, \"22\": 22, \"23\": 23, \"24\": 24, \"25\": 25, \"26\": 26, \"27\": 27, \"28\": 28, \"29\": 29,\n \"30\": 30, \"31\": 31, \"32\": 32, \"33\": 33, \"34\": 34, \"35\": 35, \"36\": 36, \"37\": 37, \"38\": 38,\n \"39\": 39, \"40\": 40, \"41\": 41, \"42\": 42, \"43\": 43, \"44\": 44, \"45\": 45, \"46\": 46, \"47\": 47,\n \"48\": 48, \"49\": 49, \"50\": 50, \"51\": 51, \"52\": 52, \"53\": 53, \"54\": 54, \"55\": 55, \"56\": 56,\n \"57\": 57, \"58\": 58, \"59\": 59, \"60\": 60, \"61\": 61, \"62\": 62, \"63\": 63, \"64\": 64, \"65\": 65,\n \"66\": 66, \"67\": 67, \"68\": 68, \"69\": 69, \"70\": 70, \"71\": 71, \"72\": 72, \"73\": 73, \"74\": 74,\n \"75\": 75, \"76\": 76, \"77\": 77, \"78\": 78, \"79\": 79, \"80\": 80, \"81\": 81, \"82\": 82, \"83\": 83,\n \"84\": 84, \"85\": 85, \"86\": 86, \"87\": 87, \"88\": 88, \"89\": 89, \"90\": 90, \"91\": 91, \"92\": 92,\n \"93\": 93, \"94\": 94, \"95\": 95, \"96\": 96, \"97\": 97, \"98\": 98, \"99\": 99, \"996\": 100, \"997\": 101,\n \"998\": 102, \"999\": 103}\n\n # unknown codes (not mentioned in the code overview): none\n # 1. 
Current industry\n\n # setup desired output path\n cind_output_path = targetdir_codes + \"/current_industry_codes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n cind_select_vars = [\"ID\", \"CICode\"]\n\n # transform to binarized code dataframe and save\n cind_df = transformer_function(path_to_df=industry_codes,\n list_of_vars_selection=cind_select_vars,\n mappings_dictionary=industry_dict,\n num_code_vars=103,\n path_to_output=cind_output_path)\n # match with verbatims and save\n vcind_output_path = targetdir + \"/current_industry_verbatim_code.csv\"\n current_industry = verbatim_code_merger(verbatim_df=sheet_df_map['IndNow'],\n binarized_codes_df=cind_df,\n output_path=vcind_output_path)\n # 2. Past industry\n\n # setup desired output path\n pind_output_path = targetdir_codes + \"/past_industry_codes.csv\"\n # generate variable names according to the ones mentioned in the respective dataset coding report\n pind_select_vars = [\"ID\", \"PICode\"]\n\n # transform to binarized code dataframe and save\n pind_df = transformer_function(path_to_df=industry_codes,\n list_of_vars_selection=pind_select_vars,\n mappings_dictionary=industry_dict,\n num_code_vars=103,\n path_to_output=pind_output_path)\n # match with verbatims and save\n vpind_output_path = targetdir + \"/past_industry_verbatim_code.csv\"\n past_industry = verbatim_code_merger(verbatim_df=sheet_df_map['IndPast'],\n binarized_codes_df=pind_df,\n output_path=vpind_output_path)\n # 3. Match the codes with the verbatims and stack the verbatims and codes for both into a dataset\n\n pieces = (past_industry, current_industry)\n indu_df_final = pd.concat(pieces, ignore_index=True)\n return_df_map['Dataset_opt_2_industry'] = indu_df_final # save to df map (additional dataset with new name)\n # sanity check: is sum of individual df row numbers equal final df row number?\n rows_sum = current_industry.shape[0] + past_industry.shape[0]\n print(\"Sanity check on row sum: \",\n rows_sum == indu_df_final.shape[0],\n \"(Sum: \",\n indu_df_final.shape[0],\n \")\")\n output_path = targetdir + \"/industry_complete_verbatim_codes.csv\"\n indu_df_final.to_csv(output_path, index=False, header=True, encoding='utf-8')\n print(\"Finished Industry (past/present): Industry questions\")\n\n ########################################### RETURN THE MERGED VERBATIM/CODES DATASETS ##############################\n print(\"Done loading and preparing the datasets!\")\n return return_df_map", "def merge_survey(self) -> pd.DataFrame:\n\n df_list = []\n for survey_id in self.survey_id:\n self.log.debug(f\"Reading: {survey_id}\")\n temp_df = self.get_survey_responses(survey_id)\n df_list.append(temp_df[2:])\n\n df_col = reduce(pd.Index.union, (df.columns for df in df_list))\n\n merged_df = pd.DataFrame()\n for df in df_list:\n temp_df = df.reindex(columns=df_col, fill_value=0)\n merged_df = merged_df.append([temp_df], ignore_index=True)\n return merged_df", "def merging_data(dataframes_list):\n\n adm, pat, diag, serv, icu = dataframes_list\n\n raw_data = adm.merge(pat, how='outer', on='subject_id')\n raw_data = raw_data.merge(diag, how='outer', on=('subject_id', 'hadm_id'))\n raw_data = raw_data.merge(serv, how='outer', on=('subject_id', 'hadm_id'))\n raw_data = raw_data.merge(icu, how='outer', on=('subject_id', 'hadm_id'))\n\n keeping_cols = ['subject_id', 'hadm_id', 'admittime', 'dischtime',\n 'admission_type', 'admission_location', 'insurance',\n 'religion', 'marital_status', 'ethnicity', 'gender', 'dob',\n 
'deathtime', 'icd9_code', 'curr_service', \"first_careunit\"\n ]\n\n raw_data = raw_data[keeping_cols]\n\n return raw_data", "def index_mock_files(self,mfiles):\n results = []\n for i in range(len(mfiles['file_name'])):\n print(\"Submitting {} to indexd at {}.\".format(mfiles['file_name'][i],mfiles['object_id'][i]))\n res = self.create_record(\n did=mfiles['object_id'][i],\n hashes={'md5':mfiles['md5sum'][i]},\n size=mfiles['file_size'][i],\n urls=mfiles['storage_urls'][i],\n file_name=mfiles['file_name'][i],\n acl=mfiles['acl'][i],\n authz=mfiles['authz'][i])\n results.append(res)\n return results", "def return_list(origin_list, mode):\n if mode == 'original': filename = 'original_ass_list'\n if mode == 'current': filename = 'latest_ass_list'\n df = pd.DataFrame(origin_list)\n #df.to_csv(\"latest_assassins_list.csv\", sep=\",\", na_rep='', header=False, index=False, index_label=None, mode='w', encoding=None, compression=None, quoting=None, quotechar='\"', line_terminator='n', chunksize=None, tupleize_cols=None, date_format=None, doublequote=True, escapechar=None, decimal='.')\n df.to_excel(filename+ '.xlsx', sheet_name='latest_list', header=['Name', 'Email', 'Photo'], index=False, index_label=None, startrow=0, startcol=0, engine=None, merge_cells=True, encoding=None, inf_rep='inf', verbose=True, freeze_panes=None)", "def _merge_files(parse_results: Iterable[ParseResult]) -> Iterable[ParseResult]:\n return map(_merge_records, groupby_file(parse_results))", "def merge_physdfs(files, mode='basic'):\n\ttemp_df = pd.read_csv(files[0], index_col=False)\n\tcolumns = temp_df.columns.tolist()\n\tmerged_df = pd.DataFrame([], columns=columns)\n\n\tind = 1\n\ttot = len(files)\n\tfor file in files:\n\t\tprint(\"Merging (%d/%d): %s\" % (ind, tot, file))\n\t\tind = ind + 1\n\n\t\tdf = pd.read_csv(file, index_col=False)\n\n\t\t# add 'rat_data' column to the merged df\n\t\troot_name = file.split('/')[-1]\n\t\tdf = df.assign(raw_data=root_name)\n\n\t\t# add 'exp_label' column to the merged df\n\t\tif mode=='basic':\n\t\t\texp = re.findall(r'[a-zA-Z]{3}\\d{1}', file)\n\t\t\tdf = df.assign(exp_label=exp[0][:-1])\n\n\t\tif mode=='general':\n\t\t if 'cohort' in root_name:\n\t\t df = df.assign(exp_label=root_name[0:8])\n\t\t else:\n\t\t m = root_name.find('_') + 1\n\t\t n = root_name.find('_', m)\n\t\t df = df.assign(exp_label=root_name[m:n])\n\n\t\tif mode=='mengdi':\n\t\t\tm = root_name.find('_') + 1\n\t\t\tm = root_name.find('_', m) + 1\n\t\t\tn = root_name.find('-', m)\n\t\t\tdf = df.assign(exp_label=root_name[m:n])\n\n\t\tif mode=='stiffness':\n\t\t\tm = root_name.find('-') + 1\n\t\t\tm = root_name.find('-', m) + 1\n\t\t\tn = root_name.find('_') + 1\n\t\t\tn = root_name.find('_', n)\n\t\t\tdf = df.assign(exp_label=root_name[m:n])\n\n\t\tmerged_df = pd.concat([merged_df, df], sort=True, ignore_index=True)\n\n\treturn merged_df", "def merge_summaries_old(root_dir,output_file=None):\n #\n sumfiles = glob.glob(f\"{root_dir}/**/*smry.txt\",recursive=True)\n nsums = len(sumfiles)\n print (f\"Found {nsums} summary files in {root_dir}\")\n #\n with tempfile.NamedTemporaryFile(mode='w') as fp:\n for i in range(nsums):\n sumfile = sumfiles[i]\n iobs = os.path.basename(sumfile)[0:10]\n with open(sumfile,'r') as sfile:\n fp.write(sfile.read())\n #\n # now read as pandas dataframe\n #\n colnames = [\"rev\",\"obsid\",\"expid\",\"mode\",\"filt\",\"tstart\",\"tend\",\"texpo\",\\\n \"mvcratio\", # (a rough measure of the ratio of counts in the MnKa versus continuum)\n \"qboxt0\",\"qboxt1\",\"qboxt2\",\"qboxt3\", # x 4 
(electronics quadrant box temperatures)\n \"ndisclin_mean0\",\"ndisclin_mean1\",\"ndisclin_mean2\",\"ndisclin_mean3\", #x 4\n \"mipsel0\",\"mipsel1\",\"mipsel2\",\"mipsel3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"maxmip0\",\"maxmip1\",\"maxmip2\",\"maxmip3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"ndisclin_med0\",\"ndisclin_med1\",\"ndisclin_med2\",\"ndisclin_med3\", #median x 4\n \"ndisclin_std0\",\"ndisclin_std1\",\"ndisclin_std2\",\"ndisclin_std3\"] #, stddev x 4\n #\n df = pd.read_csv(fp.name,delimiter='\\s+',header=None,skip_blank_lines=True,names=colnames)\n #\n # now calculate the time_delta, the difference in years from observation start and 2000-01-01\n #\n stime = [(datetime.strptime(x,\"%Y-%m-%dT%H:%M:%S\")-time0).total_seconds()/(365.0*24.0*3600.0) for x in df.tstart]\n df.insert(6,\"delta_time\",pd.Series(stime,index=df.index))\n #\n print (f'Last observation t={df.delta_time.max():.2f} years')\n if (output_file is not None):\n df.to_csv(output_file)\n fp.close()\n return df", "def multi_sheet(self):\n # Initialize #\n all_sheets = []\n # Loop #\n for name in self.handle.sheet_names:\n sheet = self.handle.parse(name)\n sheet.insert(0, \"nace\", name)\n all_sheets.append(sheet)\n # Write #\n df = pandas.concat(all_sheets)\n df.to_csv(str(self.dest), **self.kwargs)", "def read_historical_data():\n pull_dir = \"/common/quidel-historical-raw\"\n columns = ['SofiaSerNum', 'TestDate', 'Facility', 'ZipCode',\n 'FluA', 'FluB', 'StorageDate']\n df = pd.DataFrame(columns=columns)\n\n for fn in os.listdir(pull_dir):\n if \"xlsx\" in fn:\n newdf = pd.read_excel(\"/\".join([pull_dir, fn]))\n df = df.append(newdf[columns])\n return df", "def get_arterial(file_path,category):\n book = xlrd.open_workbook(file_path)\n file_name = os.path.basename(file_path)\n year = str(20) + \"\".join([str(s) for s in file_name if s.isdigit()]) ## gets the year from filename\n Month = strptime(file_name[2:5],'%b').tm_mon ## gets month no\n mydate = datetime.date(int(year),Month, 1) ## first day of the month and year\n mydate_1 = mydate - datetime.timedelta(days=1) ## interested in last month of this year as data corresponds to last month and same year\n mydate_2 = mydate - datetime.timedelta(days=368) ## interested in last month of last year as data corresponds to last month and last year \n #monthid1 = str(mydate_1.strftime(\"%Y\")) + str(mydate_1.strftime(\"%m\")) ## 200706 for July 2007 file\n monthid2 = str(mydate_2.strftime(\"%Y\")) + str(mydate_2.strftime(\"%m\")) ## 200606 for July 2007 file\n try:\n if category.lower() == \"rural\":\n index = 3\n elif category.lower() == \"urban\":\n index = 4\n else:\n index = 5\n sheet = book.sheet_by_index(index)\n list_states = sheet.col_values(0)\n xstart = list_states.index('Connecticut')\n xend = list_states.index('TOTALS')\n #list1 = sheet.col_slice(colx= 8,start_rowx=xstart,end_rowx= xend - 1)\n #list1 = [w.value for w in list1]\n list2 = sheet.col_slice(colx= 9,start_rowx=xstart,end_rowx= xend - 1)\n list2 = [w.value for w in list2]\n list3 = sheet.col_slice(colx= 0,start_rowx=xstart,end_rowx= xend - 1)\n list3 = [w.value.lower() for w in list3] ## take lowercase for direct match later\n df = pd.concat([pd.DataFrame(list3),pd.DataFrame(list2)], axis = 1) # ,pd.DataFrame(list1)\n #col_name_1 = category + '_Arterial_' + monthid1\n col_name_2 = category + '_Arterial_' + monthid2\n df.columns = ['State', col_name_2 ] # col_name_1, \n df[col_name_2].replace('', np.nan, inplace=True) ## removes rows with blank records ( zonal 
categories)\n df['State'].replace('', np.nan, inplace=True)\n curr_monthid = str(mydate.strftime(\"%Y\")) + str(mydate.strftime(\"%m\")) ## 200707 for July 2007 file\n df['data_monthid'] = curr_monthid\n df.dropna(subset=[col_name_2], inplace=True)\n df.dropna(subset=['State'], inplace=True)\n df = df[~df.State.str.contains(\"subtotal\")] ### causes problems on joins, there in most files\n df = df[df.State != \"total\"] ## causes problems on joins, is there only in specific files\n df['State'] = df.State.str.strip() ## removes leading and lagging white spaces if any\n df2 = pd.melt(df,id_vars=['State','data_monthid'],var_name=['category'], value_name='Million_Vehicle_Miles')\n return df2\n except:\n print(\"error in file \",os.path.basename(file_path))", "def join():\n dataset_df = pd.read_excel(\"dataset.xlsx\")\n statistics_df = pd.read_excel(\"statistics.xlsx\")\n\n merge_df = pd.merge(dataset_df, statistics_df, on=['patient_identifier'])\n\n writer = pd.ExcelWriter('final_dataset.xlsx', engine='xlsxwriter')\n merge_df.to_excel(writer, sheet_name='Sheet1')\n writer.save()", "def merge_summaries(root_dir: str,output_file: str=None) -> pd.DataFrame:\n #\n print (f'Collecting the available summary files in {root_dir}, can take time... please wait.')\n sumfiles = glob.glob(f\"{root_dir}/**/*smry.txt\",recursive=True)\n nsums = len(sumfiles)\n print (f\"Found {nsums} summary files in {root_dir}\")\n #\n # will concatenate all smry.txt files into one temporary file and then will put it in pandas DataFrame and \n # save as CSV\n #\n with tempfile.NamedTemporaryFile(mode='w') as fp:\n for sumfile in tqdm(sumfiles,desc='Collecting the summaries'):\n with open(sumfile,'r') as sfile:\n fp.write(sfile.read())\n #\n # now read as pandas dataframe\n #\n colnames = [\"rev\",\"obsid\",\"expid\",\"mode\",\"filt\",\"tstart\",\"tend\",\"texpo\",\"mvcratio\", # (a rough measure of the ratio of counts in the MnKa versus continuum)\n \"qboxt0\",\"qboxt1\",\"qboxt2\",\"qboxt3\", # x 4 (electronics quadrant box temperatures)\n \"ndisclin_mean0\",\"ndisclin_mean1\",\"ndisclin_mean2\",\"ndisclin_mean3\", #x 4\n \"mipsel0\",\"mipsel1\",\"mipsel2\",\"mipsel3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"maxmip0\",\"maxmip1\",\"maxmip2\",\"maxmip3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"ndisclin_med0\",\"ndisclin_med1\",\"ndisclin_med2\",\"ndisclin_med3\", #median x 4\n \"ndisclin_std0\",\"ndisclin_std1\",\"ndisclin_std2\",\"ndisclin_std3\"] #, stddev x 4\n\n df = pd.read_csv(fp.name,delimiter='\\s+',header=None,skip_blank_lines=True,names=colnames)\n #\n # now calculate the time_delta, the difference in years from observation start and 2000-01-01\n #\n stime = [(datetime.strptime(x,\"%Y-%m-%dT%H:%M:%S\")-time0).total_seconds()/(365.0*24.0*3600.0) for x in df.tstart]\n df.insert(6,\"delta_time\",pd.Series(stime,index=df.index))\n #\n print (f'Last observation t={df.delta_time.max():.2f} years')\n if (output_file is not None):\n df.to_csv(output_file)\n fp.close()\n return df", "def load_file_data_from_db(self):\n\n file_objs = self.file_queryset.filter(sip=self.sip, removedtime__isnull=True)\n for file_obj in self._batch_query(file_objs):\n self.file_events = get_file_events(file_obj)\n if not self.file_events:\n return\n try:\n # merge the map_file_data dict with the map_av_data\n mapped_file_info = merge_file_data_dicts(\n map_file_data(file_obj, self.file_events), map_av_data(file_obj)\n )\n self.md_info[\"files\"].append(mapped_file_info)\n self.md_info[\"premis:size\"] = 
create_package_size(\n mapped_file_info[\"premis:size\"]\n )\n self.md_info[\"amount_of_files\"] += 1\n failed_virus_checks = get_failed_virus_checks(self.file_events)\n if failed_virus_checks:\n self.md_info[\"virus_scan_info\"][\"failed_virus_checks\"].append(\n failed_virus_checks\n )\n passed_virus_checks = get_passed_virus_checks(self.file_events)\n # add info virus_scan_tools if they passed and respect\n # different tools and versions if needed.\n if (\n passed_virus_checks\n and passed_virus_checks\n not in self.md_info[\"virus_scan_info\"][\"virus_scan_tools\"]\n ):\n self.md_info[\"virus_scan_info\"][\"virus_scan_tools\"].append(\n passed_virus_checks\n )\n except KeyError:\n logger.info(\n \"File is no longer present on the filesystem: %s\",\n file_obj.currentlocation,\n )\n continue", "def mergeDatabase(df):\n\n\tabsPath = os.path.abspath(__file__)\n\tabsPath = os.path.split(absPath)[0]\n\tabsPath = ''\n\n\tdataPath = os.path.join(absPath, 'dual-data')\n\n\tdfFinal = None\n\n\t# 20210122__0002_lcrPicker has high-freq ap, no lcr b/w spikes\n\trejectionList = ['dual-data/20210122/20210122__0002_lcrPicker.csv']\n\n\tprint('dataPath:', dataPath)\n\tnumFiles = 0\n\tfor obj in os.listdir(dataPath):\n\t\tfolderPath = os.path.join(dataPath, obj)\n\t\tif os.path.isdir(folderPath):\n\t\t\tprint('folderPath:', folderPath)\n\t\t\tfor file in os.listdir(folderPath):\n\t\t\t\tif file.startswith('.'):\n\t\t\t\t\tcontinue\n\t\t\t\tif file.endswith('_lcrPicker.csv'):\n\t\t\t\t\tcsvPath = os.path.join(folderPath, file)\n\t\t\t\t\tprint(' csvPath:', csvPath)\n\n\t\t\t\t\tif csvPath in rejectionList:\n\t\t\t\t\t\tprint('!!! rejecting csvPath:', csvPath)\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tnumFiles += 1\n\t\t\t\t\tif dfFinal is None:\n\t\t\t\t\t\tdfFinal = pd.read_csv(csvPath, header=0)\n\t\t\t\t\telse:\n\t\t\t\t\t\tdf0 = pd.read_csv(csvPath, header=0)\n\t\t\t\t\t\tdfFinal = dfFinal.append(df0)\n\t\t\t\t\t\tdfFinal.reset_index(drop=True)\n\t\t\t\t\t\t# todo: should be\n\t\t\t\t\t\t#dfFinal = dfFinal.reset_index(drop=True)\n\n\t#\n\t# add new column for time of lcr before spike\n\t# todo: make new col to get rid of lcr where lcrPreSpikeSec < 0.1 sec\n\tif 1:\n\t\tdfFinal['lcrPreSpikeSec'] = dfFinal['spikeSec'] - dfFinal['lcrSec']\n\n\t\t#print(dfFinal[ np.isnan(dfFinal['lcrPreSpikeSec']) ] )\n\n\t\t# remove lcr (rows) that are close to before the spike, lcrPreSpikeSec<0.1\n\t\t# important: we need second or np.isnan() to KEEP lcrPicker with no spike detecct\n\t\tlcrNoCloserThanSec = 0.15\n\t\tprint('num lcr before removing lcr close to spike:', len(dfFinal))\n\t\tdfFinal = dfFinal[ (dfFinal['lcrPreSpikeSec'] > lcrNoCloserThanSec) | (np.isnan(dfFinal['lcrPreSpikeSec']) ) ]\n\t\tprint(' after removing lcr close to spike:', len(dfFinal))\n\n\t#\n\t# save merged ccsv\n\tmasterCsv = 'lcrPicker-db.csv'\n\tprint('mergeDatabase() merged:', numFiles, '...saving masterCsv:', masterCsv)\n\tdfFinal.to_csv(masterCsv)", "def main():\n \n lookupslocation = 'C:\\\\Users\\\\gwilliams\\\\Desktop\\\\Python Experiments\\\\work projects\\\\FaresIndexSourceData\\\\regulated_fares_data\\\\'\n destination = 'C:\\\\Users\\\\gwilliams\\\\Desktop\\\\Python Experiments\\\\work projects\\\\FaresIndexSourceData\\\\regulated_fares_data\\\\comparison output\\\\'\n lookupfileslist, count = getdata(lookupslocation)\n\n print(f\"there are {count} files found.\")\n\n newlookup = lookupfileslist[0]\n oldlookup = lookupfileslist[1]\n\n #join new to old // old to new\n new_uniquevalues = pd.merge(left=newlookup,right=oldlookup,how='left',\n 
left_on=['orig','dest','route','ticket'],right_on=['orig','dest','route','ticket'])\n\n old_uniquevalues = pd.merge(left=newlookup,right=oldlookup,how='right',\n left_on=['orig','dest','route','ticket'],right_on=['orig','dest','route','ticket'])\n\n print(\"These are values unique to new lookup\") \n new_uniquevalues = new_uniquevalues[new_uniquevalues.ticketa.isnull()==True]\n exportfile(new_uniquevalues,destination,'unique_new_values',1)\n\n print(\"These are values unique to old lookup\")\n old_uniquevalues = old_uniquevalues[old_uniquevalues.new_flag.isnull()==True]\n exportfile(old_uniquevalues,destination,'unique_old_values',1)", "def _index_latest_df(\n latest_df: pd.DataFrame, ts_locations: Union[pd.api.extensions.ExtensionArray, np.ndarray]\n) -> pd.DataFrame:\n assert latest_df.index.names == [None]\n\n if latest_df.empty:\n warnings.warn(BadMultiRegionWarning(\"Unexpected empty latest DataFrame\"))\n return pd.DataFrame(index=ts_locations).sort_index()\n else:\n latest_df_with_index = latest_df.set_index(CommonFields.LOCATION_ID, verify_integrity=True)\n # Make an index with the union of the locations in the timeseries and latest_df to keep all rows of\n # latest_df\n all_locations = (\n latest_df_with_index.index.union(ts_locations)\n .unique()\n .sort_values()\n # Make sure the index has a name so that reset_index() restores the column name.\n .rename(CommonFields.LOCATION_ID)\n )\n # reindex takes the name from index `all_locations`, see\n # https://github.com/pandas-dev/pandas/issues/9885\n return latest_df_with_index.reindex(index=all_locations)", "def get_arterial(file_path,category):\n book = xlrd.open_workbook(file_path)\n file_name = os.path.basename(file_path)\n year = str(20) + \"\".join([str(s) for s in file_name if s.isdigit()]) ## gets the year from filename\n Month = strptime(file_name[2:5],'%b').tm_mon ## gets month no\n mydate = datetime.date(int(year),Month, 1) ## first day of the month and year\n #mydate_1 = mydate - datetime.timedelta(days=1) ## interested in last month of this year as data corresponds to last month and same year\n mydate_2 = mydate - datetime.timedelta(days=368) ## interested in last month of last year as data corresponds to last month and last year \n #monthid1 = str(mydate_1.strftime(\"%Y\")) + str(mydate_1.strftime(\"%m\")) ## 200706 for July 2007 file\n monthid2 = str(mydate_2.strftime(\"%Y\")) + str(mydate_2.strftime(\"%m\")) ## 200606 for July 2007 file\n try:\n if category.lower() == \"rural\":\n index = 3\n elif category.lower() == \"urban\":\n index = 4\n else:\n index = 5\n sheet = book.sheet_by_index(index)\n list_states = sheet.col_values(0)\n xstart = list_states.index('Connecticut')\n xend = list_states.index('TOTALS')\n #list1 = sheet.col_slice(colx= 6,start_rowx=xstart,end_rowx= xend - 1)\n #list1 = [w.value for w in list1]\n list2 = sheet.col_slice(colx= 7,start_rowx=xstart,end_rowx= xend - 1)\n list2 = [w.value for w in list2]\n list3 = sheet.col_slice(colx= 0,start_rowx=xstart,end_rowx= xend - 1)\n list3 = [w.value.lower() for w in list3] ## take lowercase for direct match later\n df = pd.concat([pd.DataFrame(list3),pd.DataFrame(list2)], axis = 1) # pd.DataFrame(list1),\n #col_name_1 = category + '_Arterial_' + monthid1\n col_name_2 = category + '_Arterial_' + monthid2\n df.columns = ['State', col_name_2 ] ## col_name_1,\n df[col_name_2].replace('', np.nan, inplace=True) ## removes rows with blank records ( zonal categories)\n df['State'].replace('', np.nan, inplace=True)\n curr_monthid = str(mydate.strftime(\"%Y\")) + 
str(mydate.strftime(\"%m\")) ## 200707 for July 2007 file\n df['data_monthid'] = curr_monthid\n df.dropna(subset=[col_name_2], inplace=True)\n df.dropna(subset=['State'], inplace=True)\n df = df[~df.State.str.contains(\"subtotal\")] ### causes problems on joins, there in most files\n df = df[df.State != \"total\"] ## causes problems on joins, is there only in specific files\n df['State'] = df.State.str.strip() ## removes leading and lagging white spaces if any\n df2 = pd.melt(df,id_vars=['State','data_monthid'],var_name=['category'], value_name='Million_Vehicle_Miles')\n return df2\n except:\n print(\"error in file \",os.path.basename(file_path))", "def fetch_combine_elia_freq_R1(fromdate,todate):\r\n i=0\r\n dfs = []\r\n file_names = []\r\n fromdatepd=pd.to_datetime(fromdate,format=\"%m-%Y\")\r\n todatepd=pd.to_datetime(todate,format=\"%m-%Y\")\r\n date = fromdatepd - relativedelta(months=1)\r\n \r\n while (date < todatepd):\r\n date = date + relativedelta(months=1)\r\n if date.month < 10:\r\n monthwithzero = \"0\" + str(date.month)\r\n else:\r\n monthwithzero = str(date.month) \r\n file_names.append(\"http://publications.elia.be/Publications/Publications/FileRepository.v1.svc/DownloadFile?filePath=\\Tulyp\\FrequencyAndDemandR1Export\\FrequencyAndDemand_\"+str(date.year)+\"_\"+monthwithzero+\".xlsx\")\r\n print(str(datetime.datetime.utcnow()) + \" amount of files to combine: \" + str(len(file_names))) \r\n for file in file_names:\r\n i=i+1\r\n print(str(datetime.datetime.utcnow()) + \" processing file number: \"+ str(i))\r\n df = read_elia_freq_R1(file)\r\n dfs.append(df)\r\n combined_data = pd.concat(dfs, axis = 0)\r\n print(str(datetime.datetime.utcnow()) + \" finished\")\r\n return combined_data", "def read_dataframe_with_new_ids(filename, ids_df, *, date_fields=['TIME']):\n df = pd.read_csv(filename, parse_dates=date_fields, memory_map=True)\n return pd.merge(ids_df, df, left_index=True, right_on='FLIGHT_ID')", "def read_stats_excel(filename):\n\n df_dict = {}\n xl = pd.ExcelFile(filename)\n\n for sh in xl.sheet_names:\n df = pd.read_excel(xl, sheet_name=sh, header=[0, 1, 2])\n\n # Use start date as index\n if df[\"End\"].dtypes.all() == pd.Timestamp:\n if \"File Number\" in df.columns:\n df = df.drop(\"File Number\", axis=1, level=0)\n df = df.drop(\"End\", axis=1, level=0)\n df = df.set_index(df.columns[0])\n df.index = pd.to_datetime(df.index, format=\"%Y-%m-%d %H:%M:%S\")\n df.index.name = \"Date\"\n # Use file number as index\n else:\n df = df.drop([\"Start\", \"End\"], axis=1, level=0)\n df = df.set_index(df.columns[0])\n df.index.name = \"File Number\"\n\n df.columns.rename([\"channels\", \"stats\", \"units\"], inplace=True)\n df_dict[sh] = df\n\n return df_dict", "def get_vetted_sample(self):\n list_of_files = glob.glob(self.final_path)\n latest_file = max(list_of_files, key=os.path.getctime)\n df = pd.read_csv(latest_file)\n return df", "def merge_book_data(\n book_df: pd.DataFrame,\n book_extra_info_df: pd.DataFrame\n) -> pd.DataFrame:\n book_df = book_df.drop(columns=['isbn13'])\n book_merged_data_df = book_df.merge(book_extra_info_df, on='work_id')\n return book_merged_data_df", "def fetch_combine_elia_activated_energy(fromdate,todate,status):\r\n i=0\r\n dfsprice = []\r\n dfsvol = []\r\n data_files_price = []\r\n data_files_volume = [] \r\n fromdatepd=pd.to_datetime(fromdate,format=\"%m-%Y\")\r\n todatepd=pd.to_datetime(todate,format=\"%m-%Y\")\r\n date = fromdatepd - relativedelta(months=1)\r\n \r\n while (date < todatepd):\r\n date = date + relativedelta(months=1)\r\n 
if date.month < 10:\r\n monthwithzero = \"0\" + str(date.month)\r\n else:\r\n monthwithzero = str(date.month) \r\n data_files_volume.append(\"http://imbalanceb2c.elia.be/proxy.aspx?pubName=ActivatedEnergyVolumes&fId=\"+str(date.year)+monthwithzero+\".xls\")\r\n data_files_price.append(\"http://imbalanceb2c.elia.be/proxy.aspx?pubName=ActivatedEnergyPrices&fId=\"+str(date.year)+monthwithzero+\".xls\") \r\n print(str(datetime.datetime.utcnow()) + \" amount of files to combine: \" + str(len(data_files_volume)+len(data_files_price))) \r\n for file1 in data_files_price:\r\n i=i+1\r\n print(str(datetime.datetime.utcnow()) + \" processing file number: \"+ str(i))\r\n df1 = read_elia_activated_energy_prices(file1,status)\r\n dfsprice.append(df1)\r\n combined_data_price = pd.concat(dfsprice, axis = 0)\r\n \r\n #remove \"NRV in MW\" column, because it is duplicate \r\n if \"NRV in MW\" in combined_data_price: \r\n combined_data_price = combined_data_price.drop(combined_data_price[[\"NRV in MW\"]],axis=1)\r\n if \"NRV\\n (MW)\" in combined_data_price:\r\n combined_data_price = combined_data_price.drop(combined_data_price[[\"NRV\\n (MW)\"]],axis=1)\r\n \r\n for file2 in data_files_volume:\r\n i=i+1\r\n print(str(datetime.datetime.utcnow()) + \" processing file number: \"+ str(i))\r\n df2 = read_elia_activated_energy_volumes(file2,status)\r\n dfsvol.append(df2)\r\n combined_data_vol = pd.concat(dfsvol, axis = 0)\r\n \r\n result = pd.concat([combined_data_price, combined_data_vol], axis=1)\r\n result.reset_index(inplace=True)\r\n result[\"Timestamp\"]=pd.to_datetime(result[\"Timestamp\"],format=(\"%d/%m/%Y %H:%M\"))\r\n result=result.set_index(\"Timestamp\")\r\n \r\n if \"NRV\\n (MW)\" in result:\r\n if \"NRV in MW\" in result:\r\n result.fillna(0,inplace=True)\r\n result[\"Bids+ in euro/MWh\"] = result[\"Bids+\\n (\\u20ac/MWh)\"] + result[\"Bids+ in euro/MWh\"]\r\n result[\"Bids- in euro/MWh\"] = result[\"Bids-\\n(\\u20ac/MWh)\"] + result[\"Bids- in euro/MWh\"]\r\n result[\"IGCC- in euro/MWh\"] = result[\"IGCC-\\n(\\u20ac/MWh)\"] + result[\"IGCC- in euro/MWh\"]\r\n result[\"IGGC+ in euro/MWh\"] = result[\"IGGC+\\n(\\u20ac/MWh)\"] + result[\"IGGC+ in euro/MWh\"]\r\n result[\"MDP in euro/MWh\"] = result[\"MDP\\n(\\u20ac/MWh)\"] + result[\"MDP in euro/MWh\"]\r\n result[\"MIP in euro/MWh\"] = result[\"MIP\\n (\\u20ac/MWh)\"] + result[\"MIP in euro/MWh\"]\r\n result[\"R2+ in euro/MWh\"] = result[\"R2+\\n(\\u20ac/MWh)\"] + result[\"R2+ in euro/MWh\"]\r\n result[\"R2- in euro/MWh\"] = result[\"R2-\\n(\\u20ac/MWh)\"] + result[\"R2- in euro/MWh\"]\r\n result[\"R3 flex in euro/MWh\"] = result[\"R3 Flex (\\u20ac/MWh)\"] + result[\"R3 flex in euro/MWh\"]\r\n result[\"R3 std in euro/MWh\"] = result[\"R3 std (\\u20ac/MWh)\"] + result[\"R3 std in euro/MWh\"]\r\n result[\"R3- in euro/MWh\"] = result[\"R3-\\n(\\u20ac/MWh)\"] + result[\"R3- in euro/MWh\"]\r\n result[\"SR in euro/MWh\"] = result[\"SR\\n(\\u20ac/MWh)\"] + result[\"SR in euro/MWh\"]\r\n result[\"inter TSO import in euro/MWh\"] = result[\"Inter-TSO Import\\n (\\u20ac/MWh)\"] + result[\"inter TSO import in euro/MWh\"]\r\n result[\"Bids+ in MW\"] = result[\"Bids+\\n (MW)\"] + result[\"Bids+ in MW\"]\r\n result[\"Bids- in MW\"] = result[\"Bids-\\n(MW)\"] + result[\"Bids- in MW\"]\r\n result[\"GDV in MW\"] = result[\"GDV\\n(MW)\"] + result[\"GDV in MW\"]\r\n result[\"GUV in MW\"] = result[\"GUV\\n (MW)\"] + result[\"GUV in MW\"]\r\n result[\"IGCC+ in MW\"] = result[\"IGCC+\\n(MW)\"] + result[\"IGCC+ in MW\"]\r\n result[\"IGCC- in MW\"] = result[\"IGCC-\\n(MW)\"] 
+ result[\"IGCC- in MW\"]\r\n result[\"inter TSO export in MW\"] = result[\"Inter-Tso\\nExport\\n(MW)\"] + result[\"inter TSO export in MW\"]\r\n result[\"inter TSO import in MW\"] = result[\"Inter-Tso Import(MW)\"] + result[\"inter TSO import in MW\"]\r\n result[\"NRV in MW\"] = result[\"NRV\\n (MW)\"] + result[\"NRV in MW\"]\r\n result[\"R2+ in MW\"] = result[\"R2+\\n(MW)\"] + result[\"R2+ in MW\"]\r\n result[\"R2- in MW\"] = result[\"R2-\\n(MW)\"] + result[\"R2- in MW\"]\r\n result[\"R3 flex in MW\"] = result[\"R3 Flex\\n(MW)\"] + result[\"R3 flex in MW\"]\r\n result[\"R3 std in MW\"] = result[\"R3 Std\\n (MW)\"] + result[\"R3 std in MW\"]\r\n result[\"SR in MW\"] = result[\"SR\\n(MW)\"] + result[\"SR in MW\"]\r\n result=result.drop([\"Bids+\\n (\\u20ac/MWh)\",\"Bids-\\n(\\u20ac/MWh)\",\"IGCC-\\n(\\u20ac/MWh)\",\"IGGC+\\n(\\u20ac/MWh)\",\"MDP\\n(\\u20ac/MWh)\",\"MIP\\n (\\u20ac/MWh)\",\"R2+\\n(\\u20ac/MWh)\",\"R2-\\n(\\u20ac/MWh)\",\"R3 Flex (\\u20ac/MWh)\",\"R3 std (\\u20ac/MWh)\",\"R3-\\n(\\u20ac/MWh)\",\"SR\\n(\\u20ac/MWh)\",\"Inter-TSO Import\\n (\\u20ac/MWh)\",\"Bids+\\n (MW)\",\"Bids-\\n(MW)\",\"GDV\\n(MW)\",\"GUV\\n (MW)\",\"IGCC+\\n(MW)\",\"IGCC-\\n(MW)\",\"Inter-Tso\\nExport\\n(MW)\",\"Inter-Tso Import(MW)\",\"NRV\\n (MW)\",\"R2+\\n(MW)\",\"R2-\\n(MW)\",\"R3 Flex\\n(MW)\",\"R3 Std\\n (MW)\",\"SR\\n(MW)\"],axis=1)\r\n result[result == 0] = np.nan\r\n \r\n print(str(datetime.datetime.utcnow()) + \" finished\")\r\n return result", "def get_data(self, date_time):\n id_columns = ','.join([col for col in self.table_primary_keys if col not in ['EFFECTIVEDATE', 'VERSIONNO']])\n return_columns = ','.join(self.table_columns)\n with self.con:\n cur = self.con.cursor()\n cur.execute(\"DROP TABLE IF EXISTS temp;\")\n cur.execute(\"DROP TABLE IF EXISTS temp2;\")\n cur.execute(\"DROP TABLE IF EXISTS temp3;\")\n cur.execute(\"DROP TABLE IF EXISTS temp4;\")\n # Store just the unique sets of ids that came into effect before the the datetime in a temporary table.\n query = \"\"\"CREATE TEMPORARY TABLE temp AS \n SELECT * \n FROM {table} \n WHERE EFFECTIVEDATE <= '{datetime}';\"\"\"\n cur.execute(query.format(table=self.table_name, datetime=date_time))\n # For each unique set of ids and effective dates get the latest versionno and sore in temporary table.\n query = \"\"\"CREATE TEMPORARY TABLE temp2 AS\n SELECT {id}, EFFECTIVEDATE, MAX(VERSIONNO) AS VERSIONNO\n FROM temp\n GROUP BY {id}, EFFECTIVEDATE;\"\"\"\n cur.execute(query.format(id=id_columns))\n # For each unique set of ids get the record with the most recent effective date.\n query = \"\"\"CREATE TEMPORARY TABLE temp3 as\n SELECT {id}, VERSIONNO, max(EFFECTIVEDATE) as EFFECTIVEDATE\n FROM temp2\n GROUP BY {id};\"\"\"\n cur.execute(query.format(id=id_columns))\n # Inner join the original table to the set of most recent effective dates and version no.\n query = \"\"\"CREATE TEMPORARY TABLE temp4 AS\n SELECT * \n FROM {table} \n INNER JOIN temp3 \n USING ({id}, VERSIONNO, EFFECTIVEDATE);\"\"\"\n cur.execute(query.format(table=self.table_name, id=id_columns))\n # Inner join the most recent data with the interconnectors used in the actual interval of interest.\n query = \"\"\"SELECT {cols} FROM temp4 ;\"\"\"\n query = query.format(cols=return_columns)\n data = pd.read_sql_query(query, con=self.con)\n return data", "def load_df_from_files():\n with open(\"legislators-historical.json\") as f:\n data_old = json.load(f)\n\n with open(\"legislators-current.json\") as f:\n data_new = json.load(f)\n\n data = data_old + data_new\n\n rows = []\n for 
person in data:\n try:\n these_rows = make_rows(person)\n except:\n print(person)\n rows.extend(these_rows)\n\n df = pd.DataFrame(rows)\n return df", "def merge(parent_folder):\n parent_folder = Path(parent_folder)\n\n address_csv_files = sorted(parent_folder.glob('*_step_*.csv'))\n\n frames = []\n\n #: read all csv's delimiter='|', quoting=csv.QUOTE_MINIMAL\n for address_csv_file in address_csv_files:\n temp = pd.read_csv(\n address_csv_file, sep='|', encoding='utf-8', names=['type', 'id', 'county', 'senate', 'house', 'census']\n )\n\n frames.append(temp)\n\n #: merge all csv's\n merged = pd.concat(frames)\n merged.to_csv(parent_folder / 'all.csv', sep='|', header=False, index=False, encoding='utf-8')", "def combine_global_output_file(end_producer, step_producer, spec):\n\n start_producer = 100\n end_producer = 1001\n writer = pd.ExcelWriter('global_output.xlsx', engine='xlsxwriter')\n\n for ind_p in range(start_producer, end_producer, step_producer):\n all_data = pd.DataFrame()\n sheetID = str(ind_p)\n for f in glob.glob(\"merged-excel-docs/*.xlsx\"):\n try:\n df = pd.read_excel(f, \"P_\" + sheetID)\n all_data = all_data.append(df, ignore_index=True)\n except (ValueError, xlrd.biffh.XLRDError):\n continue\n all_data.to_excel(writer, sheet_name=\"P_\" + sheetID)\n writer.save()", "def get_filtered_df(db, mama_id_list, garfield_id_list):\n df_mama = pd.DataFrame(list(db.mama.find({'_id': {\"$in\": mama_id_list}, \"active\": True})))\n df_mama = df_mama.set_index('_id')\n if len(mama_id_list) == len(df_mama):\n print(\"\\nMama database successfully extracted from Mongo\")\n print(\"Mama DF Length:\", len(df_mama))\n print(\"Expected Mama DF Length:\", len(mama_id_list))\n else:\n print(\"\\nUnexpected row count in mama DF\")\n print(\"Mama DF Length:\", len(df_mama))\n print(\"Expected Mama DF Length:\", len(mama_id_list))\n\n\n df_garfield = pd.DataFrame(list(db.garfield.find({'_id': {\"$in\": garfield_id_list}, \"active\": True})))\n df_garfield = df_garfield.set_index('_id')\n df_garfield['request_date'] = pd.to_datetime(df_garfield['request_date'])\n if len(df_garfield) == len(garfield_id_list): \n print(\"\\nGarfield database successfully extracted from Mongo\")\n print(\"Garfield DF Length:\", len(df_garfield))\n print(\"Expected Garfield DF Length:\", len(garfield_id_list)) \n else:\n print(\"\\nUnexpected row count in Garfield DF\")\n print(\"Mama DF Length:\", len(df_garfield))\n print(\"Expected Mama DF Length:\", len(garfield_id_list))\n \n if 'address_concat' not in df_garfield.columns:\n df_garfield[\"address_concat\"] = df_garfield[\"address\"]+\", \"+ df_garfield[\"city\"]+\", \"+ df_garfield[\"state\"]+\", \"+ df_garfield[\"zip_code\"]\n \n return df_mama, df_garfield", "def merge (*a_data) :\n i = 0\n for loc_data in a_data :\n i += 1\n if i == 1 :\n loc_new_df = loc_data\n else :\n loc_new_df = __pd.merge(loc_new_df,loc_data,left_index=True,right_index=True)\n return loc_new_df", "def find_cell_master_index(cell_roi_df, active_imp_path):\n # Get the path to the directory holding the directory\n # holding the active imp\n exptdir = get_exptdir(active_imp_path)\n # Get all files that look like master indexes in that\n # exptdir (e.g. 
data/20200221_byc/20200221_byc_master_index.csv)\n master_index_paths = utilities.get_master_index_paths(exptdir)\n matched_paths = []\n matched_dfs = []\n matched_rows = []\n if master_index_paths != None:\n for i, master_index_df in enumerate([pd.read_csv(path) for path in master_index_paths]):\n # Scan through the master dfs found to look for \n # ones that have a row that matches \n row = find_cell_row(cell_roi_df, master_index_df) \n if row != None and type(row) == int:\n matched_paths.append(master_index_paths[i])\n matched_dfs.append(master_index_df)\n matched_rows.append(row)\n else:\n pass\n else:\n print(f\"No master_index_dfs found for cell_roi_df in path:\\n{exptdir}\")\n return None, None, None\n\n assert len(matched_dfs) == len(matched_dfs) == len(matched_rows), \"paths and dfs list lengths do not match\"\n\n if len(matched_dfs) == 1:\n print(f\"Found cell match at row {matched_rows[0]} in master index:\\n{matched_paths[0]}\")\n return matched_dfs[0], matched_paths[0], matched_rows[0]\n\n elif len(matched_dfs) == 0:\n print(f\"No master_index_dfs found for cell_roi_df in path:\\n{exptdir}\")\n return None, None, None\n\n elif len(matched_dfs) > 1:\n print(f\"Multiple master_index_dfs found for cell_roi_df in path:\\n{exptdir}\")\n return None, None, None\n\n else:\n print(\"Unknown error\")\n return None, None, None", "def getLatest(df):\n df_info = df.iloc[:,0:5]\n df_last = df.iloc[:,-1]\n df_info['latest'] = df_last\n \n return df_info", "def index_files(self, results):\n \n for warc_file in results:\n # Annotation .tsv is empty\n if warc_file is False:\n continue\n for record in warc_file:\n replaced_annotated_record = self.lucene.preprocess(record['replaced_record'])\n cleaned_record = self.lucene.preprocess(record['cleaned_record'])\n self.index_file(record['record_id'], replaced_annotated_record, cleaned_record, record['entities_record'])\n self.lucene.close_writer()", "def get_merged_data_frame_per_location(id):\n dfs = []\n models = ['CM', 'CM3x3', 'CN', 'CN3x3', 'CSD', 'GLRLM', 'GLRLM3x3', 'HOG', 'LBP', 'LBP3x3']\n min_rows = float('inf')\n for model in models:\n input_file = './descvis/img/' + get_location_from_id(id) + ' ' + model + '.csv'\n df = pd.read_csv(input_file, header=None)\n df = df.drop(df.columns[0], axis=1)\n if min_rows > df.shape[0]:\n min_rows = df.shape[0]\n dfs.append(df)\n\n merged = pd.concat(dfs, axis=1)\n merged = merged[:min_rows]\n\n return merged", "def report_format_completion(project_constants_lst, current_date=str(date.today())):\n\n project_steps_df, max_title, _, report_requisites_sr, *_ = project_constants_lst\n \n # verify if any report DataFrame need to be saved\n mask_report = project_steps_df['report_type'] == 'report'\n mask_save = project_steps_df['export_to_excel'] == 1\n\n if not project_steps_df.loc[mask_report & mask_save].empty:\n\n print('\\n')\n info = f'Completing the report'.upper()\n print(info, end =\" \")\n\n file_name = report_requisites_sr['customer_name'] + '_' + report_requisites_sr['project_title'] + '_tables_' + current_date + '.xlsx'\n file_path = os.path.join(report_requisites_sr['today_report_folder'], file_name)\n try:\n with pd.ExcelWriter(file_path, mode='a', if_sheet_exists= 'replace', engine='openpyxl') as writer: \n # import table of contents\n content_df = pd.read_excel(writer, sheet_name='Содержание')\n # sort table of contents items\n content_df.sort_values(by=['Закладка'], key=lambda menu_sr: project_steps_df.loc[menu_sr, 'sort_weight'], inplace=True)\n # write content to report file\n 
content_df.to_excel(writer, sheet_name='Содержание', index = False)\n workbook = openpyxl.load_workbook(file_path)\n # create hyperlinks for all items of table of contents\n hyperlink_content(workbook)\n # sort worksheets\n workbook._sheets.sort(key=lambda ws: project_steps_df.loc[ws.title, 'sort_weight'])\n workbook.save(file_path)\n except PermissionError:\n status_info('fail', max_title, len(info))\n print('\\nPermission denied. Close the file.\\n')\n exit()\n else:\n status_info('ok', max_title, len(info))", "def update_which_sde_data(\n current_sde_df,\n latest_esi_df,\n index_key\n):\n pass", "def import_data_model(directory):\n analyses = pd.read_excel(directory + 'analyses.xlsx')\n analytes = pd.read_excel(directory + 'analytes.xlsx')\n for index, analysis in analyses.iterrows():\n analyte_data = []\n analyte_names = analysis.analyte_keys.split(', ')\n for analyte_key in analyte_names:\n analyte_item = analytes.loc[analytes.key == analyte_key]\n analyte_data.append(analyte_item.to_dict(orient='records'))\n analyses.at[index, 'analytes'] = analyte_data \n analyses_data = analyses.to_dict(orient='records')\n for index, values in analyses_data.iterrows():\n doc_id = str(values.key)\n doc_data = values.to_dict()\n ref = ''\n update_document(ref, doc_data)\n # doc_data = data.to_dict(orient='index')\n # data_ref = create_reference(db, ref)\n # data_ref.document(doc_id).set(doc_data, merge=True)\n # data_ref.set(doc_data, merge=True)\n\n return NotImplementedError", "def read_daily_qualified_report(self):\n from itertools import repeat\n\n self.ID_TOTAL_CANDIDATES = kpi_from_db_config.ID_TOTAL_CANDIDATES\n self.ID_TOTAL_PROCESSED = kpi_from_db_config.ID_TOTAL_PROCESSED\n self.ID_TOTAL_EXPORTED = kpi_from_db_config.ID_TOTAL_EXPORTED\n self.ID_TOTAL_CLASSIFIED = kpi_from_db_config.ID_TOTAL_CLASSIFIED\n self.ID_TOTAL_QUALIFIED = kpi_from_db_config.ID_TOTAL_QUALIFIED\n self.ID_TOTAL_DISQUALIFIED = kpi_from_db_config.ID_TOTAL_DISQUALIFIED\n\n list_id = [self.ID_TOTAL_CANDIDATES, \n self.ID_TOTAL_PROCESSED, \n self.ID_TOTAL_EXPORTED, \n self.ID_TOTAL_CLASSIFIED, \n self.ID_TOTAL_QUALIFIED, \n self.ID_TOTAL_DISQUALIFIED]\n list_result = [[] for i in repeat(None,len(list_id))]\n\n for i in range(len(list_id)):\n self.cursor.execute('''\n SELECT value\n FROM public.kpi_report\n WHERE id = %s\n ORDER BY created_at DESC\n LIMIT 2\n ''', [list_id[i]])\n\n rows_count = self.cursor.rowcount\n if (rows_count == 2):\n for doc in self.cursor:\n list_result[i].append(int(doc[0]))\n elif (rows_count == 1):\n for doc in self.cursor:\n list_result[i].append(int(doc[0]))\n list_result[i] = list_result[i] + [0]\n else:\n list_result[i] = [0] * 2 \n\n# print \"TESTING .... 
{}\".format(list_result)\n return list_result", "def file_upload_to_obj():\n\n temp = []\n file_content = pd.read_excel(INPUT_FILE_NAME).fillna(0).to_dict('records')\n sorted_content = sorted(file_content, key=itemgetter(\n 'filedbentity.file_extension'))\n for item in file_content:\n\n raw_date = item.get('filedbentity.file_date')\n if raw_date:\n temp_date = raw_date.strftime('%Y-%m-%d')\n raw_date = datetime.strptime(temp_date, \"%Y-%m-%d\").date()\n else:\n raw_date = datetime.now().date()\n\n raw_status = item.get('dbentity.status')\n if raw_status == 'Archive':\n raw_status = 'Archived'\n \n obj = {\n 'path': item.get('EBS path'),\n 'display_name': item.get('dbentity.display_name'),\n 'status': raw_status,\n 'source': item.get('dbentity.source'),\n 'topic_edam_id': item.get('topic edam_id').upper().replace('TOPIC', 'EDAM').strip(),\n 'data_edam_id': item.get('data edam_id').upper().replace('DATA', 'EDAM').strip(),\n 'format_edam_id': item.get('format edam_id').upper().replace('FORMAT', 'EDAM').strip(),\n 'file_extension': item.get('filedbentity.file_extension'),\n 'file_date': raw_date,\n 'is_public': (item.get('filedbentity.is_public') == '1'),\n 'is_in_spell': item.get('filedbentity.is_in_spell'),\n 'is_in_browser': (item.get('filedbentity.is_in_browser') == '1'),\n 'readme_name': item.get('readme name'),\n 'description': item.get('filedbentity.description'),\n 'pmids': item.get('pmids (|)'),\n 'keywords': item.get('keywords (|)')\n }\n temp.append(obj)\n\n if len(temp) > 0:\n return temp\n return None", "def get_df(self, target_indices=None, fill=True, overwrite=None,\n target_columns=None):\n target_indices = target_indices if target_indices is not None else self.get_default_target_indices()\n overwrite = overwrite if overwrite is not None else options.force\n\n assert all(index == enb.atable.get_canonical_path(index) for index in target_indices)\n original_df = self.original_properties_table.get_df(target_indices=target_indices,\n target_columns=target_columns)\n\n target_indices = [enb.atable.get_canonical_path(index)\n for index in target_indices]\n version_indices = [self.original_to_versioned_path(index)\n for index in target_indices]\n\n version_fun_id = ray.put(self.version)\n overwrite_id = ray.put(overwrite)\n original_df_id = ray.put(original_df)\n options_id = ray.put(options)\n versioning_result_ids = []\n for original_path, version_path in zip(target_indices, version_indices):\n input_path_id = ray.put(original_path)\n output_path_id = ray.put(version_path)\n versioning_result_ids.append(ray_version_one_path.remote(\n version_fun=version_fun_id, input_path=input_path_id,\n output_path=output_path_id, overwrite=overwrite_id,\n original_info_df=original_df_id,\n check_generated_files=ray.put(self.check_generated_files),\n options=options_id))\n for output_file_path, time_list in ray.get(versioning_result_ids):\n self.current_run_version_times[output_file_path] = time_list\n\n return FilePropertiesTable.get_df(\n self,\n target_indices=[p for p in glob.glob(os.path.join(self.version_base_dir, \"**\", \"*\"), recursive=True)\n if os.path.isfile(p)],\n target_columns=target_columns, overwrite=overwrite)", "def load_records(dir):\n\n\t# I saved all the WoS full records for 'machine learning'\n\tfiles =os.listdir(dir)\n\tdf =pd.concat([pd.read_table(df, sep='\\t',index_col = False) for df in [dir+f for f in files]])\n\tdf = df.drop_duplicates()\n\n\t#fix index\n\tindex = range(0, df.shape[0])\n\tdf.index = index\n\n\t#to get all cited refs\n\tcited_refs = 
[set(re.split(pattern='; ', string=str(ref).lower().lstrip().rstrip())) for ref in df.CR]\n\n\t# add as column to dataframe\n\tdf['cited_refs'] = cited_refs\n\n\t# normalise authors\n\tdf.au = [str(au).lower().lstrip().rstrip() for au in df.AF]\n\n\treturn df", "def __merge_ati_files(ati_files: List[str]) -> pd.DataFrame:\n ati_df = pd.DataFrame(columns=consts.ACTIVITY_TRACKER_COLUMN.activity_tracker_columns())\n dataframes = []\n for ati_file in ati_files:\n dataframes.append(pd.read_csv(ati_file, encoding=consts.ISO_ENCODING,\n names=consts.ACTIVITY_TRACKER_COLUMN.activity_tracker_columns()))\n return __merge_dataframes(dataframes, ati_df, ACTIVITY_TRACKER_COLUMN.TIMESTAMP_ATI.value)", "def get_full_data(raw_data_path, return_missing_column_list=False):\n\n # Read raw_data into individual dataframes\n df21 = pd.read_csv(raw_data_path + '/2020-21/gws/merged_gw.csv',\n encoding='utf_8')\n df20 = pd.read_csv(raw_data_path + '/2019-20/gws/merged_gw.csv',\n encoding='utf_8')\n df19 = pd.read_csv(raw_data_path + '/2018-19/gws/merged_gw.csv',\n encoding='ISO-8859-1')\n df18 = pd.read_csv(raw_data_path + '/2017-18/gws/merged_gw.csv',\n encoding='ISO-8859-1')\n df17 = pd.read_csv(raw_data_path + '/2016-17/gws/merged_gw.csv',\n encoding='ISO-8859-1')\n\n # Add a column identifying the season to each dataframe\n df21['season'] = 21\n df20['season'] = 20\n df19['season'] = 19\n df18['season'] = 18\n df17['season'] = 17\n\n # Create a list of all dataframes\n dflist = [df17, df18, df19, df20, df21]\n\n # Create a list of all columns existing in each season\n consistent_columns = []\n for column in list(df21.columns):\n if column in list(df20.columns) and \\\n column in list(df19.columns) and \\\n column in list(df18.columns) and \\\n column in list(df17.columns):\n consistent_columns.append(column)\n\n # Create a dictionary with columns that are only in some years.\n # Key = column name, value = years for which the column exists.\n missing_columns = {}\n for df in dflist:\n for column in list(df.columns):\n if not column in consistent_columns:\n if column in missing_columns.keys():\n missing_columns[column].append(df.loc[0, ['season']][0])\n else:\n missing_columns[column] = [df.loc[0, ['season']][0]]\n\n # Concatenate seasons based on consistent columns\n frames_to_concat = [\n df21[consistent_columns], df20[consistent_columns],\n df19[consistent_columns], df18[consistent_columns],\n df17[consistent_columns]\n ]\n complete_data = pd.concat(frames_to_concat)\n\n # Remove trailing underscores and numbers from names\n complete_data.name = complete_data.name.str.rstrip('_1234567890')\n\n # Swap out underscores for spaces within names\n complete_data.name = complete_data.name.str.replace('_', ' ')\n\n # Drop column 'round' as it is the same as 'GW'\n complete_data.drop(columns=['round'], inplace=True)\n\n # Set names to lowercase\n complete_data.name = complete_data.name.apply(lambda n: n.lower())\n\n # Correct game weeks for 2020\n complete_data['GW'] = complete_data['GW'].apply(correct_2020)\n\n # Match player position\n complete_data, players_raw_seasons = match_position(complete_data, raw_data_path)\n\n # Add dreamteam count for each player for the last season\n complete_data = add_dreamteam_count(complete_data, players_raw_seasons)\n\n # Add team name for last two years to dataframe\n complete_data = add_team(complete_data, players_raw_seasons, raw_data_path)\n\n # Add opponent strength\n complete_data = add_opponent_strength(complete_data)\n\n # Add own team strength\n complete_data = 
add_team_strength(complete_data)\n\n # Sort ascending by player name and kickoff-date and separate kickoff-date and -time\n complete_data = sort_kickoff(complete_data)\n\n if return_missing_column_list:\n return complete_data, missing_columns\n return complete_data", "def shred_sheets(subdomain, audit_date, input_file, _format):\r\n name = extract_dir_name(input_file)\r\n fname = PurePath(input_file).name.__str__()\r\n try:\r\n os.makedirs(name)\r\n except:\r\n pass\r\n\r\n wb = pd.ExcelFile(input_file)\r\n for ws in wb.sheet_names:\r\n data = pd.read_excel(input_file, sheet_name=ws)\r\n # add constants\r\n data.index.names = ['ix']\r\n data['subdomin'] = subdomain\r\n data['audit_date'] = audit_date\r\n\r\n # strip chars we don't want in colum names\r\n cols = data.columns\r\n renamed = []\r\n for col in cols:\r\n col = re.sub('[^a-zA-Z0-9]', '', col)\r\n renamed.append(col)\r\n\r\n data.columns = renamed\r\n\r\n # build output formats\r\n if _format == 'mongo':\r\n client = MongoClient('mongodb://localhost:27017/')\r\n db = client.Sitebulb\r\n cl = db.August5\r\n\r\n try:\r\n cl.insert_many(data.to_dict('records'))\r\n except Exception as e:\r\n click.secho(f'\\nERROR in [{input_file},{ws}] -- {e}', fg='red')\r\n continue\r\n\r\n if _format == 'json' or _format == 'all':\r\n try:\r\n new_file = os.path.join(name, fname + '~' + ws + '.json')\r\n data.to_json(new_file, orient=\"records\")\r\n except Exception as e:\r\n click.secho(f'\\nERROR in [{input_file},{ws}] -- {e}', fg='red')\r\n continue\r\n\r\n if _format == 'csv' or _format == 'all':\r\n try:\r\n new_file = os.path.join(name, fname + '~' + ws + '.csv')\r\n data.to_csv(new_file)\r\n except Exception as e:\r\n click.secho(f'\\nERROR in [{input_file},{ws}] -- {e}', fg='red')\r\n continue", "def index(self):\n for block_dir_relative in sorted(next(os.walk(self.data_dir))[1]):\n td_pairs = self.parse_block(block_dir_relative)\n index_id = 'index_'+block_dir_relative\n self.intermediate_indices.append(index_id)\n with ii.InvertedIndexWriter(index_id, directory=self.output_dir, \n postings_encoding=\n self.postings_encoding) as index:\n self.invert_write(td_pairs, index)\n td_pairs = None\n self.save()\n with ii.InvertedIndexWriter(self.index_name, directory=self.output_dir, \n postings_encoding=\n self.postings_encoding) as merged_index:\n with contextlib.ExitStack() as stack:\n indices = [stack.enter_context(\n ii.InvertedIndexIterator(index_id, \n directory=self.output_dir, \n postings_encoding=\n self.postings_encoding)) \n for index_id in self.intermediate_indices]\n self.merge(indices, merged_index)", "def pre_generate(df_dic):\n file_paths = df_dic.keys()\n main_table_id = {}\n for file in file_paths:\n if find_main_id(ent_dic[file]):\n main_table_id[file] = find_main_id(ent_dic[file])\n for file in main_table_id:\n for other_file in main_table_id:\n fake_col = \"fake_\" + main_table_id[other_file]\n if (other_file != file) and list(set(main_table_id[other_file]) - set(df_dic[file].columns)) \\\n and list(set(main_table_id[other_file]) & set(df_dic[file].columns)):\n\n col_use = list(main_table_id[file]) + list(main_table_id[other_file])\n df_other_fake = df_dic[other_file][col_use]\n df_other_fake[fake_col] = df_other_fake[main_table_id[other_file]]\n del df_other_fake[main_table_id[other_file]]\n df_dic[file] = pd.merge(df_dic[file], df_other_fake, on=[main_table_id[file]],\n how='left').drop_duplicates()\n return df_dic", "def merge_results(targetfiles, skyfiles, tiles, result_dir=\".\",\n result_prefix=\"fba-\", 
result_split_dir=False,\n out_dir=None, out_prefix=\"fiberassign-\", out_split_dir=False,\n columns=None, copy_fba=True):\n # Load the full set of target files into memory. Also build a mapping of\n # target ID to row index. We assume that the result columns have the same\n # dtype in any of the target files. We take the first target file and\n # construct the output recarray dtype from the columns in that file.\n out_dtype = None\n cols = merged_fiberassign_req_columns.copy()\n cols.update(merged_fiberassign_req_columns_at_end)\n dcols = [(x, y) for x, y in cols.items()]\n dcolnames = [x for x in cols.keys()]\n\n tgdata = dict()\n tgdtype = dict()\n tgshape = dict()\n tghead = dict()\n\n skydata = dict()\n skydtype = dict()\n skyshape = dict()\n skyhead = dict()\n\n survey = None\n\n # AR adding any possible *_TARGET column\n # AR future-proofing for SV2, SV3, or other\n # https://github.com/desihub/fiberassign/issues/296\n for tf in targetfiles + skyfiles:\n fd = fitsio.FITS(tf, \"r\")\n minimal_target_columns.update(\n OrderedDict([\n (key,fd[1].get_rec_dtype()[0][key].str)\n for key in fd[1].get_colnames()\n if key[-7:]==\"_TARGET\" and key!=\"FA_TARGET\"]))\n fd.close()\n\n # minimal_target_columns to read\n minimal_dcolnames = [x for x in minimal_target_columns.keys()]\n minimal_dcols = [(x, y) for x, y in minimal_target_columns.items()]\n\n for tf in targetfiles:\n tm = Timer()\n tm.start()\n fd = fitsio.FITS(tf)\n tghead[tf] = fd[1].read_header()\n # Allocate a shared memory buffer for the target data\n tglen = fd[1].get_nrows()\n tgshape[tf] = (tglen,)\n #tgdtype[tf], tempoff, tempisvararray = fd[1].get_rec_dtype()\n\n #select what subset of the 'minimal_dcolnames' are present in the data.\n file_tgdtype, tempoff, tempisvararray = fd[1].get_rec_dtype()\n file_dcolnames = [x for x in file_tgdtype.names]\n dcols_to_read = []\n for i in range(len(minimal_dcolnames)):\n if minimal_dcolnames[i] in file_dcolnames:\n dcols_to_read.append(minimal_dcols[i])\n some_dt = np.dtype(dcols_to_read)\n some_columns = list(some_dt.fields.keys())\n\n #print(file_tgdtype)\n tgdtype[tf] = some_dt\n tgbytes = tglen * tgdtype[tf].itemsize\n tgdata[tf] = RawArray(\"B\", tgbytes)\n tgview = np.frombuffer(tgdata[tf],\n dtype=tgdtype[tf]).reshape(tgshape[tf])\n # Read data directly into shared buffer\n tgview[:] = fd[1].read(columns=some_columns)[some_columns]\n #if survey is None: # AR commented out, not used apparently\n # (survey, col, sciencemask, stdmask, skymask, suppskymask, # AR commented out, not used apparently\n # safemask, excludemask) = default_target_masks(tgview) # AR commented out, not used apparently\n\n # Sort rows by TARGETID if not already done\n tgviewids = tgview[\"TARGETID\"]\n if not np.all(tgviewids[:-1] <= tgviewids[1:]):\n tgview.sort(order=\"TARGETID\", kind=\"heapsort\")\n\n tm.stop()\n tm.report(\"Read {} into shared memory\".format(tf))\n\n # Add any missing columns to our output dtype record format.\n tfcols = list(tgview.dtype.names)\n if columns is not None:\n tfcols = [x for x in tfcols if x in columns]\n for col in tfcols:\n subd = tgview.dtype[col].subdtype\n colname = col\n if col in merged_fiberassign_swap:\n colname = merged_fiberassign_swap[col]\n if colname not in dcolnames:\n if subd is None:\n dcols.extend([(colname, tgview.dtype[col].str)])\n else:\n dcols.extend([(colname, subd[0], subd[1])])\n dcolnames.append(colname)\n\n for tf in skyfiles:\n tm = Timer()\n tm.start()\n fd = fitsio.FITS(tf)\n skyhead[tf] = fd[1].read_header()\n # Allocate a shared memory 
buffer for the target data\n skylen = fd[1].get_nrows()\n skyshape[tf] = (skylen,)\n\n # AR adding here the minimal set of columns to be read\n # AR just copying what is done for the targets,\n # AR with replacing \"tg\" by \"sky\"\n #select what subset of the 'minimal_dcolnames' are present in the data.\n file_skydtype, tempoff, tempisvararray = fd[1].get_rec_dtype()\n file_dcolnames = [x for x in file_skydtype.names]\n dcols_to_read = []\n for i in range(len(minimal_dcolnames)):\n if minimal_dcolnames[i] in file_dcolnames:\n dcols_to_read.append(minimal_dcols[i])\n some_dt = np.dtype(dcols_to_read)\n some_columns = list(some_dt.fields.keys())\n\n #print(file_skydtype)\n skydtype[tf] = some_dt\n skybytes = skylen * skydtype[tf].itemsize\n skydata[tf] = RawArray(\"B\", skybytes)\n\n #skydtype[tf], tempoff, tempisvararray = fd[1].get_rec_dtype() # AR commented out\n #skybytes = skylen * skydtype[tf].itemsize # AR commented out\n #skydata[tf] = RawArray(\"B\", skybytes) # AR commented out\n skyview = np.frombuffer(skydata[tf],\n dtype=skydtype[tf]).reshape(skyshape[tf])\n # Read data directly into shared buffer\n #skyview[:] = fd[1].read() # AR commented out\n skyview[:] = fd[1].read(columns=some_columns)[some_columns]\n\n\n # Sort rows by TARGETID if not already done\n skyviewids = skyview[\"TARGETID\"]\n if not np.all(skyviewids[:-1] <= skyviewids[1:]):\n skyview.sort(order=\"TARGETID\", kind=\"heapsort\")\n\n tm.stop()\n tm.report(\"Read {} into shared memory\".format(tf))\n\n # Add any missing columns to our output dtype record format.\n tfcols = list(skyview.dtype.names)\n if columns is not None:\n tfcols = [x for x in tfcols if x in columns]\n for col in tfcols:\n subd = skyview.dtype[col].subdtype\n colname = col\n if col in merged_fiberassign_swap:\n colname = merged_fiberassign_swap[col]\n if colname not in dcolnames:\n if subd is None:\n dcols.extend([(colname, skyview.dtype[col].str)])\n else:\n dcols.extend([(colname, subd[0], subd[1])])\n dcolnames.append(colname)\n\n\n end_keys = [k for k,v in merged_fiberassign_req_columns_at_end.items()]\n dcols = ([c for c in dcols if c[0] not in end_keys] +\n [c for c in dcols if c[0] in end_keys])\n out_dtype = np.dtype(dcols)\n\n # AR adding any *_TARGET columns to the TARGETS columns\n merged_targets_columns.update(\n OrderedDict([\n (name,out_dtype[name]) for name in out_dtype.names\n if name[-7:]==\"_TARGET\"]))\n\n # For each tile, find the target IDs used. 
Construct the output recarray\n # and copy data into place.\n\n merge_tile = partial(merge_results_tile, out_dtype, copy_fba)\n\n if out_dir is None:\n out_dir = result_dir\n\n tile_map_list = [(x, result_path(x, dir=result_dir, prefix=result_prefix,\n split=result_split_dir),\n result_path(x, dir=out_dir, prefix=out_prefix,\n create=True, split=out_split_dir))\n for x in tiles]\n\n with mp.Pool(processes=default_mp_proc,\n initializer=merge_results_tile_initialize,\n initargs=(tgdata, tgdtype, tgshape, skydata,\n skydtype, skyshape)) as pool:\n results = pool.map(merge_tile, tile_map_list)\n\n return", "def find_records():\r\n\r\n print(\"begin find records\")\r\n\r\n study_list = retrieve_ref('study_list')\r\n sensor_list = retrieve_ref('sensor_list')\r\n # sensor_unit_list = retrieve_ref('sensor_unit_list')\r\n\r\n for study in study_list:\r\n # print('study = ' + str(study))\r\n source_path = os.path.join(study, 'source')\r\n # print('source_path = ' + str(source_path))\r\n\r\n source_folders = os.listdir(source_path)\r\n # print(str(study) + ' source_folders = ')\r\n # print(source_folders)\r\n\r\n df_meta = pd.DataFrame()\r\n df_meta['source_path'] = source_folders\r\n save_meta(study, df_meta)\r\n record_to_summary(study, 'Records found', str(len(source_folders)))\r\n\r\n print(\"completed find records\")", "def importDataFrame(self, file_origin):\n # A list to hold each file's DataFrame\n self.listRawFileDataFrame = []\n\n # Use while loop to add DataFrames of input files to a list\n fileNo = 1\n while fileNo in range(1, self.numberOfFiles + 1):\n # Prompt user for file name\n fileName = self.sourcePath + os.sep + \\\n input(\"File \" + str(fileNo) + \":-\\n\\n \")\n try:\n\n # Read file into DataFrame\n if file_origin == 'X':\n rawFileDataFrame = xcmsReformat.reformat(fileName, self.firstRepOffset)\n self.firstRepOffset = 3\n else:\n rawFileDataFrame = pd.read_table(fileName, sep=',')\n\n # Add additional series to frame listing file number before m/z\n # column, to facillitate record removal based on overlap.\n rawFileDataFrame.insert(\n self.firstRepOffset - 2, 'File', \"File \" + str(fileNo))\n\n # Put time bucket (Retention time as an integer)\n rawFileDataFrame.insert(\n self.firstRepOffset + 1, 'Time Bucket', rawFileDataFrame['Time'].astype(int))\n\n # For all files except the first and last remove the first and\n # last minute in the file\n if fileNo > 1 and fileNo < self.numberOfFiles:\n rawFileDataFrame = rawFileDataFrame[(rawFileDataFrame['Time Bucket'] != rawFileDataFrame[\n 'Time Bucket'].max()) & (rawFileDataFrame['Time Bucket'] != rawFileDataFrame['Time Bucket'].min())]\n # For the first file where more than 1 file remove the last\n # minute in the file for overlap\n elif fileNo == 1 and self.numberOfFiles > 1:\n rawFileDataFrame = rawFileDataFrame[rawFileDataFrame[\n 'Time Bucket'] != rawFileDataFrame['Time Bucket'].max()]\n # For last file where more than 1 file remove the first minute\n # in the file for overlap\n elif fileNo == self.numberOfFiles and self.numberOfFiles > 1:\n rawFileDataFrame = rawFileDataFrame[rawFileDataFrame[\n 'Time Bucket'] != rawFileDataFrame['Time Bucket'].min()]\n # Where only 1 file do not remove any file frames\n # Append the new DataFrame to the list\n self.listRawFileDataFrame.append(rawFileDataFrame)\n # Move to next slot in DataFrame list\n fileNo += 1\n except IOError as e:\n print(\"\\n \" + str(e) + \"!\")\n # Prompt user again for same file number\n\n # Make a copy of the DataFrame at index 0 in the list of input file\n # 
DataFrames, this will always exist\n self.fullRawData = self.listRawFileDataFrame[0].copy()\n\n # Loop through the rest of the list of input file DataFrames and\n # concatenate to the DataFrame from above\n for rawFileDataFrameIndex in range(1, len(self.listRawFileDataFrame)):\n self.fullRawData = pd.concat([self.fullRawData, self.listRawFileDataFrame[\n rawFileDataFrameIndex]], ignore_index=True)\n\n # Remove overlap\n # Count the number of each time bucket for each file\n countFullRawData = pd.DataFrame({'count': self.fullRawData.groupby(\n ['File', 'Time Bucket']).size()}).reset_index()\n # Get a groupby object to index the highest count for each time bucket\n # in countFullRawData\n timeBucketSelectFile = countFullRawData.groupby(\n 'Time Bucket')['count'].agg(lambda col: col.idxmax())\n # A DataFrame listing which file to use for each time bucket\n fileTimeBucketToUse = countFullRawData.ix[\n timeBucketSelectFile, ['File', 'Time Bucket']]\n # Merge fileTimeBucketToUse with fullRawData leaving most abundant over\n # lapping file frames\n self.fullRawData = pd.merge(self.fullRawData, fileTimeBucketToUse, on=[\n 'File', 'Time Bucket'])\n\n # 'Time Bucket' serves no further purpose so drop for clarity\n self.fullRawData.drop('Time Bucket', inplace=True, axis=1)\n # This DataFrame is the curent highest processed DataFrame\n self.processedDataFrame = self.fullRawData.copy()\n\n # Set the firstRepOffset to match the processedDataFrame\n self.incFirstRepOffset()", "def update_existing_records(self,tickr=''):\n stock_master_df=self.__dbconn.get_tickr(tickr)\n #print(stock_master_df.head())\n if stock_master_df.empty:\n raise ValueError(f\"data does not exist for {tickr}\")\n\n last_updated_date=stock_master_df.iloc[0].LastUpdatedDate.strftime('%Y-%m-%d')\n print(f'Last updated date for {tickr},{last_updated_date}')\n current_date=datetime.now().strftime('%Y-%m-%d')\n\n if current_date!=last_updated_date and current_date>last_updated_date:\n df=self.__get_all_data(tickr)\n if not df.empty:\n mask = df.loc[last_updated_date:current_date, :]\n mask=mask.iloc[1:]\n if not mask.empty:\n \"\"\"we found records to be updated\"\"\"\n self.__endDate=datetime(2012,5,12)\n d1=self.__dbconn.get_stock_details(stock_master_df.iloc[0].Id,last_updated_date,self.__endDate.strftime(\"%Y-%m-%d\"))\n d1['Date'] = pd.to_datetime(d1['Date'], format = '%Y-%m-%d')\n d1.set_index(['Date'], inplace=True)\n result=pd.concat([d1,mask])\n df=self.__calculate_all_indicators(result)\n self.__dbconn.delete_stock_details(int(stock_master_df.iloc[0].Id))\n df['stockId']=int(stock_master_df.iloc[0].Id)\n self.__dbconn.save_data('stockdetails',df)\n sql = \"\"\" UPDATE public.stockmaster SET \"LastUpdatedDate\" = %s WHERE \"Id\" = %s\"\"\"\n self.__dbconn.update_stock_master(sql,current_date,int(stock_master_df.iloc[0].Id))\n print(f\"Records updated for {tickr}\")\n else:\n print(\"no rows found for update\")", "def download_all_sheets(self, sheet_id, sheet_name):\n # Get spreadsheet as a whole and iterate through each sheet\n results = self.get_spreadsheet(sheet_id)\n\n print(\"[INFO] Saving {} sheets to {}...\".format(len(results[\"sheets\"]), self.out_dir))\n sheet_name_no_xlsx = sheet_name[:-5]\n \n # Validate sheet name\n if sheet_name_no_xlsx + '.xlsx' != sheet_name:\n raise Exception(f\"[ERROR] Sheet does not have expected name with '.xlsx' extension: {sheet_name}\")\n\n prod_def_dir = os.path.join(self.out_dir, 'product-definitions')\n tsv_dir = os.path.join(prod_def_dir, 'tsv', sheet_name_no_xlsx)\n spreadsheet_dir = 
os.path.join(prod_def_dir, 'spreadsheet')\n \n for sdir in (tsv_dir, spreadsheet_dir):\n if not os.path.isdir(sdir):\n os.makedirs(sdir)\n\n print('[INFO] Saving TSV files to: {}...'.format(tsv_dir))\n worksheets = set()\n\n for sheet in results[\"sheets\"]:\n name = sheet[\"properties\"][\"title\"]\n worksheets.add(name)\n\n # Check worksheet name is valid\n if name not in ALLOWED_WORKSHEET_NAMES:\n print('[ERROR] Worksheet name not recognised: {}'.format(name))\n\n cell_range = \"'{}'!A1:Z{}\".format(name, NROWS_TO_PARSE)\n out_file = os.path.join(tsv_dir, \"{}.tsv\".format(name))\n\n if os.path.isfile(out_file) and not self.regenerate:\n print(f\"[WARNING] Not regenerating TSV...file already exists: {out_file}\")\n else:\n self.write_values_to_tsv(self.get_sheet_values(sheet_id, cell_range), out_file)\n\n # Check the expected worksheet files were processed\n # For general (relating to all products) spreadsheets\n if sheet_name.startswith(\"_\"):\n if not set(workflow_data[sheet_name]) == worksheets:\n raise Exception(f\"[ERROR] Could not find/process all expected worksheets for \"\n f\"spreadsheet '{sheet_name}'. Difference is:\\n\"\n f\"\\tExpected: {sorted(workflow_data[sheet_name])}\\n\"\n f\"\\tFound: {sorted(worksheets)}\")\n\n # For product-specific spreadsheets\n else:\n required = {wsheet for wsheet in workflow_data[\"per-product\"] if \"*\" not in wsheet}\n\n if not required.issubset(worksheets): \n raise Exception(f\"[ERROR] Could not find/process product-specific worksheets \"\n f\"for '{sheet_name}'. Missing: {required.difference(worksheets)}\") \n \n # Now download the raw spreadsheet \n spreadsheet_file = os.path.join(spreadsheet_dir, sheet_name)\n\n if os.path.isfile(spreadsheet_file) and not self.regenerate:\n print(f\"[WARNING] Download not initiated...file already exists: {spreadsheet_file}\")\n return\n else:\n print(f\"[INFO] Saving spreadsheet to: {spreadsheet_file}...\")\n self.save_raw_spreadsheet(sheet_id, spreadsheet_file)", "def get_files(self):\n\n # Grab master data - use existing header, remove unhappy columns\n\n self.df_mas_lab_data = pd.read_csv(\n self.master_csv, dtype=str, usecols=self.columns\n )\n\n # Delete rows, where column FACILITY_TYPE != Independent, Hospital,\n # Physician Office\n facility_type_keep_list = [\"Independent\", \"Hospital\", \"Physician Office\"]\n self.df_mas_lab_data = self.df_mas_lab_data[\n self.df_mas_lab_data[\"FACILITY_TYPE\"].isin(facility_type_keep_list)\n ]\n\n # Make everything a string and remove trailing and leading whitespaces\n self.df_mas_lab_data = self.df_mas_lab_data.astype(str)\n self.df_mas_lab_data = self.df_mas_lab_data.applymap(\n lambda x: x.strip() if isinstance(x, str) else x\n )\n\n print_banner(\"Computing all the Data\")\n print(f\"{len(self.df_mas_lab_data)} original master CLIA labs...\")\n\n # Grab other inputed files to make new data file to compare with\n self.df_new_lab_data = pd.concat(\n [\n pd.read_csv(file, names=self.columns, header=None, dtype=str, usecols=self.columns)\n for file in self.new_files\n ]\n )\n\n # Probably not needed for the new data but just in case:\n # Delete rows, where column FACILITY_TYPE != Independent, Hospital,\n # Physician Office\n self.df_new_lab_data = self.df_new_lab_data[\n self.df_new_lab_data[\"FACILITY_TYPE\"].isin(facility_type_keep_list)\n ]\n\n # Make everything a string and remove trailing and leading whitespaces\n self.df_new_lab_data = self.df_new_lab_data.astype(str)\n self.df_new_lab_data = self.df_new_lab_data.applymap(\n lambda x: x.strip() if 
isinstance(x, str) else x\n )\n\n print(f\"{len(self.df_new_lab_data)} inputted CLIA labs for comparison...\")", "def get_data(self, date_time):\n id_columns = ','.join([col for col in self.table_primary_keys if col not in ['EFFECTIVEDATE', 'VERSIONNO']])\n return_columns = ','.join(self.table_columns)\n with self.con:\n cur = self.con.cursor()\n cur.execute(\"DROP TABLE IF EXISTS temp;\")\n cur.execute(\"DROP TABLE IF EXISTS temp2;\")\n cur.execute(\"DROP TABLE IF EXISTS temp3;\")\n cur.execute(\"DROP TABLE IF EXISTS temp4;\")\n # Store just the unique sets of ids that came into effect before the the datetime in a temporary table.\n query = \"\"\"CREATE TEMPORARY TABLE temp AS \n SELECT * \n FROM {table} \n WHERE EFFECTIVEDATE <= '{datetime}';\"\"\"\n cur.execute(query.format(table=self.table_name, datetime=date_time))\n # For each unique set of ids and effective dates get the latest versionno and sore in temporary table.\n query = \"\"\"CREATE TEMPORARY TABLE temp2 AS\n SELECT {id}, EFFECTIVEDATE, MAX(VERSIONNO) AS VERSIONNO\n FROM temp\n GROUP BY {id}, EFFECTIVEDATE;\"\"\"\n cur.execute(query.format(id=id_columns))\n # For each unique set of ids get the record with the most recent effective date.\n query = \"\"\"CREATE TEMPORARY TABLE temp3 as\n SELECT {id}, VERSIONNO, max(EFFECTIVEDATE) as EFFECTIVEDATE\n FROM temp2\n GROUP BY {id};\"\"\"\n cur.execute(query.format(id=id_columns))\n # Inner join the original table to the set of most recent effective dates and version no.\n query = \"\"\"CREATE TEMPORARY TABLE temp4 AS\n SELECT * \n FROM {table} \n INNER JOIN temp3 \n USING ({id}, VERSIONNO, EFFECTIVEDATE);\"\"\"\n cur.execute(query.format(table=self.table_name, id=id_columns))\n # Inner join the most recent data with the interconnectors used in the actual interval of interest.\n query = \"\"\"SELECT {cols} \n FROM temp4 \n INNER JOIN (SELECT * \n FROM DISPATCHINTERCONNECTORRES \n WHERE SETTLEMENTDATE == '{datetime}') \n USING (INTERCONNECTORID);\"\"\"\n query = query.format(datetime=date_time, id=id_columns, cols=return_columns)\n data = pd.read_sql_query(query, con=self.con)\n return data", "def get_file_df(self, file_list):\n file_dict = {\n file.split(\".\")[0]: {\"Date\": file.split(\".\")[1], \"File\": file}\n for file in file_list\n }\n df = pd.DataFrame(file_dict).T\n df[\"Date\"] = pd.to_datetime(df[\"Date\"])\n df[\"File\"] = df[\"File\"].astype(\"string\")\n df = df.reset_index()\n df.rename(columns={\"index\": \"League\"}, inplace=True)\n df = df.sort_values(by=[\"Date\"], ascending=False)\n return df", "def stage_output(stage_file, excel_filepath):\n # Creates a raw dataframe for the raw excel sheet and a dataframe\n # to be used in the aggregation.\n df_raw, df, site, site_code = _data_reader(stage_file)\n df_raw = df_raw.rename(columns={'Unnamed: 0': ''})\n\n # Seperate data into years and creates an excel file for each year.\n start_year = df.index[0].to_pydatetime()\n start_year = start_year.year\n years = np.arange(start_year, 2017)\n years_str = np.array([str(x) for x in years])\n for year in years_str:\n df_year = df.loc[year]\n df_15_reindex, df_30_reindex, df_1h_reindex, df_1d_reindex = _resampler(\n df_year, year)\n del df_year\n # Saving data to a excel file if to_excel is True.\n save_path = (excel_filepath + site + '_'\n + str(year) + '_stage_data.xlsx')\n\n # Takes raw and each time interval of data and creates a sheet for each.\n writer = pd.ExcelWriter(save_path, engine='xlsxwriter',\n datetime_format='m/d/yyyy h:mm',\n date_format='m/d/yyyy')\n 
df_raw.to_excel(writer, 'raw_stationID_' + site_code, index=False)\n df_15_reindex.to_excel(writer, '15min', index=False, na_rep='#N/A')\n df_30_reindex.to_excel(writer, '30min', index=False, na_rep='#N/A')\n df_1h_reindex.to_excel(writer, 'hourly', index=False, na_rep='#N/A')\n df_1d_reindex.to_excel(writer, 'daily', index=False, na_rep='#N/A')\n\n # Formatting of the excel sheets. Without format1 the time is saved\n # in decimal form in the excel sheet.\n workbook = writer.book\n format1 = workbook.add_format({'num_format': 'h:mm'})\n worksheet_raw = writer.sheets['raw_stationID_' + site_code]\n worksheet_15 = writer.sheets['15min']\n worksheet_30 = writer.sheets['30min']\n worksheet_1h = writer.sheets['hourly']\n worksheet_1d = writer.sheets['daily']\n worksheets = [worksheet_15, worksheet_30, worksheet_1h, worksheet_1d]\n for worksheet in worksheets:\n worksheet.set_column('A:L', 22)\n worksheet.set_column('D:E', 22, format1)\n worksheet_raw.set_column('A:F', 20)\n writer.save()\n workbook.close()\n\n # Deletes dataframes after each year loop to save memory.\n del df_15_reindex, df_30_reindex, df_1h_reindex, df_1d_reindex\n # Deletes dataframes after the year loop is completed to save memory.\n del df_raw, df\n return", "def rebuild_index():\n print('Building indexes...')\n print(data_fldr)\n ndx = []\n for root, _, files in os.walk(data_fldr):\n for f in files:\n if f[-3:].upper() in ['CSV','TXT']:\n ndx.extend(get_index_terms(root + os.sep + f))\n with open(ndx_file, 'w') as fio:\n for i in ndx:\n fio.write(i + '\\n')", "def _index_group_with_subgroup(self, **kwargs):\n\n log.setLevel(self.log_level)\n # get a list of all the uri to index\n uri_list = kwargs.get('uri_list', self.get_uri_list())\n if not uri_list:\n log.info(\"0 items to index\")\n return\n # results = results[:100]\n # Start processing through uri\n batch_file = os.path.join(CFG.dirs.logs, \"batch_list.txt\")\n # with open(batch_file, \"w\") as fo:\n # fo.write(\"{\")\n log.info(\"'%s' items to index\", len(uri_list))\n self.time_start = datetime.datetime.now()\n batch_size = kwargs.get(\"batch_size\", 12000)\n if len(uri_list) > batch_size:\n batch_end = batch_size\n else:\n batch_end = len(uri_list)\n batch_start = 0\n batch_num = 1\n self.batch_data = {}\n self.batch_data[batch_num] = {}\n self.batch_data[batch_num]['main'] = []\n self.batch_uris = {}\n self.batch_uris[batch_num] = []\n for name, indexer in self.other_indexers.items():\n self.batch_data[batch_num][name] = []\n end = False\n last = False\n final_list = []\n expand_index = kwargs.get(\"expand_index\", True)\n while not end:\n log.debug(\"batch %s: %s-%s\", batch_num, batch_start, batch_end)\n sub_batch = []\n j = 0\n for i in range(batch_start, batch_end):\n # for i, subj in enumerate(uri_list[batch_start:batch_end]):\n qry_size = kwargs.get(\"qry_size\", 1000)\n if j < qry_size:\n try:\n sub_batch.append(uri_list.pop()) #subj)\n except IndexError:\n pass\n if j == qry_size -1 or i == batch_end - 1:\n try:\n sub_batch.append(uri_list.pop()) #subj)\n except IndexError:\n pass\n # with open(batch_file, \"a\") as fo:\n # fo.write(json.dumps({str('%s-%s' % (batch_num, i+1)):\n # [item[0].sparql\n # for item in sub_batch]})[1:-1]+\",\\n\")\n if not kwargs.get(\"no_threading\", False):\n th = threading.Thread(name=batch_start + i + 1,\n target=self._index_sub,\n args=(sub_batch,\n i+1,\n batch_num,))\n th.start()\n else:\n self._index_sub(sub_batch, i+1, batch_num)\n j = 0\n final_list += sub_batch\n sub_batch = []\n else:\n j += 1\n 
log.debug(datetime.datetime.now() - self.time_start)\n if not kwargs.get(\"no_threading\", False):\n main_thread = threading.main_thread()\n for t in threading.enumerate():\n if t is main_thread:\n continue\n t.join()\n action_list = []\n for key, items in self.batch_data[batch_num].items():\n if key == 'main':\n es_worker = self.es_worker\n else:\n es_worker = self.other_indexers[key]\n action_list += es_worker.make_action_list(items)\n result = self.es_worker.bulk_save(action_list)\n final_list += self.batch_uris[batch_num]\n self._update_triplestore(result, action_list)\n del action_list\n del self.batch_uris[batch_num]\n del self.batch_data[batch_num]\n try:\n del pyrdf.memorized\n pyrdf.memorized = {}\n except AttributeError:\n pass\n while gc.collect() > 0:\n pass\n # pdb.set_trace()\n batch_end += batch_size\n batch_start += batch_size\n if last:\n end = True\n if len(uri_list) <= batch_size:\n batch_end = len(uri_list)\n last = True\n batch_num += 1\n self.batch_uris[batch_num] = []\n self.batch_data[batch_num] = {}\n self.batch_data[batch_num]['main'] = []\n for name, indexer in self.other_indexers.items():\n self.batch_data[batch_num][name] = []\n log.debug(datetime.datetime.now() - self.time_start)\n # with open(batch_file, 'rb+') as fo:\n # fo.seek(-2, os.SEEK_END)\n # fo.truncate()\n # # fo.close()\n # fo.write(\"}\".encode())", "def write_to_excel(sample_id, \n df_param_indexed_transpose, \n df_val, \n df_val_params, \n output_filename):\n param_sheet = 'Parameters'\n\n # try to open existing workbook: File not found -> create, add parameter sheet with header, add value sheet\n # if file is found, check if current sample exists; if not, write parameters without header, add value \n try:\n print('Trying to open workbook {}.'.format(output_filename))\n df_existing_param = pd.read_excel(output_filename, \n sheet_name=param_sheet)\n if sample_id not in df_existing_param.values:\n print('Writing parameters {} ...'.format(sample_id), end=' ', flush=True)\n start_action = datetime.now()\n append_df_to_excel(output_filename, \n df_param_indexed_transpose, \n param_sheet, \n index=False, \n header=False)\n stop_action = datetime.now()\n duration = stop_action - start_action\n print('Duration: {} seconds.'.format(duration.total_seconds()))\n else:\n # should add check that there aren't already multiple paramter rows with same id, which can\n # occur if the filename and sample id in file don't match (filename manually changed)\n idx_sample_id = df_existing_param.index[df_existing_param[\n 'Sample ID'] == sample_id][0]\n print(\n 'A parameter row with Sample ID {} already exists in this workbook.'.format(sample_id))\n overwrite_parameter = query_yes_no(\n 'Do you want to overwrite this parameter row?')\n if overwrite_parameter:\n print('Overwriting parameters {} ...'.format(sample_id), end=' ', flush=True)\n start_action = datetime.now()\n append_df_to_excel(output_filename, \n df_param_indexed_transpose, \n param_sheet, \n startrow=idx_sample_id+1, \n index=False, \n header=False)\n stop_action = datetime.now()\n duration = stop_action - start_action\n print('Duration: {} seconds.'.format(duration.total_seconds()))\n except FileNotFoundError:\n print('Creating workbook {}'.format(output_filename))\n append_df_to_excel(output_filename, \n df_param_indexed_transpose, \n param_sheet, \n index=False)\n pass\n\n wb = pd.ExcelFile(output_filename)\n\n if sample_id not in wb.sheet_names:\n print('Writing values sheet {} ...'.format(sample_id), end=' ', flush=True)\n start_action = 
datetime.now()\n append_df_to_excel(output_filename, \n df_val_params, \n sample_id, \n header=False)\n append_df_to_excel(output_filename, \n df_val, \n sample_id, \n index=False)\n stop_action = datetime.now()\n duration = stop_action - start_action\n print('Duration: {}'.format(duration.total_seconds()))\n else:\n print('A values sheet with Sample ID {} already exists in this workbook.'.format(sample_id))\n overwrite_sheet = query_yes_no('Do you want to overwrite this values sheet?')\n if overwrite_sheet:\n print('Overwriting values sheet {} ...'.format(sample_id), end=' ', flush=True)\n start_action = datetime.now()\n append_df_to_excel(output_filename, \n df_val_params, \n sample_id, \n startrow=0, \n header=False)\n append_df_to_excel(output_filename, \n df_val, \n sample_id, \n startrow=2, \n index=False)\n stop_action = datetime.now()\n duration = stop_action - start_action\n print('Duration: {}'.format(duration.total_seconds()))", "def get_primers(self, sheetname):\n df_primers_dups = pd.read_excel(self.excel_file, header=0, parse_cols='A:M, O:X', skiprows=2,\n names=['Gene', 'Exon', 'Direction', 'Version', 'Primer_seq', 'Chrom', 'M13_tag',\n 'Batch', 'project', 'Order_date', 'Frag_size', 'anneal_temp', 'Other',\n 'snp_check', 'no_snps', 'rs', 'hgvs', 'freq', 'ss', 'ss_proj', 'other2',\n 'action_to_take', 'check_by'],\n sheetname=sheetname, index_col=None)\n\n to_drop = ['Version', 'M13_tag', 'Batch', 'project', 'Order_date', 'Frag_size', 'anneal_temp', 'Other',\n 'snp_check', 'no_snps', 'rs', 'hgvs', 'freq', 'ss', 'ss_proj', 'other2', 'action_to_take',\n 'check_by']\n\n df_primers_dups = df_primers_dups.where((pd.notnull(df_primers_dups)), None) # easier to work with than NaN\n df_primers = df_primers_dups.drop(to_drop, axis=1)\n df_primers = df_primers.drop_duplicates(subset=('Gene', 'Exon', 'Direction', 'Chrom'))\n df_primers = df_primers.reset_index(drop=True)\n\n return df_primers_dups, df_primers", "def _index_sub(self, uri_list, num, batch_num):\n bname = '%s-%s' % (batch_num, num)\n log.debug(\"batch_num '%s' starting es_json conversion\",\n bname)\n qry_data = get_all_item_data([item[0] for item in uri_list],\n self.tstore_conn,\n rdfclass=self.rdf_class)\n log.debug(\"batch_num '%s-%s' query_complete | count: %s\",\n batch_num,\n num,\n len(qry_data))\n # path = os.path.join(CFG.dirs.cache, \"index_pre\")\n # if not os.path.exists(path):\n # os.makedirs(path)\n # with open(os.path.join(path, bname + \".json\"), \"w\") as fo:\n # fo.write(json.dumps(qry_data))\n data = RdfDataset(qry_data)\n del qry_data\n log.debug(\"batch_num '%s-%s' RdfDataset Loaded\", batch_num, num)\n for value in uri_list:\n try:\n\n self.batch_data[batch_num]['main'].append(\\\n data[value[0]].es_json())\n self.count += 1\n except KeyError:\n pass\n for name, indexer in self.other_indexers.items():\n for item in data.json_qry(\"$.:%s\" % name.pyuri):\n val = item.es_json()\n if val:\n self.batch_data[batch_num][name].append(val)\n self.batch_uris[batch_num].append(item.subject)\n del data\n del uri_list\n log.debug(\"batch_num '%s-%s' converted to es_json\", batch_num, num)", "def merge_logs(dfs):\n return pd.concat(dfs, ignore_index=True)", "def load_plothrm_detailed(filename):\n\n filename = Path(filename)\n\n book = xlrd.open_workbook(filename)\n xlsx_file = pd.ExcelFile(filename)\n\n df_seq = None\n df_meta = None\n\n for sheetidx, sheetname in enumerate(xlsx_file.sheet_names):\n\n print(f'Parsing \"{filename}\" sheet \"{sheetname}\"', flush=True)\n\n try:\n\n sheetnumber = sheetidx + 1\n 
worksheet = book.sheet_by_name(sheetname)\n\n # try to load a value from the row and column\n def value_default(row, col, d):\n if row >= worksheet.nrows or col >= worksheet.ncols:\n return d\n s = worksheet.cell_value(row, col)\n if type(s) is str and len(s) == 0:\n return d\n else:\n return s\n\n # load metadata\n meta = dict()\n meta['sheetname'] = sheetname\n meta['sheetnumber'] = sheetnumber\n meta['plothrm_version'] = str(worksheet.cell_value(0, 1))\n meta['data_filename'] = str(worksheet.cell_value(1, 1))\n meta['seq_filename'] = str(worksheet.cell_value(2, 1))\n meta['nchan'] = int(worksheet.cell_value(3, 1))\n meta['nsamp'] = int(worksheet.cell_value(4, 1))\n meta['sampling_hz'] = float(worksheet.cell_value(5, 1))\n meta['opt_zero_above'] = float(value_default(6, 2, np.nan))\n meta['opt_remove_baselines'] = bool (value_default(6, 4, 0))\n meta['opt_remove_sync'] = bool (value_default(6, 6, 0))\n meta['opt_channel_smooth'] = bool (value_default(6, 8, 0))\n meta['opt_sample_smooth'] = float(value_default(6, 10, np.nan))\n meta['height_res'] = float(worksheet.cell_value(7, 1))\n meta['sync_bound'] = float(worksheet.cell_value(8, 1))\n # TODO load region data at rows 9, 10, 11 (Excel 1-based index rows 10-12)\n\n for k, v in meta.items():\n meta[k] = [v]\n meta = pd.DataFrame(meta)\n\n if df_meta is None:\n df_meta = meta\n else:\n df_meta = df_meta.append(meta)\n\n # load sequence data\n df = pd.read_excel(filename, sheetname, header=13, na_values=['ERROR', 'Infinity'])\n df['sheetname'] = sheetname\n df['sheetnumber'] = sheetnumber\n if df_seq is None:\n df_seq = df\n else:\n df_seq = df_seq.append(df)\n\n except Exception as e:\n print(f' error parsing worksheet \"{sheetname}\": {e}')\n continue\n\n return df_seq.reset_index(drop=True), df_meta.reset_index(drop=True)", "def execute(self):\n\n _logger.info('Setting up database connection and google doc access...')\n self._connect_to_rdr_replica()\n service_key_info = gcp_get_iam_service_key_info(self.gcp_env.service_key_id)\n gs_creds = gspread.service_account(service_key_info['key_path'])\n gs_file = gs_creds.open_by_key(self.doc_id)\n\n # These origin strings will be converted to lowercase when used as query filter values\n for origin in ['Vibrent', 'CareEvolution']:\n self._set_origin_value(origin)\n _logger.info(f'Retrieving consent validation records for {self.origin_value}.....')\n # consent_df will contain all the outstanding NEEDS_CORRECTING issues that still need resolution\n # start_date/end_date refer to the consent authored date range; the validation end date (when the\n # consent_file records were created) is up to a day later than the consent authored end date\n self.consent_df = self._get_consent_validation_dataframe(\n self.report_sql.format_map(SafeDict(start_date=self.start_date.strftime(\"%Y-%m-%d\"),\n end_date=self.end_date.strftime(\"%Y-%m-%d\"),\n validation_end_date=self.validation_end_date.strftime(\"%Y-%m-%d\"),\n report_date=self.report_date.strftime(\"%Y-%m-%d\"),\n origin_filter=self.origin_value.lower())))\n # Workaround: filtering out results for older consents where programmatic PDF validation flagged files\n # where it couldn't find signature/signing date, even though the files looked okay on visual inspection\n self.consent_df = self.remove_potential_false_positives_for_missing_signature(self.consent_df)\n\n # Get all the resolved/OBSOLETE issues for generating resolution stats\n self.resolved_df = self.get_resolved_consent_issues_dataframe()\n _logger.info('Generating report data...')\n 
self.create_weekly_report(gs_file)\n\n _logger.info('Report complete')\n self._clear_report()", "def load_all(self, force_checksum: bool = False):\n self.by_id.clear()\n self.all_ids.clear()\n self.conflicts.clear()\n self.by_path.clear()\n\n # Update all of the indices\n for name, info in self.index_directory.items():\n index = self.cached.get(name, None)\n if index is None:\n index = self.index_builder.create(name, info[\"path\"])\n\n self.index_builder.update(index, force_checksum)\n\n self.indices[name] = index\n\n # Go through each note and detect any conflicts against the global ID registry, and if there are none add\n # the note to the unified id dictionary. If there is a conflict we'll add this note to the conflict\n # dictionary, but we will not remove the conflicting note yet, as this would prevent further conflicts\n # from being detected. Instead we'll handle the original conflict after all of the indices have been\n # merged.\n for note in self.indices[name].notes.values():\n self.by_path[note.file_path] = note\n if note.id is not None:\n self.all_ids.add(note.id)\n if note.id in self.by_id:\n if note.id not in self.conflicts:\n self.conflicts[note.id] = []\n self.conflicts[note.id].append(note)\n else:\n self.by_id[note.id] = note\n else:\n note.state = MetaData.NO_ID\n\n # Now that we're done merging all indices we'll go through all of the conflicts and remove the original note\n # which was first merged into the global dictionary, since it was added based on the happenstance of what\n # order things occurred in, and is not actually privileged over the other notes.\n for id_, conflict_list in self.conflicts.items():\n conflict_list.append(self.by_id[id_])\n del self.by_id[id_]\n for note in conflict_list:\n note.state = MetaData.CONFLICT\n\n for id_, note in self.by_id.items():\n note.state = MetaData.OK\n\n if self.on_load is not None:\n self.on_load(self)", "def merge_docs(self):" ]
[ "0.5289423", "0.52650183", "0.52591753", "0.52265924", "0.51847774", "0.5076782", "0.5037608", "0.50096446", "0.5009643", "0.5001173", "0.4953621", "0.494462", "0.493642", "0.49348438", "0.49298787", "0.48461", "0.48410356", "0.48382828", "0.4834661", "0.48010856", "0.47960046", "0.47942355", "0.47829294", "0.47728148", "0.47712734", "0.47591963", "0.47439104", "0.4741437", "0.47197947", "0.47165987", "0.46961623", "0.46888012", "0.46827033", "0.46570444", "0.46378973", "0.46348286", "0.46290076", "0.46275294", "0.4611373", "0.46091074", "0.46083045", "0.46055934", "0.45987967", "0.45849732", "0.45839274", "0.45729426", "0.4571894", "0.45709482", "0.45661753", "0.45614567", "0.45611468", "0.45591015", "0.45512545", "0.4543873", "0.45434418", "0.45421416", "0.45335487", "0.45323712", "0.4531216", "0.45277557", "0.45149642", "0.45121965", "0.45119068", "0.45071116", "0.44952232", "0.4492542", "0.44874063", "0.4485736", "0.44795448", "0.44790706", "0.44756007", "0.4474384", "0.44662791", "0.44591257", "0.44527248", "0.4451073", "0.44452807", "0.44392818", "0.44361857", "0.44285113", "0.442543", "0.44212216", "0.44208634", "0.4410643", "0.44042897", "0.4402303", "0.4401544", "0.43965185", "0.4393443", "0.43933254", "0.4383896", "0.43755466", "0.43693534", "0.43669584", "0.43627304", "0.4362033", "0.43566865", "0.43497902", "0.43493354", "0.43471527" ]
0.63463265
0
Attempts to parse a surt string into its component parts.
def __init__(self, surt):
    self._surt = surt

    try:
        self.protocol, surt = surt.split('://(')
    except ValueError:
        self.protocol = surt
        surt = ''

    try:
        self.domain, surt = surt.split('/', 1)
    except ValueError:
        self.domain = surt
        surt = ''

    try:
        self.path, surt = surt.split('?')
    except ValueError:
        self.path = surt
        self.query = ''
        surt = ''

    try:
        self.query, self.hash = surt.split('#')
    except ValueError:
        if surt != '':
            self.query = surt
        self.hash = ''
        surt = ''

    if self.path:
        self.path_parts = self.path.split('/')
        self.path_parts = [part for part in self.path_parts if part != '']
    else:
        self.path_parts = []

    if self.domain:
        self.domain_parts = self.domain.replace(')', '').split(',')
        self.domain_parts = [
            part for part in self.domain_parts if part != '']
    else:
        self.domain_parts = []

    self.parts = []
    self.parts.append(self.protocol + '://(')
    for domain_part in self.domain_parts:
        self.parts.append('{},'.format(domain_part))
    self.parts[-1] = '{})'.format(self.parts[-1])
    for path_part in self.path_parts:
        self.parts.append('/{}'.format(path_part))
    self.parts.append(self.query)
    self.parts.append(self.hash)
    self.parts = [part for part in self.parts if part != '']
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(s):\n return s", "def parseString(self, s):\n pass", "def parse(cls, s):\n raise NotImplementedError", "def parse_string(self, in_str):\n match = MAIN_REGEX.search(in_str)\n if not match:\n err_str = \"Unable to parse string: %s\" % in_str\n raise ValueError(err_str)\n self.parse_completed(match.group(1))\n self.parse_priority(match.group(2))\n if match.group(3) and match.group(4):\n self.parse_completion_date(match.group(3))\n self.parse_creation_date(match.group(4))\n else:\n self.parse_creation_date(match.group(3))\n self.parse_description(match.group(5))", "def split_citation_part(string: str):\n\n # Tokenization\n\n # fmt: off\n string = regex.sub(\n r\"(\"\n r\"\\d+(?>\\.\\d+)?[a-z]?|\"\n r\"\\b[ivx]+|\"\n r\"\\b[a-z]\\)?\"\n r\")\"\n r\"(\\sff?\\.|\\sff\\b)\",\n r\"\\1ff.\",\n string,\n flags=regex.IGNORECASE,\n )\n # fmt: on\n tokens = split_unit_number_pattern.split(\n string,\n )\n\n # Building pairs of units with their resp. values\n\n while len(tokens) > 0:\n token = tokens.pop(0)\n if StatutesParser.is_unit(token):\n if len(tokens) > 0:\n unit = StatutesParser.stem_unit(token)\n token = tokens.pop(0)\n numb = token\n assert StatutesParser.is_numb(numb), numb\n else: # when citation ends with unit\n print(\n f\"Citation {string} ends with unit {token}. Ignoring last unit.\"\n )\n break\n\n elif StatutesParser.is_pre_numb(token):\n numb = token\n token = tokens.pop(0)\n if not StatutesParser.is_unit(token):\n print(token, \"is not a unit in\", string)\n continue\n # to fix citation \"§ 30 DRITTER ABSCHNITT\"\n # Last part in now ignored,\n # but reference areas can still be improved.\n unit = StatutesParser.stem_unit(token)\n\n elif StatutesParser.is_numb(token):\n unit = None\n numb = token\n else:\n raise StringCaseException(token, \"in\", string)\n numb = regex.sub(r\"(ff?\\.|ff|\\))$\", \"\", numb)\n yield [unit, numb]", "def parse_string(self, data):\n pass", "def parseString(self, s):\n return self.parser.parseString(s)", "def p_parse(toks):\n return p_question_group.parseString(toks[0])", "def parse_rads_component(storage: RadsStorage, component: str):\n\n m = re.match(r'^(?:([sp]):)?(\\w+)(?:=(|[0-9]+(?:\\.[0-9]+)*|main)?)?$', component)\n if not m:\n raise ValueError(f\"invalid component: {component}\")\n typ, name, version = m.group(1, 2, 3)\n if not typ:\n if name == 'patch':\n typ = 'patch'\n elif name.endswith('_sln'):\n typ = 's'\n else:\n typ = 'p'\n\n if typ == 'p':\n project = RadsProject(storage, name)\n if version is None:\n return project\n elif version == '':\n return project.versions()[0]\n else:\n return RadsProjectVersion(project, RadsVersion(version))\n elif typ == 's':\n solution = RadsSolution(storage, name)\n if version is None:\n return solution\n elif version == '':\n return solution.versions()[0]\n else:\n return RadsSolutionVersion(solution, RadsVersion(version))\n elif typ == 'patch':\n if version is None:\n raise ValueError(f\"patch requires a version\")\n elif version == '':\n return storage.patch(None)\n else:\n return storage.patch(version)", "def parse(t):\n return t", "def parseStr(s):\n\n return _parseHelper(s.split(\"\\n\"))", "def parse_str( s: str ) -> list:\n\n tree = ET.fromstring( s )\n if tree is None: return None\n return parse_tree( tree )", "def parse_template(string):\n count = 0\n list1 = []\n for character in string:\n count = count + 1\n if character == \"{\":\n end = string.find(\"}\", count)\n s_strg = string[count:end]\n list1.append(s_strg)\n string = string.replace(s_strg, \"\", 1)\n count = count - 
len(s_strg)\n\n subs = tuple(list1)\n\n return(string, subs)\n print(subs)", "def test_beginstring_combos():\n vc = vtec.parse(\"/O.NEW.KJAN.TO.W.0130.000000T0000Z-000000T0000Z/\")\n assert vc[0].get_begin_string(None) == \"\"", "def _parse(self, fmtstr):\n def _match_brace(string, start_pos, pair='[]'):\n \"\"\"Pairing brackets (used internally in _parse method)\"\"\"\n depth = 1\n if string[start_pos] != pair[0]:\n return None\n for index, char in enumerate(string[start_pos + 1:]):\n if char == pair[0]:\n depth += 1\n elif char == pair[1]:\n depth -= 1\n if depth == 0:\n return start_pos + index + 1\n return None\n\n #----------------------------------------------------------------------\n\n t_fmt = self.__class__._T_FMT\n t_prefix = self.__class__._T_PREFIX\n\n ptr = 0\n # it seems that field id 0 is invalid\n field_id = 1\n length = len(fmtstr)\n parsed_list = []\n\n while ptr < length:\n parsed = {}\n m_prefix = t_prefix.match(fmtstr[ptr:])\n if m_prefix:\n ptr += _get_length_of_match(m_prefix)\n parsed['prefix'] = m_prefix.group(1)\n\n # check if we have a nested structure\n if m_prefix.group(2):\n brace_offset = _match_brace(fmtstr, ptr - 1)\n\n # bracket not match\n if not brace_offset:\n raise BadFormatString(\n 'Unmatched brace on position {0}'.format(ptr)\n )\n parsed['field_id'] = field_id\n parsed['field_type'] = 'a'\n parsed['subcontent'] = self._parse(\n fmtstr[ptr:brace_offset]\n )\n ptr = brace_offset + 1\n field_id += 1\n\n parsed_list.append(parsed)\n continue\n m_fmt = t_fmt.match(fmtstr[ptr:])\n if m_fmt:\n ptr += _get_length_of_match(m_fmt)\n\n # fmt is an alias\n if m_fmt.group(2):\n parsed['field_type'] = self.__class__\\\n .FIELD_ALIAS[m_fmt.group(2)]\n # fmt is an actual field type\n elif m_fmt.group(1):\n parsed['field_type'] = m_fmt.group(1)\n\n # save field id\n parsed['field_id'] = field_id\n\n # check for type clones (e.g. 
`v3')\n if m_fmt.group(3):\n parsed['repeat'] = int(m_fmt.group(3))\n field_id += int(m_fmt.group(3))\n else:\n parsed['repeat'] = 1\n field_id += 1\n\n parsed_list.append(parsed)\n\n else:\n raise BadFormatString(\n 'Invalid token on position {0}'.format(ptr)\n )\n\n # all set\n return parsed_list", "def parseOne(self, s):\n \n m = self.re.match(s)\n if not m:\n self.findParsingFailure(s)\n mtuple = m.groups()\n\n # Strip douple quotes\n if self.strings:\n mlist = list(mtuple)\n for si in self.strings:\n s = mlist[si]\n if len(s) == 0: continue\n\n if s[0] == '\"':\n if s[-1] == '\"':\n s = s[1:-1]\n else:\n logging.log(50, 'missing end-quote in %s: %s' % (self.name, s,))\n s = s[1:]\n s = s.replace(r'\\\\\\\\', r'\\\\').replace(r'\\\"', r'\"') \n mlist[si] = s\n \n # Strange lesson mentioned in idlutils, and seen at least in idReport files.\n if len(s) > 0 and s[0] == '{': \n if re.search(\"^{ *{ *} *}\\s*$\", s):\n mlist[si] = \"\"\n mtuple = tuple(mlist)\n \n # It actually pays to know the length of the longest string, for .seal()\n strlens = list(map(len,mtuple))\n self.strlens = np.array([self.strlens, strlens]).max(axis=0) \n \n self.records.append(mtuple)", "def parse_info(s:str) -> dict:\n d = {}\n d[\"SVTYPE\"] = re.search(r'(?<=SVTYPE=)\\w+',s).group(0)\n d[\"SUPPORT\"] = re.search(r'(?<=SUPPORT=)\\d+',s).group(0)\n if d[\"SVTYPE\"] in [\"BND\"]:\n return d\n d[\"END\"] = re.search(r'(?<=END=)\\d+',s).group(0)\n if d[\"SVTYPE\"] in [\"INV\"]:\n return d\n d[\"SVLEN\"] = re.search(r'(?<=SVLEN=)(.*?)(?=;)',s).group(0)\n d[\"READS\"] = re.search(r'(?<=READS=)(.*?)(?=$)',s).group(0).split(\",\")\n if d[\"SVTYPE\"] == \"INS\":\n d[\"SEQS\"] = re.search(r'(?<=SEQS=)(.*?)(?=;)',s).group(0).split(\",\")\n return d", "def parse(jidstring):\n user = None\n host = None\n resource = None\n\n # Search for delimiters\n user_sep = jidstring.find(\"@\")\n res_sep = jidstring.find(\"/\")\n\n if user_sep == -1:\n if res_sep == -1:\n # host\n host = jidstring\n else:\n # host/resource\n host = jidstring[0:res_sep]\n resource = jidstring[res_sep + 1:] or None\n else:\n if res_sep == -1:\n # user@host\n user = jidstring[0:user_sep] or None\n host = jidstring[user_sep + 1:]\n else:\n if user_sep < res_sep:\n # user@host/resource\n user = jidstring[0:user_sep] or None\n host = jidstring[user_sep + 1:user_sep + (res_sep - user_sep)]\n resource = jidstring[res_sep + 1:] or None\n else:\n # host/resource (with an @ in resource)\n host = jidstring[0:res_sep]\n resource = jidstring[res_sep + 1:] or None\n\n return prep(user, host, resource)", "def _decompose(cls,\n s = '',\n element = False):\n\n s = s.strip()\n\n x = cls._html.findall(s)\n if len(x) > 0:\n s = ''.join(x[0][::-1])\n\n s = cls._translate.get(s.lower(), s)\n\n name = s.strip()\n n = len(name)\n el = ''\n a = ''\n e = ''\n\n # get numbers\n n = re.findall(\"\\d+\", name)\n\n # get strings\n cx = re.findall(\"\\D+\", name)\n\n c = []\n for x in cx:\n xx = x.split('-')\n cy = [y for y in xx if y != '']\n c += cy\n if len(c) == 2:\n if c[0] in ('m', 'g'):\n c = c[::-1]\n if c[0][0] == '*':\n c = c[::-1]\n if len(n) > 0: a = n[0]\n if len(n) > 1: e = n[1]\n if len(n) > 2: raise ValueError(\"Can't understand isotope '{}'.\".format(s))\n if len(c) > 0: el = c[0]\n if len(el) > 0:\n if el[-1] in cls.EXCITE and len(c) == 1 and len(n) == 2:\n c.append(el[-1])\n el = el[:-1]\n if len(c) == 2 and c == ['(', ')']:\n if len(n) == 1:\n a = n[0]\n el = 'Z='\n e = ''\n c = []\n n = []\n else:\n return (s,) + ('',)*3\n if len(c) == 2:\n if c[1] in ('g', 
'G'):\n e = '0'\n if len(n) > 1:\n return (s,) + ('',)*3\n elif c[1] in ('m', 'M') and len(n) == 1:\n e = '1'\n elif c[1][0] == '*' and len(n) == 1:\n e = str(len(c[1]))\n assert c[1].count('*') == len(c[1])\n if e == '1':\n e = str(cls.EANY)\n if not c[1] in ('m', 'g', 'M', 'G') and not c[1][0] == '*':\n return (s,) + ('',)*3\n\n if len(c) == 1 and c[0][-1] == '*':\n e = 0\n while c[0][-1] == '*':\n c[0] = c[0][:-1]\n e += 1\n assert e == 1\n e = str(e)\n el = c[0]\n\n if len(c) == 1 and c[0][0] == '*':\n e = 0\n while c[0][0] == '*':\n c[0] = c[0][1:]\n e += 1\n assert e == 1\n e = str(e)\n el = c[0]\n\n if s == 'a' and a == '':\n el = 'He'\n a = '4'\n # this is a possible conflict with potassium\n elif (element) and s == 'p':\n el = 'P'\n elif s == 'p':\n el = 'H'\n a = '1'\n elif el in ('p', 'pn') and a == '1':\n el = 'H'\n elif s == 'pn':\n el = 'H'\n a = ''\n elif el in ('d', 'D'):\n el = 'H'\n if not a in ('', '2'):\n raise AttributeError('\"d\" already implies mass; if supplied needs to be \"2\".')\n a = '2'\n elif el in ('t','T'):\n el = 'H'\n if not a in ('', '3'):\n raise AttributeError('\"t\" already implies mass; if supplied needs to be \"3\"')\n a = '3'\n elif (element) and s == 'n':\n el = 'N'\n elif s == 'n':\n el = 'nt'\n a = '1'\n elif el in ('n', 'nt') and a == '1':\n el = 'nt'\n elif s in ('g', 'G'):\n el = ''\n a = ''\n e = '1'\n elif (s.lower() in ('e-', 'b-', 'bd', 'pc')):\n s = el = 'e-'\n elif ((s.lower() in ('e+', 'b+', 'ec'))\n or ((not element) and (s.lower() == 'pd'))):\n s = el = 'e+'\n elif ((not element) and (s.lower() == 'ps')):\n s = 'h1'\n a = '1'\n el = 'h'\n elif ((not element) and (s.lower() == 'ns')):\n s = 'nt1'\n a = '1'\n el = 'nt'\n el = el.strip()\n# if len(el) == 2 and el(2)\n a = a.strip()\n e = e.strip()\n return s, el, a, e", "def parse_string(cstr):\n ret = ''\n if _RUNNING_PYTHON3 and ULog._disable_str_exceptions:\n ret = _parse_string(cstr, 'ignore')\n else:\n ret = _parse_string(cstr)\n return ret", "def parse_component(component):\n name = component.find('Name').text\n #hardware_id = component.find('HardwareId').text\n #fixed_id = component.find('FixedId').text\n variables = [parse_variable(var) for var in component.findall(\"Variables/Variable\")]\n return Component(name, variables)", "def state_str_parse(self):\n # no completed mesg received\n if self.state_buf_str.find(self.end_indicator) == -1:\n return None\n\n # split completed data and uncompleted data\n last_ei_idx = self.state_buf_str.rfind(self.end_indicator)\n temp = self.state_buf_str[:last_ei_idx]\n self.state_buf_str = self.state_buf_str[last_ei_idx+1:]\n\n # parse the string\n temp = temp.split(self.end_indicator)\n while '' in temp:\n temp.remove('')\n for i in range(len(temp)):\n temp[i] = temp[i].split(self.separator)\n while '' in temp[i]:\n temp[i] = temp[i].remove('')\n return temp", "def _parse_wkt(s):\n if s.startswith('SRID'):\n s = s[s.index(';') + 1:]\n return shapely.wkt.loads(s)", "def _parse(self):\n\n self.specification = {}\n\n while True:\n try:\n line = self._lines.current\n if ':' in line:\n self.specification.update(self._parse_spec())\n elif line.startswith('TOUR_SECTION'):\n next(self._lines)\n self.tour = self._parse_tour()\n else:\n break\n except StopIteration:\n break\n\n del self._lines\n\n if 'TYPE' in self.specification and \\\n self.specification['TYPE'] != 'TOUR':\n raise TypeError('Unsupported TSPLib file type. 
Only TOUR type \\\n is supported')", "def parse(str):\n if len(str) != 16:\n raise ValueError(\"Invalid time length %d\" % len(str))\n if (str[-1]) == 'R':\n return parse_relative_time(str)\n return parse_absolute_time(str)", "def _parse_crs(crs):\n\n #\n # NOTE: This doesn't currently throw an error if the EPSG code is invalid.\n #\n if isinstance(crs, CRS):\n parsed = crs\n elif isinstance(crs, str):\n try:\n # proj-string or wkt\n parsed = CRS.from_string(crs)\n except CRSError:\n # wkt\n parsed = CRS.from_wkt(crs)\n elif isinstance(crs, dict):\n parsed = CRS(crs)\n elif isinstance(crs, int):\n parsed = CRS.from_epsg(crs)\n else:\n raise CRSError('Could not parse CRS: {}'.format(crs))\n\n return parsed", "def parse(self, string, root=None):\n\n\t\tphrases = []\n\n\t\tmeta = self.meta.search(string)\n\n\t\twhile meta:\n\n\t\t\t# Save some function calls\n\t\t\tpos = meta.start()\n\n\t\t\tif meta.group() == \"<\":\n\t\t\t\tstring, child, meta = self.open_phrase(string, pos)\n\n\t\t\t\tif child and root:\n\t\t\t\t\troot.nested.append(child)\n\t\t\t\telif child:\n\t\t\t\t\tphrases.append(child)\n\n\t\t\t\t# else it was escaped (+ new meta)\n\t\t\t\tcontinue\n\n\t\t\telif root:\n\n\t\t\t\tif meta.group() == \"(\":\n\t\t\t\t\tmeta = self.meta.search(string, pos + 1)\n\t\t\t\t\tif meta.group() == \")\":\n\t\t\t\t\t\tstring, root, meta = self.handle_arguments(string,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t root,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t pos,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t meta.start())\n\t\t\t\t\t\tcontinue\n\n\t\t\t\telif meta.group() == \">\":\n\t\t\t\t\tstring, phrase, meta = self.close_phrase(string,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t root,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t pos)\n\t\t\t\t\tif phrase:\n\t\t\t\t\t\treturn string, phrase\n\n\t\t\t\t\t# else was escaped (+ new meta)\n\t\t\t\t\tcontinue\n\n\t\t\tstring, meta = self.escape_meta(string, pos)\n\n\t\tif not root:\n\t\t\treturn string, phrases\n\n\t\t# If this is not the first stack-depth the function should\n\t\t# have returned upon finding a closing tag,\n\t\t# i.e. we should never have gotten here.\n\t\tword = re.search(r\"([\\w\\s]+)(?![\\d]*>[\\w\\s]+>)\", string)\n\n\t\twhat = \"No closing tag found for opening tag\"\n\n\t\tif word:\n\t\t\twhat += \" after expression '{0}'\".format(word.group())\n\n\t\traise errors.ParseError(what + \"!\")", "def _parse_interval_str(cls, s):\n\n start, stop = s.split(':')\n if start == '':\n start = 0\n else:\n start = int(start)\n if stop == '':\n stop = None\n else:\n stop = int(stop)\n return slice(start, stop)", "def _parseString(self, instring):\n\n if self._compiled is None:\n self.compile()\n\n match = self._compiled.match(instring)\n if match is None:\n return None\n\n Count.reset()\n struct = deepcopy(self.structure)\n\n mymatch, substructs, preprocess_func = self._parse_preprocess(match)\n struct.map(preprocess_func)\n struct.map(self._func_parse_leaf(mymatch, substructs))\n struct.parse_end = match.end()\n return struct", "def parseString(self, val):\n \n if not isinstance(val, str):\n raise Exception('Input must be a string!')\n if len(val) < 9:\n raise Exception( 'ESDT Names must be 9 characters!' 
)\n self.setType( val[:2] )\n self.setTime( val[2] )\n self.setFrequency( val[3] )\n self.setHRes( val[4] )\n self.setVRes( val[5] )\n self.setGroup( val[6:9] )\n tmp = val.split('.')\n if len(tmp) == 4:\n self.setVersion( *tmp[1:] )", "def parse_campus(p: str) -> Campus:\n # return '' #stub\n #template from optional\n if p is '':\n return None\n else:\n return p", "def parse_track_string(track):\n sanitized_performer_index = (args.performer_index\n if args.performer_index else 0)\n if len(track) < (max(args.name_index, args.time_index, args.performer_index\n if args.performer_index else 0) + 1):\n raise ValueError(\n 'Not enough fields for track {}, skipping.'.format(track))\n\n name = track[args.name_index]\n time_string = track[args.time_index]\n performer = (track[args.performer_index]\n if args.performer_index else args.performer)\n\n logger = logging.getLogger(__name__)\n logger.debug('Got name %s, time %s, and performer %s.', name, time_string,\n performer)\n\n return name, parse_time_string(time_string), performer", "def haiku_string_parser():\n pass", "def parse_string(str_value):\n comps = str_value.split(\"/\")\n assert len(comps) <= 2, \"Cannot parse {0} as Rational\".format(str_value) \n \n num_string = comps[0].strip()\n if (len(comps) == 1):\n return Rational(int(num_string))\n else:\n den_string = comps[1].strip()\n return Rational(int(num_string), int(den_string))", "def parse(s):\n\n rise = False\n set = False\n if s[-1:] == \"R\":\n rise = True\n s = s[:-1]\n elif s[-1:] == \"T\":\n set = True\n s = s[:-1]\n \n x = s.split(\":\")\n if len(x) == 1:\n x.append(\"0\")\n if len(x) == 2:\n x.append(\"0\")\n \n return Time(int(x[0]), int(x[1]), int(x[2]), after_sunrise=rise,\n after_sunset=set)", "async def parse(self, raw: str) -> dict:", "def parse_raw_string(string):\n raw = string.split('\\n')\n # ignore time elapsed between the recording start\n # and the arrival of the first IR signal\n raw = raw[2:]\n return ''.join(raw).split()", "def parse_feature_value(s,next_index=0):\n next_index = jump_over_space(s,next_index)\n start_index = next_index\n while True:\n if not s[next_index].isspace():\n next_index += 1\n else:\n break\n feature_value = s[start_index:next_index]\n if feature_value == '':\n feature_value = None\n feature_value = feature_value.split('/')\n return (feature_value,next_index)", "def structure_parse(source):\r\n return structure_grammar().parseString(source)", "def parse(s):\n return expr.parseString(s, parseAll=True)", "def test_parse_part_A():\n\n result = parse_part_A(PART_A)\n\n assert result, \"unexpected result %r\" % result\n # time-parsing is a test-case on it's own\n assert result[1] == u\"OSD4l1BEUOkAAHZ8Y3QAAAAH\"\n assert result[2] == u\"209.90.77.54\"\n assert result[3] == 64995\n assert result[4] == u\"80.68.80.233\"\n assert result[5] == 80", "def parse_job_string(cls, st):\n toks = st.split(',')\n assert 10 == len(toks)\n my_id = int(toks[0])\n acc_r, acc_s = toks[1:3]\n #assert acc_r[1:3] == 'RR'\n #assert acc_s[1:3] == 'RP'\n url_1, url_2, url_3, checksum_1, checksum_2, checksum_3 = toks[3:9]\n\n def noneize(x):\n if x == 'None' or x == 'NA':\n return None\n else:\n return x\n\n url_1 = noneize(url_1)\n url_2 = noneize(url_2)\n url_3 = noneize(url_3)\n checksum_1 = noneize(checksum_1)\n checksum_2 = noneize(checksum_2)\n checksum_3 = noneize(checksum_3)\n retrieval_method = toks[9]\n assert retrieval_method != 'None' and retrieval_method != 'NA'\n return my_id, acc_r, acc_s, url_1, url_2, url_3, \\\n checksum_1, checksum_2, checksum_3, 
retrieval_method", "def get_chrom_start_end_from_string(s):\n try:\n chrom, s_e = s.split('__substr__')\n start, end = s_e.split('_')\n return chrom, int(start), int(end)\n except Exception:\n raise ValueError(\"String %s must be of format '{chrom}__substr__{start}_{end}'\" % s)", "def parse(self, string, parse_all=False):\n return self._parseString(string, parse_all=parse_all)", "def input_parser(input_string: str) -> str: \n if is_int(input_string):\n return input_string\n #he is int, give back plz.\n else:\n try:\n modified_input: str = input_string.strip()\n\n evaluatable_pairs: str = regex_splitter(modified_input)\n\n while not (is_int(evaluatable_pairs)):\n evaluatable_pairs = regex_splitter(evaluatable_pairs)\n\n return (evaluatable_pairs)\n\n except:\n raise Exception(\"Invalid Input\")", "def __parse_hgvs_syntax(self, hgvs_str):\n self.is_valid = True # assume initially the syntax is valid\n if self.is_substitution:\n sub_pattern = '(?:(\\d+)([+-]\\d+)?_)?(\\d+)([+-]\\d+)?([A-Z]+)>([A-Z]+)$'\n matches = re.findall(sub_pattern, hgvs_str)\n if matches:\n init_pos, init_intron, reg_pos, reg_intron, initial, mutated = matches[0]\n if not init_pos:\n self.pos = int(reg_pos)\n self.intron_pos = int(reg_intron) if reg_intron != '' else None\n self.initial = initial\n self.mutated = mutated\n else:\n init_pos = init_pos.strip('_') # remove separating underscore\n self.pos = [int(init_pos), int(reg_pos)]\n intron_tmp1 = int(init_intron) if init_intron != '' else None\n intron_tmp2 = int(reg_intron) if reg_intron != '' else None\n self.intron_pos = [intron_tmp1, intron_tmp2]\n self.initial = initial\n self.mutated = mutated\n else:\n self.is_valid = False\n self.intron_pos = None\n self.logger.debug('(Parsing-Problem) Invalid DNA Substitution: ' + hgvs_str)\n return\n elif self.is_deletion:\n del_pattern = '(?:([0-9?]+)([-+]\\d+)?(?:_))?([0-9?]+)([-+]\\d+)?del([A-Z?0-9]+)$'\n matches = re.findall(del_pattern, hgvs_str)\n if matches:\n init_pos, init_intron, reg_pos, reg_intron, del_nuc = matches[0]\n if not init_pos:\n # only one nucleotide deleted\n self.pos = int(reg_pos) if reg_pos != '?' else reg_pos\n self.intron_pos = int(reg_intron) if reg_intron != '' else None\n self.mutated = ''\n self.initial = del_nuc\n else:\n # more than one nucleotide deleted\n init_pos = init_pos.strip('_') # remove '_' because of regex\n pos1 = int(init_pos) if init_pos != '?' else init_pos\n pos2 = int(reg_pos) if reg_pos != '?' else reg_pos\n self.pos = [pos1, pos2]\n intron_tmp1 = int(init_intron) if init_intron != '' else None\n intron_tmp2 = int(reg_intron) if reg_intron != '' else None\n self.intron_pos = [intron_tmp1, intron_tmp2]\n self.mutated = ''\n self.initial = del_nuc\n else:\n self.intron_pos = False\n elif self.is_insertion:\n ins_pattern = '(?:([0-9?]+)([-+]\\d+)?(?:_))?([0-9?]+)([-+]\\d+)?ins([A-Z?0-9]+)$'\n matches = re.findall(ins_pattern, hgvs_str)\n if matches:\n init_pos, init_intron, reg_pos, reg_intron, ins_nuc = matches[0]\n if not init_pos:\n # only one nucleotide inserted\n self.pos = int(reg_pos) if reg_pos != '?' else reg_pos\n self.intron_pos = int(reg_intron) if reg_intron != '' else None\n self.initial = ''\n self.mutated = ins_nuc\n else:\n # more than one nucleotide inserted\n init_pos = init_pos.strip('_') # remove '_' because of regex\n pos1 = int(init_pos) if init_pos != '?' else init_pos\n pos2 = int(reg_pos) if reg_pos != '?' 
else reg_pos\n self.pos = [pos1, pos2]\n intron_tmp1 = int(init_intron) if init_intron != '' else None\n intron_tmp2 = int(reg_intron) if reg_intron != '' else None\n self.intron_pos = [intron_tmp1, intron_tmp2]\n self.initial = ''\n self.mutated = ins_nuc\n else:\n self.intron_pos = None\n elif self.unknown_effect:\n # unknown effect for mutation. usually denoted as c.?\n self.intron_pos = None\n return\n else:\n # mutation did not fall into any of the categories. thus it likely\n # has invalid syntax\n self.is_valid = False\n self.intron_pos = None\n self.logger.debug('(Parsing-Problem) Invalid HGVS DNA syntax: ' + hgvs_str)\n return", "def parse_expression_into_parts(expression):\n raise NotImplementedError(\"complete me!\")", "def _parse(self, string):\n modern_scheme = r\"\"\"\nssh://\n(?:\n (?P<user>[^@]+)\n@)? # user is anything but @, then the @ separator\n(?P<host>[^:/]+) # host is anything but : and /\n(:(?P<port>\\d+))? # optional port\n(/(?P<remote_dir>.*))? # optional remote directory\n\"\"\"\n match = re.match(modern_scheme, string, re.VERBOSE)\n if match:\n self._handle_match(match)\n else:\n old_scheme = \"\"\"\n(?P<user>[^@]+) # user is anything but @, and optional\n@ # mandatory @ separator\n(?P<host>[^:/]+) # host is anything but : and /\n(\n (:|/)? # directory separator is either : or /\n (?P<remote_dir>.*))? # remote directory is optional\n \"\"\"\n match = re.match(old_scheme, string, re.VERBOSE)\n if match:\n self._handle_match(match)\n else:\n raise URLParseError(\"\"\" \\\nCould not parse %s as a valid url.\nSupported schemes are\n\n user@host:directory\n\n ssh://user@host:port/directory\n\"\"\" % self.as_string)", "def parse_tags(source):\n unmatched_count = 0\n start_pos = 0\n opened = False\n open_pos = 0\n cur_pos = 0\n\n finished = []\n segments = []\n\n for character in source:\n #scan for mismatched parenthesis:\n if character == '(':\n unmatched_count += 1\n if not opened:\n open_pos = cur_pos\n opened = True\n\n if character == ')':\n unmatched_count -= 1\n\n if opened and unmatched_count == 0:\n clean = source[start_pos:open_pos]\n clean = clean.strip()\n if clean:\n finished.extend(clean.split())\n\n segment = source[open_pos:cur_pos+1]\n #segments.append(segment)\n \n #get rid of bounding parentheses:\n pruned = segment[1:-1]\n group = pruned.split()\n finished.append(group)\n\n opened = False\n start_pos = cur_pos+1\n \n cur_pos += 1\n\n assert unmatched_count == 0\n\n if start_pos != cur_pos:\n #get anything that was left over here\n remainder = source[start_pos:cur_pos].strip()\n finished.extend(remainder.split())\n \n ## #now check on recursion:\n ## for item in segments:\n ## #get rid of bounding parentheses:\n ## pruned = item[1:-1]\n ## if recurse:\n ## results = parse_tags(pruned, recurse)\n ## finished.expand(results)\n ## else:\n ## finished.append(pruned.strip())\n \n return finished", "def parse(s):\n Term.str = s\n new_term = None\n while Term.str != '':\n new_term, Term.str = Term.parse_prefix(Term.str)\n return new_term", "def _parse(val: str):\n\n if not isinstance(val, str):\n raise TypeError(\"Method requires string input\")\n\n value = re.findall(r'^([-+]?\\d*\\.\\d*(?=\\s)|\\d+(?=\\s))', val)\n if not (value and val[:len(value[0])] == value[0]):\n return val, None\n\n # string starts with value\n value = value[0]\n val = val[len(value):]\n\n val = val.strip()\n if val:\n unit = val\n else:\n unit = 'dimensionless'\n\n return value, unit", "def _combineFragmentedString (cls, st : String) -> String:\n\n Logging.trace(\">>: %r\", 
st)\n\n ParseState_inLimbo = 0\n ParseState_inOther = 1\n ParseState_inString = 2\n ParseState_inLiteral = 3\n ParseState_inEscape = 4\n\n parseState = ParseState_inLimbo\n result = \"\"\n\n for ch in st:\n # process finite state automaton with three states based\n # on next character in string\n # Logging.trace(\"--: (%d) character: %r\", parseState, ch)\n\n if parseState == ParseState_inLimbo:\n if ch == cls._doubleQuoteCharacter:\n parseState = ParseState_inString\n elif not cls._whiteSpaceCharRegExp.search(ch):\n parseState = ParseState_inLiteral\n result += ch\n elif parseState == ParseState_inString:\n if ch == cls._doubleQuoteCharacter:\n parseState = ParseState_inLimbo\n else:\n result += ch\n parseState = iif(ch == cls._escapeCharacter,\n ParseState_inEscape, parseState)\n elif parseState == ParseState_inLiteral:\n result += ch\n if cls._whiteSpaceCharRegExp.search(ch):\n parseState = ParseState_inLimbo\n elif parseState == ParseState_inEscape:\n result += ch\n parseState = ParseState_inString\n else:\n Assertion.check(False,\n \"bad parse state - %s\" % parseState)\n\n Logging.trace(\"<<: %r\", result)\n return result", "def parse(self, string):\r\n # Tidy up our line\r\n string = self._check_line_is_good(string)\r\n \r\n # Break up into origin, token and body\r\n high_level_parts = string.split(None, 2)\r\n origin = parse_numeric(high_level_parts[0], self._maxclientnum)\r\n command = high_level_parts[1]\r\n if not command.isupper() and not command.isdigit():\r\n raise ProtocolError('Command not in uppercase', string)\r\n if len(high_level_parts) > 2:\r\n params = self._parse_params(high_level_parts[2])\r\n else:\r\n params = []\r\n \r\n # If this is an invalid command, pass it upwards\r\n try:\r\n self._pass_to_handler(origin, command, params)\r\n except ParseError, error:\r\n raise ParseError(error.value, string)", "def parse(self, inputstring: str, document: nodes.document) -> None:\n raise NotImplementedError(\"subclass must override this method\")", "def parse(self, script_str):\n lines = script_str.split('\\n')\n for line in lines:\n self.parse_line(line.strip())", "def _parse_start(self, start_str, year=None):\n # Allow for overriding year where not provided\n date_re = r\"\\w+\\s+\\d{1,2}\" if year else r\"\\w+\\s+\\d{1,2},\\s+\\d{4}\"\n date_match = re.search(date_re, start_str)\n if not date_match:\n return\n date_str = date_match.group().replace(\",\", \"\")\n if year:\n date_str += \" {}\".format(year)\n\n time_match = re.search(r\"\\d{1,2}:\\d{2}\\s+[APM\\.]{2,4}\", start_str)\n # Override for defaulting 2019 meetings to 8am, otherwise default to midnight\n if not year or year == \"2019\":\n time_str = \"8:00 AM\"\n else:\n time_str = \"12:00 AM\"\n if time_match:\n time_str = time_match.group().replace(\".\", \"\").strip()\n\n return datetime.strptime(\n \"{} {}\".format(date_str, time_str), \"%B %d %Y %I:%M %p\"\n )", "def parse_curie(s: str):\n if \":\" in s:\n cidx = s.index(\":\")\n if cidx == 0:\n raise RuntimeError(\"CURIE namespace element is empty\")\n ns = s[:cidx]\n value = s[cidx+1:]\n return (ns, value)\n else:\n raise RuntimeError(\"Could not parse CURIE: \" + s)", "def street_parser(*street_data):\n\n # parsing tuples\n if len(street_data) == 2:\n if not isinstance(street_data[0], str) and not isinstance(street_data[1], str):\n raise WrongInput(\"Invalid format\")\n # street name as the tuple's first item\n strname, strnumber = street_data\n # street number as the tuple's first item\n if street_data[0][0] in digits:\n strname, strnumber = strnumber, 
strname\n\n # parsing strings\n else:\n if not isinstance(street_data[0], str):\n raise WrongInput(\"Invalid format\")\n if not street_data[0]:\n raise WrongInput(\"Input cannot be blank\")\n\n # string starting with street number\n if street_data[0][0] in digits:\n street_pattern = re.compile(r'''\n ^ # beginning of string\n (\\d+) # street number is any number of digits\n \\W+ # separator\n (\\w+\\W*\\w*\\W*) # street name is one or more words with optional separators\n $ # end of string\n ''', re.VERBOSE)\n street_obj = street_pattern.search(street_data[0])\n strnumber, strname = street_obj.groups()\n\n # string starting with street name\n else:\n street_pattern = re.compile(r'''\n ^ # beginning of string\n (\\w+\\W*\\w*\\s*) # street name is one or more words with optional separators\n \\W+ # separator\n (\\d+) # street number is any number of digits\n $ # end of string\n ''', re.VERBOSE)\n street_obj = street_pattern.search(street_data[0])\n (strname, strnumber) = street_obj.groups()\n\n # replace specific words in street name with their abbreviates\n strname = strname.lower()\n special = {r'\\baleje\\b': 'Al.', r'\\bavenue\\b': 'Av.', r'\\broad\\b': 'Rd.', r'\\bsquare\\b': 'Sq.',\n r'\\bstreet\\b': 'St.', r'\\bdrive\\b': 'Dr.'}\n for key in special:\n strname = re.sub(key, special[key], strname)\n return strname.title(), strnumber", "def parse(text):\n # Make sure that there's text to be split\n if text == None:\n return text\n return text.split(',')", "def parse(self, input):\n pass", "def parse_spec(spec: str) -> Tuple[str, str]:\n\n # Single \"*\" is treated as wildcard for date, not channel.\n if spec == \"*\":\n return \"*\", \"*\"\n\n channel_rex = r\"\"\"\n (?P<channel>\n nightly | beta | stable | \\* | (?: \\d+\\.\\d+\\.\\d+ )\n )\n \"\"\"\n\n date_rex = r\"\"\"\n (?P<date>\n \\d\\d\\d\\d-\\d\\d-\\d\\d | latest | \\*\n )\n \"\"\"\n\n m = re.match(\n r\"{} (?: - {})? 
$\".format(channel_rex, date_rex), spec, re.VERBOSE\n )\n if m:\n channel = m.group(\"channel\")\n date = m.group(\"date\") or \"\"\n return date, channel\n\n m = re.match(r\"{} $\".format(date_rex), spec, re.VERBOSE)\n if m:\n date = m.group(\"date\")\n return date, \"*\"\n\n raise error.UsageError(\"invalid SPEC {}\".format(repr(spec)))", "def parse(s: str) -> StateFormula:\n tree = PCTL_PARSER.parse(s.replace(\" \", \"\"))\n return PCTLTransformer.transform(tree)", "def parse(string):\n posslash = string.find('/')\n if posslash < 0:\n return Rational(int(string), 1)\n else:\n strs = string.split('/')\n return Rational(int(strs[0].strip()), int(strs[1].strip()))", "def parse_component(component):\n # Volume of the component in m^3\n volume = (component.X * component.Y * component.Z).values[0]\n\n # How many slots the component takes up\n internal_slots = component['Internal Slots'].values[0]\n\n if not component['External Slots'].values == 0:\n external = True\n external_slots = component['External Slots'].values[0]\n else:\n external = False\n external_slots = 0\n\n min_temp = component['Min Temp'].values[0]\n max_temp = component['Max Temp'].values[0]\n\n mass = component['Mass'].values[0]\n max_voltage = component['Voltage'].values[0]\n nom_power = component['Nom Power'].values[0]\n max_power = component['Power (W)'].values[0] - nom_power # This returns the difference when activated\n discharge_time = component['Discharge Time (Wh)'].values[0]\n pixel_resolution = component['Resolution (m)'].values[0]\n wavelength_resolution = component['Resolution(nm)'].values[0]\n min_wavelength = component['Min Wavelength (nm)'].values[0]\n max_wavelength = component['Max Wavelength (nm)'].values[0]\n field_of_view = component['Field of View (deg)'].values[0]\n rx_min = component['Receiver Min (MHz)'].values[0]\n rx_max = component['Receiver Max'].values[0]\n tx_min = component['Transmitter Min'].values[0]\n tx_max = component['Transmitter Max'].values[0]\n duplex = component['Duplex'].values[0] + 1\n br_down = component['Bit Rate Down'].values[0]\n br_up = component['Bit Rate Up'].values[0]\n data = component['Data Storage (MB)'].values[0]\n code = component['Code Storage (MB)'].values[0]\n ram = component['RAM'].values[0]\n att_know = component['Attitude Know (deg)'].values[0]\n att_view = component['Attitude View'].values[0]\n att_mom = component['Attitude Control moment'].values[0]\n max_prop = component['Max Propulsion (mN)'].values[0]\n att_type = component['Attitude Type'].values[0]\n axis = component['Axis control'].values[0]\n ctrl_area = component['Control Area (m^2)'].values[0]\n disposal = component['Disposal time(km/day)'].values[0]\n int_comms = component['Internal Comms'].values[0]\n comm_conn = component['IntCommConn'].values[0]\n price = component['Price ($US)'].values[0]\n\n metric_sums = np.array([[mass, duplex, br_down, br_up, data, code, ram, att_view, att_mom, max_prop, axis,\n ctrl_area, disposal, price, pixel_resolution, wavelength_resolution, min_wavelength,\n max_wavelength]]).T.astype(np.float)\n metric_mins = np.array([[att_know]]).T.astype(np.float)\n metric_maxs = np.array([[]]).T.astype(np.float)\n\n summation_values = np.array([[volume, mass, internal_slots, external_slots, nom_power, discharge_time, duplex,\n br_down, br_up, data, code, ram, att_know, att_view, att_mom, max_prop, att_type,\n axis, ctrl_area, disposal, price]]).T\n min_max_values = np.array([[max_voltage, max_power, pixel_resolution, wavelength_resolution, min_temp, max_temp,\n min_wavelength, 
max_wavelength, field_of_view, rx_min, rx_max, tx_min, tx_max]]).T\n\n #Todo, figure out a way to deal with the comms issue. possibly a later problem\n\n # print(summation_values)\n\n # Todo create matrix from arrays then sum each feature on the correct axis\n # Todo This will create the correct feature set\n # Other features will be made from summation of available slots/connects vs used\n return metric_sums, metric_mins, metric_maxs, summation_values, min_max_values", "def parse_part(self):\n parts = []\n for part in re.split(r'\\*\\*\\* ([A-Z- ]+) \\*\\*\\*', self.hand_file): # return [ 'part1', 'splitter1', 'part2',..\n parts.append(part)\n\n for i in range(0, len(parts)):\n if i == 0:\n self.part_dict['HEADER'] = parts[i]\n if i % 2 != 0: # number is odd\n self.part_dict[parts[i]] = parts[i + 1]", "def parseString(self, s):\n\n t0 = time.time()\n lines = self.getline(s)\n lineno = 0\n for l in lines:\n lineno += 1\n logging.log(10, \"raw line %05d: %s\" % (lineno, l))\n if len(l) == 0 or l[0] == '#':\n continue\n \n if l.startswith('typedef'):\n lidx = self.parseTypedef(l, lines)\n lineno += lidx\n else:\n # Not a typedef -- see if the 1st token matches a known\n # structure name. If not, create a new variable.\n sidx = l.find(' ')\n if sidx > 0:\n name = l[0:sidx]\n struct = self.structs.get(name.upper(), None)\n if struct:\n struct.parseOne(l)\n else:\n v = YPFVar(l, debug=0)\n if v.name in self.vars:\n newValue = v.value\n oldValue = self.vars[v.name].value\n if newValue != oldValue:\n print(\"Variable %s is being defined with a new value, overwriting it. old=%s, new=%s\" \n % (v.name, oldValue, newValue))\n self.vars[v.name] = v", "def _parse_string(\n value_expr: str, target_expr: str, ref_parts: List[str],\n a_type: mapry.String, auto_id: mapry.py.generate.AutoID) -> str:\n uid = auto_id.next_identifier()\n\n return _PARSE_STRING_TPL.render(\n uid=uid,\n value_expr=value_expr,\n ref_parts=ref_parts,\n target_expr=target_expr,\n a_type=a_type).rstrip(\"\\n\")", "def readTsp(self, String0):\n Name = re.match(r\"NAME : (.*)\", String0)[1]\n COMMENT = re.search(r\"COMMENT : (.*)\", String0)[1]\n TYPE = re.search(r\"TYPE : (.*)\", String0)[1]\n DIMENSION = re.search(r\"DIMENSION : (.*)\", String0)[1]\n EDGE_WEIGHT_TYPE = re.search(r\"EDGE_WEIGHT_TYPE : (.*)\", String0)[1]\n NODE_COORD_SECTION = []\n split = String0.split(\"\\n\")\n for s0 in split:\n if (s0 and s0[0] <= '9' and s0[0] >= '0'):\n one = s0.split(\" \")\n One = []\n One.append(float(one[0]))\n One.append(float(one[1]))\n One.append(float(one[2]))\n if (One != []):\n NODE_COORD_SECTION.append(One)\n return Name, COMMENT, TYPE, DIMENSION, EDGE_WEIGHT_TYPE, NODE_COORD_SECTION", "def _parse_input(self):\n #temperature\n regex = re.compile(\"TEMP=(\\d+\\.\\d*|\\d+)\")\n r = regex.search(self.file_dic['input'])\n if r:\n self.temperature = r.groups()[0]\n else:\n self.temperature = 298.15\n #theory\n regex = re.compile('(\\$contrl.+\\$end|\\$basis.+ \\$end)')\n temp_theory = regex.findall(self.file_dic['input'])\n contrl = temp_theory[0][:-4][7:].strip()\n basis = temp_theory[1][:-4][6:].strip()\n self.theory = contrl + ' ' + basis", "def parse(spec: str):\n parts = spec.split(\":\", maxsplit=1)\n chromosome = parts[0]\n if len(parts) == 1 or not parts[1]:\n start, end = 0, None\n else:\n try:\n sep = \":\" if \":\" in parts[1] else \"-\"\n start_end = parts[1].split(sep, maxsplit=1)\n start = int(start_end[0]) - 1\n if len(start_end) == 1 or not start_end[1]:\n end = None\n else:\n end = int(start_end[1])\n if end <= start:\n 
raise InvalidRegion(\"end is before start in specified region\")\n except ValueError:\n raise InvalidRegion(\"Region must be specified as chrom[:start[-end]])\") from None\n return Region(chromosome, start, end)", "def parseTime(string):\t\n \n if string == \"\":\n result = None\n if 'T' in string:\n string = string.replace('T', ' ')\n if 'Z' in string:\n string = string.replace('Z', '') \n\n if len(string) < 19:\n # string has some single digits\n p = \"\"\"^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2}) \n ([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}).*$\"\"\"\n s = re.findall(p, string)\n if len(s) > 0:\n string = '{0}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'\\\n .format(*[int(x) for x in s[0]])\n\n for date_format in DATE_FORMATS:\n try:\n result = datetime.datetime.strptime(string, date_format)\n except ValueError:\n pass\n\n return result", "def readString(self, st):\n if not isinstance(st, str):\n raise ValueError(\"String must be of type string, not %s\" % type(st))\n return etree.fromstring(st)", "def _ProcessSubstring(self, substring):\n if not substring:\n return\n stripped_substring = StripStartParens(substring)\n stripped_remaining = StripStartParens(self.remaining_string)\n if not stripped_remaining.startswith(stripped_substring):\n raise BadlySpecifiedTemplateError(\n 'string \"{}\" should be in string \"{}\"'\n .format(stripped_substring, stripped_remaining))\n self.remaining_string = self.remaining_string.split(\n stripped_substring, 1)[1]", "def parse(s: str):\n Term.str = s\n new_Formula = None\n while Term.str != '':\n new_Formula, Term.str = Formula.parse_prefix(Term.str)\n return new_Formula\n # Task 7.4.2", "def make_terms_from_string(s):\n u = s\n return u.split()", "def parse_tags(self, in_str):\n self.projects = []\n self.contexts = []\n for match in TAG_REGEX.finditer(in_str):\n tag_str = match.group(1)\n if tag_str[0] == \"+\":\n self.projects.append(tag_str[1:])\n elif tag_str[0] == \"@\":\n self.contexts.append(tag_str[1:])\n else:\n err_str = \"Unable to parse tag: %s\" % tag_str\n raise ValueError(err_str)\n words = re.sub(TAG_REGEX, \"\", in_str).split()\n return \" \".join(words)", "def unpack(struct, s, callback=None):\n\toutput = []\n\t\n\twhile len(struct) > 0:\n\t\tchar = struct[0]\n\t\tstruct = struct[1:]\n\n\t\tif char == ' ' or char == '!':\n\t\t\tcontinue\n\t\telif char == '{':\n\t\t\t# Find the closing brace\n\t\t\tsubstruct, struct = string.split(struct, '}', maxsplit=1)\n\t\t\tdata, s = unpack_list(\"L\", substruct, s, callback)\n\t\t\t\n\t\t\toutput.append(data)\n\t\telif char == '[':\n\t\t\t# Find the closing brace\n\t\t\tsubstruct, struct = smartsplit(struct, '[', ']')\n\t\t\tdata, s = unpack_list(\"I\", substruct, s, callback)\n\t\t\t\n\t\t\toutput.append(data)\n\t\telif char in 'Tt':\n\t\t\tdata, s = unpack_time(s, times[char])\n\t\t\t\n\t\t\toutput.append(data)\n\t\telif char == 'S':\n\t\t\tdata, s = unpack_string(s)\n\t\t\t\n\t\t\toutput.append(data)\n\t\telif char in string.digits:\n\t\t\t# Get all the numbers\n\t\t\tsubstruct = char\n\t\t\twhile struct[0] in string.digits:\n\t\t\t\tsubstruct += struct[0]\n\t\t\t\tstruct = struct[1:]\n\t\t\t# And the value the number applies to\n\t\t\tsubstruct += struct[0]\n\t\t\tstruct = struct[1:]\n\t\t\t\n\t\t\tsize = _calcsize(substruct)\n\t\t\tsize = _calcsize(substruct)\n\t\t\tif size > len(s):\n\t\t\t\traise TypeError(\"Not enough data for %s, needed %s bytes got %r (%s bytes)\" % (substruct[1:], size, s, len(s)))\n\n\t\t\tdata = _unpack(\"!\"+substruct, s[:size])\n\t\t\ts = s[size:]\n\n\t\t\toutput += 
data\n\t\telif char == 'x':\n\t\t\to, s = callback(s)\n\t\t\toutput += o\n\t\telse:\n\t\t\tif char in semi.keys():\n\t\t\t\tsubstruct = \"!\"+semi[char][1]\n\t\t\telse:\n\t\t\t\tsubstruct = \"!\"+char\n\n\t\t\tsize = _calcsize(substruct)\n\t\t\tif size > len(s):\n\t\t\t\traise TypeError(\"Not enough data for %s, needed %s bytes got %r (%s bytes)\" % (substruct[1:], size, s, len(s)))\n\n\t\t\ttry:\n\t\t\t\tdata = _unpack(substruct, s[:size])\n\t\t\texcept _error, e:\n\t\t\t\tprint \"Struct\", substruct, \"Args '%s'\" % (s[:size],)\n\t\t\t\traise\n\t\t\ts = s[size:]\n\n\t\t\tif char in semi.keys():\n\t\t\t\tif data[0] == 2**semi[char][0]-1:\n\t\t\t\t\tdata = (-1,)\n\t\t\toutput += data\n\n\treturn tuple(output), s", "def _parse_sub(self, parsetree, text, fpos=0):\r\n curr = 0\r\n for match in self._reSubstitution.finditer(text):\r\n start = match.start()\r\n if start > curr:\r\n parsetree.append((\"str\", self._reComment.sub('', text[curr:start])))\r\n\r\n if match.group(\"sub\") is not None:\r\n if not match.group(\"end\"):\r\n raise TemplateSyntaxError(\"Missing closing tag '%s' for '%s'.\" \r\n % (self._sub_end, match.group()), self._errpos(fpos+start))\r\n if len(match.group(\"sub\")) > 0:\r\n self._testexpr(match.group(\"sub\"), fpos+start)\r\n parsetree.append((\"sub\", match.group(\"sub\")))\r\n else:\r\n assert(match.group(\"escsub\") is not None)\r\n if not match.group(\"escend\"):\r\n raise TemplateSyntaxError(\"Missing closing tag '%s' for '%s'.\"\r\n % (self._subesc_end, match.group()), self._errpos(fpos+start))\r\n if len(match.group(\"escsub\")) > 0:\r\n self._testexpr(match.group(\"escsub\"), fpos+start)\r\n parsetree.append((\"esc\", self.escape, match.group(\"escsub\")))\r\n\r\n curr = match.end()\r\n\r\n if len(text) > curr:\r\n parsetree.append((\"str\", self._reComment.sub('', text[curr:])))", "def _parse_data(data: str) -> Tuple[str, str, str, int, int, int, str]:\n\n phg = None\n rng = None\n dfs = None\n course = None\n speed = None\n altitude = None\n comment = None\n\n if re.match(r'^PHG[0-9]{4}', data[:7]):\n # Packet has a PHG (power, antenna height/gain/directivity) value\n phg = data[3:7]\n logger.debug(\"PHG is {}\".format(phg))\n data = data[7:]\n\n elif re.match('^RNG[0-9]{4}', data[:7]):\n # Packet has an RNG (radio range) value\n rng = data[3:7]\n logger.debug(\"RNG is {}\".format(rng))\n data = data[7:]\n\n elif re.match('^DFS[0-9]{4}', data[:7]):\n # Packet has a DFS (DF signal strength, antenna height/gain/directivity) value\n dfs = data[3:7]\n logger.debug(\"DFS is {}\".format(dfs))\n data = data[7:]\n\n elif re.match('^[0-9]{3}/[0-9]{3}', data[:7]):\n # Packet has course and speed values\n course = int(data[:3])\n speed = int(data[4:7])\n logger.debug(\"Course is {}, speed is {}\".format(course, speed))\n data = data[7:]\n\n # TODO - parse BRG/NRQ\n\n # Check for comment\n if len(data) > 0:\n\n # Check for altitude\n # As per APRS 1.01 C6 P26, altitude as /A=nnnnnn may appear anywhere in the comment\n has_altitude = re.match('.*/A=([0-9]{6}).*', data)\n if has_altitude:\n # TODO - fix altitude format\n altitude = int(has_altitude.groups()[0])\n logger.debug(\"Altitude is {} ft\".format(altitude))\n\n # Strip out the altitude from the comment\n data = re.sub(r'/A=[0-9]{6}', \"\", data)\n\n # Set the comment as the remainder of the information field\n comment = data\n logger.debug(\"Comment is {}\".format(comment))\n\n return (phg, rng, dfs, course, speed, altitude, comment)", "def parse(string, format):\n # Count the number of spaces in the format string 
(N), and\n # truncate everything after the (N+1)th space\n spaces = format.count(' ') + 1\n string = ' '.join(string.split()[:spaces])\n\n try:\n result = dt.datetime.strptime(string, format)\n except ValueError, err:\n raise CannotParse(str(err))\n else:\n return result", "def parse(self, text, start=None):\n return self.parser.parse(text, start=start)", "def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 4\n (self.numberOfTSPTurtles,) = _get_struct_i().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill", "def parse_issue_tag(issue_tag_content):\n # <issue> contents:\n # <issue>2</issue>\n # <issue>Suppl</issue>\n # <issue>3 Suppl 1</issue>\n # <issue>Suppl 1</issue>\n number, suppl_label, suppl = [None, None, None]\n if issue_tag_content:\n lower_issue = issue_tag_content.lower()\n if 'sup' in lower_issue:\n # number\n number = lower_issue[0:lower_issue.find('sup')].strip()\n if number == '':\n number = None\n\n # supplement label\n suppl_label = issue_tag_content[lower_issue.find('sup'):]\n if ' ' in suppl_label:\n suppl_label = suppl_label[0:suppl_label.find(' ')]\n\n # supplement\n suppl = issue_tag_content[issue_tag_content.find(suppl_label) + len(suppl_label):].strip()\n if suppl == '':\n suppl = None\n else:\n number = issue_tag_content\n\n return (number, suppl_label, suppl)", "def parse_gate(s):\n if \"(\" in s:\n gate = s[:s.find(\"(\")]\n params = (s[s.find(\"(\") + 1:s.find(\")\")]).split(',')\n else:\n gate = s\n params = []\n return gate, params", "def parse_string(txt):\n return str(txt[1:-1])", "def parse_uri(string):\n # Note that this function is currently pretty silly looking, but it will\n # grow as the package expands to provide more functionality.\n\n # The scheme and location are separated by the FIRST colon\n try:\n scheme, location = string.split(':', 1)\n except ValueError:\n # If no colon was found in the passed URI.\n raise URIParseError('Unable to find scheme and location in URI %s. '\n 'No : character present.' 
% string)\n\n return scheme, location", "def parse(data:str) -> object:\n\n return ast.parse(data)", "def parse_entry(self, entry_string):\n entry_type, entry_string = self.pop_entry_type(entry_string)\n cite_key, entry_string = self.pop_key(entry_string)\n field_dict = dict(self.extract_fields(entry_string))\n field_dict[\"type\"] = entry_type\n self.force_field.citations[cite_key] = field_dict", "def parseTypeString(inTypeString):\n curType = Type()\n curStack = []\n for c in inTypeString:\n if c == '<':\n curStack.append(curType)\n curType = Type()\n curStack[-1].templateParams.append(curType)\n elif c == '>':\n curType = curStack.pop()\n elif c == ',':\n curType = Type()\n curStack[-1].templateParams.append(curType)\n else:\n curType.name += c\n curType.trimNames()\n return curType", "def _parse(url):\n url = url.strip()\n parsed = urlparse(url)\n return _parsed_url_args(parsed)", "def _parse_start(self, item_text, time_str, year_str):\n date_str = re.sub(\n r\"\\s+\", \" \", re.search(r\"[a-zA-Z]{3,10}\\s+\\d{1,2}\", item_text).group()\n )\n return datetime.strptime(date_str + year_str + time_str, \"%B %d%Y%I:%M %p\")", "def parse (self, phrase):\r\n\r\n if isinstance(phrase,str):\r\n #If the phrase is a string\r\n if self.is_simple(phrase):\r\n #EXITS the recursion\r\n if phrase[0:2] == '~~':\r\n return phrase[2:]\r\n #Eliminates negations that cancel each other\r\n return phrase\r\n elif self.bracketed(phrase):\r\n #Eliminate top-level parantheses\r\n return self.parse(phrase[1:-1])\r\n elif phrase[0] == '~':\r\n #If the phrase begins with a negating prefix...\r\n negations,phrase = self.heading_count(phrase)\r\n \r\n if self.bracketed(phrase):\r\n #If the negated phrase is bracketed\r\n if negations % 2 == 1:\r\n subphrase = self.split_into_phrases(phrase[1:-1])\r\n if subphrase[0] != '@': \r\n #De Morgan's Law \r\n return self.parse(['@']+['~'+x for x in subphrase])\r\n else:\r\n #De Morgan's Law\r\n return self.parse(['~'+x for x in subphrase[1:]])\r\n else:\r\n return self.parse(phrase[1:-1])\r\n return self.parse(self.split_into_phrases((negations%2)*'~'+phrase))\r\n \r\n else:\r\n return self.parse(self.split_into_phrases(phrase))\r\n # IF the phrase is a list\r\n if self.all_is_P(phrase,predicate_function=self.is_simple):\r\n #If every terms of the phrase list is simple...\r\n #This prepares for EXIT from recursion\r\n return [self.parse(x) for x in phrase]\r\n return self.parse([self.parse(x) for x in phrase])", "def parse(self, string):\n parse = re.match(\"^((?:[0-9]{1,3}\\.){3}[0-9]{1,3})\\s\\(((?:\\d)*\\.(?:\\d)*|(?:\\d)*)\\sms\\)$\", string)\n parse_result = parse.groups()\n return parse_result[0], parse_result[1]", "def parse_puzzle(puzzle):\n puzzle = re.sub(\"\\sGrid \\d{2}\",\"\", sample)\n puzzle = puzzle.strip().split(\"\\n\") \n return puzzle", "def parse(s):\n # Use _PARSE_RE to check that it's valid.\n if not CFGProduction._PARSE_RE.match(s):\n raise ValueError, 'Bad production string'\n # Use _SPLIT_RE to process it.\n pieces = CFGProduction._SPLIT_RE.split(s)\n pieces = [p for i,p in enumerate(pieces) if i%2==1]\n lhside = Nonterminal(pieces[0])\n rhsides = [[]]\n for piece in pieces[2:]:\n if piece == '|':\n rhsides.append([]) # Vertical bar\n elif piece[0] in ('\"', \"'\"):\n rhsides[-1].append(piece[1:-1]) # Terminal\n else:\n rhsides[-1].append(Nonterminal(piece)) # Nonterminal\n return [CFGProduction(lhside, rhside) for rhside in rhsides]", "def process_time_string(timestr):\n timestr = timestr.strip()\n toks = timestr.split('+')\n timeslices = []\n for 
t in toks:\n tm = t.strip()\n mobj = re.search('\\\\*', tm)\n if mobj == None:\n timeslices += [int(tm)]\n else:\n tms = tm.split('*')\n timeslices += int(tms[0]) * [int(tms[1])]\n\n return timeslices", "def from_string(cls, compound_term_string):\n compound_term_string = compound_term_string.replace(\" \", \"\")\n subterms, connector, intervals = cls.parse_toplevel_subterms_and_connector(compound_term_string)\n return cls(subterms, connector,intervals=intervals)", "def parse_string(self, data):\r\n return self._parse(antlr3.ANTLRStringStream(data))", "def parse(string):\n doc = nlp(string)\n return [str(n) for n in doc.noun_chunks]", "def _parse_full_position(cls, full_position_string):\n try:\n before,after = [cls._parse_single_position(s) for s in full_position_string.split('-')]\n except (ValueError,AttributeError):\n raise ValueError(\"The full_position argument must be a string of the form '100-200', '?-200' or '100-?'!\"\n \"Got '%s'\"%(full_position_string,))\n if before is None and after is None:\n raise ValueError(\"At least one section of the full_position argument must be a number!\")\n return before,after" ]
[ "0.60741115", "0.60195297", "0.59897935", "0.5646392", "0.5526063", "0.5425705", "0.5401909", "0.5301537", "0.52829874", "0.52163225", "0.5190413", "0.5171466", "0.5171442", "0.5124075", "0.50967103", "0.5088532", "0.5067676", "0.50537384", "0.50519437", "0.50450486", "0.5031276", "0.50070274", "0.49879488", "0.49858525", "0.49813846", "0.4980069", "0.49694574", "0.49532706", "0.49397704", "0.49368837", "0.49124646", "0.489587", "0.48955312", "0.48929492", "0.486996", "0.48643836", "0.48607492", "0.48496786", "0.4845406", "0.48390624", "0.48320425", "0.48242757", "0.48106498", "0.48054022", "0.4800808", "0.47947198", "0.4793772", "0.47909802", "0.4774959", "0.47457877", "0.47282138", "0.47192076", "0.4714959", "0.47100204", "0.47093612", "0.47050148", "0.46991107", "0.4695845", "0.4693939", "0.46878317", "0.46839485", "0.46821687", "0.4678298", "0.46678275", "0.46663097", "0.46627393", "0.46545696", "0.46466205", "0.46458262", "0.4645164", "0.4643665", "0.464191", "0.46308285", "0.46294996", "0.46286133", "0.46268147", "0.46233115", "0.462022", "0.46167824", "0.46111485", "0.4609756", "0.46093875", "0.45982373", "0.4592734", "0.45823798", "0.45819616", "0.45813605", "0.45802402", "0.4577274", "0.45748165", "0.4566535", "0.45644057", "0.4556306", "0.45537892", "0.45536715", "0.45533758", "0.45513427", "0.45475057", "0.4544928", "0.45440897" ]
0.55399895
4
UCB1 algorithm ucb1 = winrate + sqrt(2lnn / ni)
def ucbScore(self,totalPlayedTimes): winRate = self.winRate() #print totalPlayedTimes #print self.playedTimes confidenceInterval = math.sqrt(2 * math.log(totalPlayedTimes,math.e) / self.playedTimes) return winRate + confidenceInterval
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def algo_UCB(mu, Na):\n i = 0\n while i < Na.size:\n if Na[i] < 1:\n return i\n else:\n i+= 1\n t = Na.sum()\n return np.argmax(mu + np.sqrt(2*np.log(t)/Na))", "def _ucb(self, s, k):\n if self.confidence_method == 'ucb-standard':\n ucb_factor = self._ucb_standard_factor(s, k)\n return self.mu[k][s] + ucb_factor\n elif self.confidence_method == 'ucb-standard-relaxed':\n ucb_factor = self._ucb_standard_factor(s, k) - self.ucb_eps\n return self.mu[k][s] + ucb_factor\n elif self.confidence_method == 'clopper-pearson-ucb':\n return self._bernoulli_upper(self.mu[k][s], self.count[k][s],\n self.delta(k, s))\n elif self.confidence_method == 'clopper-pearson-ucb-relaxed':\n return self._bernoulli_upper(self.mu[k][s], self.count[k][s],\n self.delta(k, s)) - self.ucb_eps\n else:\n raise ValueError('Did not recognise confidence method {}'.format(\n self.confidence_method))", "def UCB(options, steps, iteration):\n\n assert (0 not in steps), 'UCB algorithm requires an initial single pull of each bandit'\n weights = [(2 * np.log(iteration) / n)**0.5 for n in steps]\n adjusted_options = [(x + y) for x, y in zip(options, weights)]\n return np.argmax(adjusted_options)", "def method1(self):\n cres=0. # Variable for storing Chern number.\n # The U matrices from Fukui's method; storage...\n Ux=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n Uy=np.zeros((self.kS.Nx+1,self.kS.Ny+1),dtype=complex)\n \n # ... and calculation of U matrices\n for ix in range(self.kS.Nx+1):\n for iy in range(self.kS.Ny+1):\n mat1=self.alleigvecs[:,:,ix ,iy ]\n if ix<self.kS.Nx:\n mat2=self.alleigvecs[:,:,ix+1,iy ]\n else:\n mat2=self.alleigvecs[:,:,1 ,iy ]\n if iy<self.kS.Ny:\n mat3=self.alleigvecs[:,:,ix ,iy+1]\n else:\n mat3=self.alleigvecs[:,:,ix ,1 ]\n Ux[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat2)[:self.NL,:self.NL])\n Uy[ix,iy]=np.linalg.det(np.dot(np.conj(mat1.T),mat3)[:self.NL,:self.NL])\n \n # Local estimates of Berry curvature; storage ...\n ftempall=np.zeros((self.kS.Nx,self.kS.Ny),complex)\n # ... and calculation\n for ix in range(self.kS.Nx):\n for iy in range(self.kS.Ny):\n ftemp=np.log(Ux[ix,iy]*Uy[ix+1,iy]/Ux[ix,iy+1]/Uy[ix,iy])\n ftempall[ix,iy]=ftemp # ... of local Berry curvature ...\n cres+=ftemp/2./pi/1j # ... 
and of Berry phase (Chern number).\n\n return cres.real, ftempall", "def runUCB(self):\n \n #Init vars, N number of user sessions, d=number of ads\n N = self.myDS.shape[0] \n d = self.myDS.shape[1] \n total_reward=0\n self.opt_selected=[]\n \n #Declare vars to count to calculate upper bounds\n numbers_of_selections = [0] * d\n sums_of_rewards = [0] * d\n \n #Calcultate confidance bounds\n for n in range(0,N):\n ad=0\n max_upper_bound=0\n for i in range (0,d):\n if (numbers_of_selections[i]>0):\n average_reward=sums_of_rewards[i]/numbers_of_selections[i]\n delta_i=math.sqrt(3/2 * math.log(n+1) / numbers_of_selections[i])\n upper_bound=average_reward+delta_i\n else:\n upper_bound=1e400\n if upper_bound>max_upper_bound:\n max_upper_bound=upper_bound\n ad = i\n self.opt_selected.append(ad)\n numbers_of_selections[ad]=numbers_of_selections[ad]+1\n reward=self.myDS.values[n,ad]\n sums_of_rewards[ad]=sums_of_rewards[ad]+reward\n total_reward=total_reward+reward\n \n return total_reward", "def _ucbpe_2ucb(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu + 2 * beta_th * sigma", "def _ucbpe_2ucb(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu + 2 * beta_th * sigma", "def b_n(n):\n if n <= 0.36: # MCH03\n ei = np.array([0, 1, 2, 3, 4])\n ai = np.array([0.01945, -0.8902, 10.95, -19.67, 13.43])\n else: # CB99\n ei = np.array([1, 0, -1, -2])\n ai = np.array([2, -1./3, 4./405, 46./25515])\n return np.sum(ai * np.power(float(n), ei))", "def ccx_otest(n):\n b = [bina(n + 1)[0]]\n # b = bina(n+1)\n for bi in b:\n print(bi)\n q = QuantumRegister(n + 1, 'q')\n circ = QuantumCircuit(q)\n for i in bi:\n circ.x(q[i])\n cnx_o(circ, q, [q[i] for i in range(n - 2, -1, -1)], q[n], q[n - 1])\n # circ.mct([q[i] for i in range(n-2,-1,-1)], q[n], q[n-1])\n circ.barrier(q)\n launch2(circ)\n\n circ_m = measure(circ, q, [i for i in range(n + 1)])\n return circ_m", "def ComputeNrb(self):\r\n pass", "def question_18():\n rbf = RadialBiasFunction()\n wins = 0.0\n for i in range(100):\n rbf.fit(1.5, 9)\n rbf_error = rbf.error()\n if rbf_error == 0:\n wins += 1\n rbf.resample()\n return wins / 100", "def chi2(u,v):\n\n u[u==0] = 1e-6\n v[v==0] = 1e-6\n r = np.sum(((u-v)**2).astype(np.float)/(u+v))\n\n # m = (u != 0) & (v != 0)\n # r = np.sum(((u[m]-v[m])**2).astype(np.float)/(u[m]+v[m]))\n\n # r = np.nansum(((u-v)**2).astype(np.float)/(u+v))\n return r", "def part1b_1():\n xs = exampleInput\n backward = submission.computeBackward(simpleCRF, xs)\n for i in xrange(len(xs)):\n grader.requireIsEqual( 1.0, sum( backward[i].values() ) )", "def UCB1(self, T, naive=False):\n # Initialization\n number_draws, rewards = self.initialize()\n rew = []\n draw = []\n\n for t in range(T):\n print(\"len = {}\".format(t))\n if naive:\n opt_func = rewards / number_draws\n else:\n opt_func = rewards / number_draws + np.sqrt(np.log(t + 1) / (2. 
* number_draws))\n print(\"optimization function from which we get the argmax: {}\".format(opt_func))\n\n # Get the argmax from the optimization function\n next_action = np.argmax(opt_func)\n print(\"Next Arm to draw: {}\".format(next_action + 1))\n\n next_arm = self.MAB[next_action]\n r = next_arm.sample()\n print(\"Reward of the next arm drawn: {}\".format(r))\n\n # Updating the N(t) and S(t)\n number_draws[next_action] += 1\n print(\"N vector updated: {}\".format(number_draws))\n\n rewards[next_action] += r\n print(\"S vector updated: {}\".format(rewards))\n\n # Lists of rewards and actions(arms drawn)\n draw.append(next_action)\n rew.append(r)\n\n return rew, draw", "def _U_table(ci, hi, co, ho):\n # TODO: Base U on Table 18.5, Warren D. Seider et. al. Product and Process Design Principles. (2016)\n cip, hip, cop, hop = ci.phase, hi.phase, co.phase, ho.phase\n phases = cip + hip + cop + hop\n if 'g' in phases:\n if ('g' in hip and 'l' in hop) and ('l' in cip and 'g' in cop):\n return 1.0\n else:\n return 0.5\n else:\n return 0.5", "def king2(r, n0, rc0, b0, n1, rc1, b1):\n return n0 * (1. + (r/rc0)**2)**b0 + n1 * (1. + (r/rc1)**2)**b1", "def calculate_bleu(output_lns, refs_lns):\n return round(corpus_bleu(output_lns, [refs_lns]).score, 4)", "def test_chao1_bias_corrected(self):\n obs = chao1_bias_corrected(*osd(self.TestData))\n self.assertEqual(obs, 9.75)", "def ohms(self):\n # Rwb = Rwiper + Rtotal * (counts / 256)\n # Rwa = Rwiper + Rtotal * ((256 - counts) / 256)\n g = 0\n rtotal=0.0\n reach=[]\n for chan in self.get_channel_list(self.nchans):\n self.rwa[chan] = float( 256 - self.vals[chan] ) / 256.0\n self.rwb[chan] = float( self.vals[chan] ) / 256.0\n self.rwa[chan] *= self.Rtotal\n self.rwb[chan] *= self.Rtotal \n self.rwa[chan] += self.Rwiper\n self.rwb[chan] += self.Rwiper", "def wci(B,mu):\n return eV2J*B/mp/mu", "def ucb1(self, game, parent_game):\n assert game != parent_game\n wins = self._wins[game]\n plays = self._plays[game]\n parent_plays = self._plays[parent_game]\n assert parent_plays >= plays\n assert plays >= wins\n if not plays:\n return 1e10\n exploit = wins / plays\n explore = math.sqrt(2. * math.log(parent_plays) / plays)\n return exploit + explore", "def part1b_0():\n xs = exampleInput\n _, forward = submission.computeForward(simpleCRF, xs)\n for i in xrange(len(xs)):\n grader.requireIsEqual( 1.0, sum( forward[i].values() ) )", "def powAlpha( n ):\n return (1-betaval)*Fib(n) + Fib(n-1)\n #return Fib(n+1) - Fib(n) * betaval", "def get_crossover_accept_rate(n: int):\n return 1 / (2 ** n)", "def bukinn6fcn(x: np.ndarray) -> np.ndarray:\n\n n = x.shape[1]\n assert n == 2, \"The Bukin N. 
6 functions is only defined on a 2D space.\"\n\n X = x[:, 0]\n X2 = X**2\n Y = x[:, 1]\n\n scores = 100 * np.sqrt(np.abs(Y - 0.01 * X2)) + 0.01 * np.abs(X + 10)\n return scores", "def mscb(t):\n\treturn int(np.log2(t ^ (t + 1)))", "def kernal_mus(n_kernels):\n l_mu = [1]\n if n_kernels == 1:\n return l_mu\n\n bin_size = 2.0 / (n_kernels - 1) # score range from [-1, 1]\n l_mu.append(1 - bin_size / 2) # mu: middle of the bin\n for i in range(1, n_kernels - 1):\n l_mu.append(l_mu[i] - bin_size)\n print(l_mu)\n return l_mu", "def generate_pn2kc_weights(nb_pn, nb_kc, min_pn=10, max_pn=20, aff_pn2kc=None, nb_trials=100000, baseline=25000,\r\n rnd=np.random.RandomState(2018), dtype=np.float32):\r\n\r\n dispersion = np.zeros(nb_trials)\r\n best_pn2kc = None\r\n\r\n for trial in range(nb_trials):\r\n pn2kc = np.zeros((nb_pn, nb_kc), dtype=dtype)\r\n\r\n if aff_pn2kc is None or aff_pn2kc <= 0:\r\n vaff_pn2kc = rnd.randint(min_pn, max_pn + 1, size=nb_pn)\r\n else:\r\n vaff_pn2kc = np.ones(nb_pn) * aff_pn2kc\r\n\r\n # go through every kenyon cell and select a nb_pn PNs to make them afferent\r\n for i in range(nb_pn):\r\n pn_selector = rnd.permutation(nb_kc)\r\n pn2kc[i, pn_selector[:vaff_pn2kc[i]]] = 1\r\n\r\n # This selections mechanism can be used to restrict the distribution of random connections\r\n # compute the sum of the elements in each row giving the number of KCs each PN projects to.\r\n pn2kc_sum = pn2kc.sum(axis=0)\r\n dispersion[trial] = pn2kc_sum.max() - pn2kc_sum.min()\r\n # pn_mean = pn2kc_sum.mean()\r\n\r\n # Check if the number of projections per PN is balanced (min max less than baseline)\r\n # if the dispersion is below the baseline accept the sample\r\n if dispersion[trial] <= baseline: return pn2kc\r\n\r\n # cache the pn2kc with the least dispersion\r\n if best_pn2kc is None or dispersion[trial] < dispersion[:trial].min():\r\n best_pn2kc = pn2kc\r\n\r\n # if non of the samples have dispersion lower than the baseline,\r\n # return the less dispersed one\r\n return best_pn2kc", "def part_2():\n input_ = parse_input() + list(range(10, 1_000_001))\n cups = turn_input_into_cups(input_)\n cups = solve(cups, first_cup=cups[input_[0]], turns=10_000_000)\n\n return cups[1].next.number * cups[1].next.next.number", "def _compute_bn(self, lvl):\n bn = [0] # number of samples crossing the left/right boundary\n for n in range(lvl):\n # 1. down-sampling of N samples by the factor scl gives (N-1)//scl + 1 samples\n # 2. 
bn[-1]+M-1 is the number of samples acrossing the left/right boundary, with M being the number of freqeuncies\n # => hence after the downsampling the number of boundary crossing samples is:\n bn.append((bn[-1]+self.nfreq-2)//self.scaling+1)\n bn.append(bn[-1]) # repeat the value of the coarsest scale for the approximation coefficient\n return bn[1:][::-1]", "def nac_w_optimal_r(fan_in, fan_out):\n fan = max(fan_in + fan_out, 5)\n r = scipy.optimize.bisect(lambda r: fan * nac_w_variance(r) - 2, 0, 10)\n return r", "def powBeta( n ):\n return (1-alphaval)*Fib(n) + Fib(n-1)\n #return Fib(n+1) - Fib(n) * alphaval", "def u_crit(state, sys):\n s = state[0]\n i = state[1]\n tau = scipy.interpolate.interp1d(sys.tau.s, sys.tau.i, kind = \"cubic\")\n phi = scipy.interpolate.interp1d(sys.phi.s, sys.phi.i, kind = \"cubic\")\n cc = scipy.interpolate.interp1d(sys.commutation_curve[0],\n sys.commutation_curve[1],\n kind = \"cubic\")\n if i > sys.imax:\n return sys.umax\n if s <= sys.commutation_curve[0][-1]:\n #print(\"Case 1\")\n if s < sys.sbar or i < tau(s):\n return 0\n return sys.umax\n elif s > sys.commutation_curve[0][-1] and s < sys.commutation_curve[0][0]:\n #print(\"Case 2\")\n if ((i > tau(s)) and (i < cc(s))) or (i > sys.imax):\n return sys.umax\n elif i > cc(s) and i < sys.imax:\n return 0\n else:\n return 0\n else:\n #print(\"Case 3\")\n if i > sys.imax:\n return sys.umax\n elif s > sys.sstar and i > phi(s):\n return sys.umax\n return 0", "def calculate_br_up_metric(br_up):\n if br_up < 1:\n br_up = 1\n min_baud = 1200\n max_baud = 38400\n\n num = np.log(br_up) - np.log(min_baud)\n den = np.log(max_baud) - np.log(min_baud)\n\n return (num / den + 0.1).clip(min=0, max=1)", "def part1b_2():\n xs = exampleInput\n z = 5.881\n forward = [\n Counter({'-FEAT-': 0.622, '-SIZE-': 0.377}), \n Counter({'-SIZE-': 0.761, '-FEAT-': 0.238}), \n Counter({'-SIZE-': 0.741, '-FEAT-': 0.258})]\n \n z_, forward_ = submission.computeForward(simpleCRF, xs)\n for vec, vec_ in zip( forward, forward_):\n grader.requireIsTrue( Counters.approximateEquals( vec, vec_ ) )\n grader.requireIsEqual( z, z_, 1e-2)", "def fn(i, s0, s1, c0, c1):\n if s0 > n or s1 > n: return 0 # impossible \n if i == len(balls): return int(c0 == c1)\n ans = 0 \n for x in range(balls[i]+1): \n ans += fn(i+1, s0+x, s1+balls[i]-x, c0+(x > 0), c1+(x < balls[i])) * comb(balls[i], x)\n return ans", "def calculate_bleu(candidate, reference):\n pn_sum = 0\n for n in range(1, 4):\n cand_grama, ref_grama = n_grama(candidate, n), n_grama(reference, n)\n p = calculate_p(cand_grama, ref_grama)\n if p != 0:\n # If there are no matches between n_grams,\n # don't add nothing\n pn_sum += (0.33 * math.log(p))\n bp = calculate_bp_penality(candidate, reference)\n return bp * math.exp(pn_sum)", "def binomial_coefficient3(n, k):\n return reduce(lambda a, b: a * (n - b) / (b + 1), xrange(k), 1)", "def plus_state(n_qubits):\n return np.array([1]*(2**n_qubits))/np.sqrt(2**n_qubits)", "def bernul(n, k, p):\n return comb(n, k) * p ** k * (1 - p) ** (n-k)", "def nCWRk(n, r):\n val = 1\n for i in range(1, r+1):\n val *= n + r - i\n val //= i\n return val", "def get_roi_onec0_posl1_ub_cost(fm_h, fm_w, fm_c0, dtype, pooled_h, pooled_w):\n return get_roi_onec0_posl1_ub_rois_cost(pooled_h, pooled_w) + \\\n get_roi_onec0_posl1_ub_fm_cost(fm_h, fm_w, fm_c0, dtype,\n pooled_h, pooled_w)", "def _iou(self, bb_test,bb_gt):\n xx1 = np.maximum(bb_test[0], bb_gt[0])\n yy1 = np.maximum(bb_test[1], bb_gt[1])\n xx2 = np.minimum(bb_test[2], bb_gt[2])\n yy2 = np.minimum(bb_test[3], 
bb_gt[3])\n w = np.maximum(0., xx2 - xx1)\n h = np.maximum(0., yy2 - yy1)\n wh = w * h\n o = wh / ((bb_test[2]-bb_test[0])*(bb_test[3]-bb_test[1])\n + (bb_gt[2]-bb_gt[0])*(bb_gt[3]-bb_gt[1]) - wh)\n return(o)", "def calc_gain(s, i):\n return math.sqrt((i + s) / (6 * s))", "def _ucb_acq(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu + beta_th * sigma", "def _ucb_acq(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu + beta_th * sigma", "def NUT1(self,NUT1b,n=2.0):\n if self.tipo == 'contra':\n return n*NUT1b\n if self.tipo == 'paralelo':\n return n*NUT1b\n if self.tipo == 'misto':\n return n*NUT1b", "def _ucb_halluc_acq(x):\n mu, sigma = gp.eval_with_hallucinated_observations(x, halluc_pts, uncert_form='std')\n return mu + beta_th * sigma", "def _ucb_halluc_acq(x):\n mu, sigma = gp.eval_with_hallucinated_observations(x, halluc_pts, uncert_form='std')\n return mu + beta_th * sigma", "def conv_1x1_bn(self, inp, oup):\n return nn.Sequential(\n nn.Conv2d(inp, oup, 1, 1, 0, bias=False),\n self.get_bn_module(oup),\n nn.ReLU6(inplace=True)\n )", "def getK1(inp):\n\td0 = getD0(inp)\n\treturn 0.32745 + 1/(2 * d0) - 8/(81 * d0)", "def wabbits(n, k):\n a, b = 1, 1\n for i in range(3, n+1):\n a, b = b, a*k + b\n return b", "def fRCrim(Swe,Vc1,Vc2,Vc3,Vk,PHIe,Rc1,Rc2,Rc3,Rk,Rw,Rh,Cwv,Ckv,Alpha,Tout):\n#\n# 1. Compute and normalise volumetric components:\n#\t-----------------------------------------------\n\tVw=PHIe*Swe\n\tVh=PHIe*(1-Swe)\n\tVwe=(Vw-Cwv)/(1-Cwv)\n\tVwe=ImposeLimits(Vwe,0,1)\n\tVke=(Vk-Ckv)/(1-Ckv)\n\tVke=ImposeLimits(Vke,0,1)\n\tSum=abs(Vc1)+abs(Vc2)+abs(Vc3)+abs(Vke)+abs(Vwe)+abs(Vh)\n\tVc1=abs(Vc1)/Sum\n\tVc2=abs(Vc2)/Sum\n\tVc3=abs(Vc3)/Sum\n\tVk=abs(Vk)/Sum\n\tVw=abs(Vw)/Sum\n\tVh=abs(Vh)/Sum\n#\n#\t2. Determine conductivity of components:\n#\t----------------------------------------\n\tSigc1=1/Rc1\n\tSigc2=1/Rc2\n\tSigc3=1/Rc3\n\tSigk=1/Rk\n\tSigw=1/Rw\n\tSigh=1/Rh\n#\n#\t3. Compute Conductivity:\n#\t========================\n\tTrm1=Vc1*(Sigc1**(1/Alpha))\n\tTrm2=Vc2*(Sigc2**(1/Alpha))\n\tTrm3=Vc3*(Sigc3**(1/Alpha))\n\tTrm4=(Vk**2.2)*(Sigk**(1/Alpha)) # Factor of 2.2 included to get data to fit to Yang et al\n\tTrm5=Vw*(Sigw**(1/Alpha))\n\tTrm6=Vh*(Sigh**(1/Alpha))\n\tCrf=(Trm1+Trm2+Trm3+Trm4+Trm5+Trm6)**Alpha\n#\n#\n# 4. 
Output result:\n#\t-----------------\n\tif(Tout==0):\n\t\tFr=Crf\n\telse:\n\t\tFr=1/Crf\n\treturn Fr", "def _get_ucb_beta_th(dim, time_step):\n return np.sqrt(5 * dim * np.log(2 * dim * time_step + 1))", "def _ucbpe_lcb(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu - beta_th * sigma", "def _ucbpe_lcb(x):\n mu, sigma = gp.eval(x, uncert_form='std')\n return mu - beta_th * sigma", "def binomial_coefficient2(n, k):\n if 0 <= k <= n:\n p = 1\n for t in xrange(min(k, n - k)):\n p = (p * (n - t)) // (t + 1)\n return p\n else:\n return 0", "def ccxtest(n):\n b = bina(n)\n for bi in b:\n print(bi)\n q = QuantumRegister(n, 'q')\n circ = QuantumCircuit(q)\n for i in bi:\n circ.x(q[i])\n cnx(circ, q, [q[i] for i in range(n - 2, -1, -1)], q[n - 1])\n circ.barrier(q)\n launch2(circ)\n\n circ_m = measure(circ, q, [i for i in range(n)])\n return circ_m", "def clebschSU2(idx1, idx2, idx3):\n j1, m1 = idx1\n j2, m2 = idx2\n j3, m3 = idx3\n\n if m3 != m1 + m2:\n return 0\n vmin = int(np.max([-j1 + j2 + m3, -j1 + m1, 0]))\n vmax = int(np.min([j2 + j3 + m1, j3 - j1 + j2, j3 + m3]))\n\n C = np.sqrt(\n (2.0 * j3 + 1.0)\n * factorial(j3 + j1 - j2)\n * factorial(j3 - j1 + j2)\n * factorial(j1 + j2 - j3)\n * factorial(j3 + m3)\n * factorial(j3 - m3)\n / (\n factorial(j1 + j2 + j3 + 1)\n * factorial(j1 - m1)\n * factorial(j1 + m1)\n * factorial(j2 - m2)\n * factorial(j2 + m2)\n )\n )\n S = 0\n for v in range(vmin, vmax + 1):\n S += (\n (-1.0) ** (v + j2 + m2)\n / factorial(v)\n * factorial(j2 + j3 + m1 - v)\n * factorial(j1 - m1 + v)\n / factorial(j3 - j1 + j2 - v)\n / factorial(j3 + m3 - v)\n / factorial(v + j1 - j2 - m3)\n )\n C = C * S\n return C", "def Z(n):\n count5 = 0\n i = 1\n while 1:\n a = pow(5, i)\n if a > n:\n return count5\n else:\n count5 += n/a\n i += 1", "def luc(n):\r\n if n==1:\r\n return 2\r\n elif n==2:\r\n return 1\r\n else:\r\n return luc(n-1)+luc(n-2)", "def csrbf(r):\n return num.power((num.maximum(0, 1-r)), 3)*(3*r+1)", "def _get_cu(self):\n c_undrained=0\n #group_index = self._data['GI']\n if self.is_clayey():\n c_undrained = self.qu(self._data[SoilProperty.N60])/2\n #c_undrained=_clamp(c_undrained, 10, 103)\n # Plasix calculation needs very small c_undrained\n #if c_undrained<0.21:\n # c_undrained = 0.21\n #use 0.2 as per plasix recommendation\n return c_undrained#the cu is always 103 check with small value of n_60, some mistake maybe", "def pi_chudnovsky_bs(digits):\n C = 640320\n C3_OVER_24 = C**3 // 24\n def bs(a, b):\n \"\"\"\n Computes the terms for binary splitting the Chudnovsky infinite series\n\n a(a) = +/- (13591409 + 545140134*a)\n p(a) = (6*a-5)*(2*a-1)*(6*a-1)\n b(a) = 1\n q(a) = a*a*a*C3_OVER_24\n\n returns P(a,b), Q(a,b) and T(a,b)\n \"\"\"\n if b - a == 1:\n # Directly compute P(a,a+1), Q(a,a+1) and T(a,a+1)\n if a == 0:\n Pab = Qab = mpz(1)\n else:\n Pab = mpz((6*a-5)*(2*a-1)*(6*a-1))\n Qab = mpz(a*a*a*C3_OVER_24)\n Tab = Pab * (13591409 + 545140134*a) # a(a) * p(a)\n if a & 1:\n Tab = -Tab\n else:\n # Recursively compute P(a,b), Q(a,b) and T(a,b)\n # m is the midpoint of a and b\n m = (a + b) // 2\n # Recursively calculate P(a,m), Q(a,m) and T(a,m)\n Pam, Qam, Tam = bs(a, m)\n # Recursively calculate P(m,b), Q(m,b) and T(m,b)\n Pmb, Qmb, Tmb = bs(m, b)\n # Now combine\n Pab = Pam * Pmb\n Qab = Qam * Qmb\n Tab = Qmb * Tam + Pam * Tmb\n return Pab, Qab, Tab\n # how many terms to compute\n DIGITS_PER_TERM = math.log10(C3_OVER_24/6/2/6)\n N = int(digits/DIGITS_PER_TERM + 1)\n # Calclate P(0,N) and Q(0,N)\n P, Q, T = bs(0, N)\n one_squared = 
mpz(10)**(2*digits)\n sqrtC = gmpy2.isqrt(10005*one_squared)\n return (Q*426880*sqrtC) // T", "def u(self, k, m, z):\n result = self.ProfNFW.nfw(k, m, z) * self.Ngal(m) / self.nBarGal(1./(1.+z))\n return result", "def ncusps(self):\n n = self.level()\n return sum([arith.euler_phi(arith.gcd(d,n//d)) for d in n.divisors()])", "def clenshaw_curtis1D(u, quad=\"GC\"): # pragma: no cover\n assert u.ndim == 1\n N = u.shape[0]\n if quad == 'GL':\n w = np.arange(0, N, 1, dtype=float)\n w[2:] = 2./(1-w[2:]**2)\n w[0] = 1\n w[1::2] = 0\n ak = dct(u, 1)\n ak /= (N-1)\n return np.sqrt(np.sum(ak*w))\n\n assert quad == 'GC'\n d = np.zeros(N)\n k = 2*(1 + np.arange((N-1)//2))\n d[::2] = (2./N)/np.hstack((1., 1.-k*k))\n w = dct(d, type=3)\n return np.sqrt(np.sum(u*w))", "def UCB2(x, gp, ndim, t,delta = 0.1,v=1):\n\td=ndim\n\t#t=X_init.shape[0]\n#\tv=3\n#\tdelta=0.1\n\tx1=np.array(x).reshape(-1,ndim)\n\tmuNew, stdNew = gp.predict(x1, return_std=True)\n\t#fMax=max(Y_init)\n\t#Kappa = np.sqrt( v* (2* np.log((t**(d/2. + 2))*(np.pi**2)/(3. * delta) )))\n\tKappa = delta*((v**d)/t) \n\t#plt.plot(t,Kappa,'o')\n\treturn -(muNew + Kappa * stdNew)", "def kernel_mus(self, n_kernels: int):\n l_mu = [1.0]\n if n_kernels == 1:\n return l_mu\n\n bin_size = 2.0 / (n_kernels - 1) # score range from [-1, 1]\n l_mu.append(1 - bin_size / 2) # mu: middle of the bin\n for i in range(1, n_kernels - 1):\n l_mu.append(l_mu[i] - bin_size)\n return l_mu", "def sugg(n):\n print (\"%s\\t\"*3)%(\"p\", \"m(bytes)\", \"ok\")\n for p in (0.1, 0.01, 0.001, 0.0001, 0.00001):\n m=BloomFilter.calBitLen(n,p)\n ok=BloomFilter.calHash(n,m)\n print (\"%.5f\\t\"+\"%d\\t\"*2)%(p, m/8, ok)\n for k in BloomFilter.KRange:\n rp=BloomFilter.calPFP(n,m,k)\n print (\"\\t\"*2+\"%d\\t%f\")%(k, rp)", "def KsCB(fiter):\n \n fiter.mean = RooRealVar(\"mean1\",\"mean1\",490,510)#5168.)\n fiter.sigma = RooRealVar(\"sigma\",\"sigma\", 2,6)#20., 12.,40.)#35.)\n \n fiter.n = RooRealVar(\"exponent\", \"exponent\",1.)#, 0., 12 )\n \n fiter.a = RooRealVar(\"transition\",\"transition\", 0.5, 3) ## Transition point, in sigmas\n fiter.sig = RooCBShape(\"Sigmodel\",\"Sigmodel\", fiter.mass, fiter.mean, fiter.sigma, fiter.a, fiter.n) \n return 1", "def get_cnu(nu_min, nu_max, n_nu):\n ## Frequency grids; border, difference, centre [b, d, c]\n bnu = nu_min * (nu_max/nu_min)**(np.arange(n_nu+1)/float(n_nu))\n #dnu = bnu[1:] - bnu[0:n_nu]\n cnu = np.sqrt( bnu[1:] * bnu[0:n_nu] )\n return cnu", "def _u_naught(self):\n adjusted_cost = self.c/self.a_csc.dot(np.ones(self.mrows))\n cost_matrix = adjusted_cost*self.a + np.amax(adjusted_cost)*(~self.a)\n return adjusted_cost[np.argmin(cost_matrix, axis=1)]", "def _find_cusps(self):\n N = self.level()\n s = []\n\n for d in arith.divisors(N):\n w = arith.gcd(d, N//d)\n if w == 1:\n if d == 1:\n s.append(Cusp(1,0))\n elif d == N:\n s.append(Cusp(0,1))\n else:\n s.append(Cusp(1,d))\n else:\n for a in range(1, w):\n if arith.gcd(a, w) == 1:\n while arith.gcd(a, d//w) != 1:\n a += w\n s.append(Cusp(a,d))\n return sorted(s)", "def f1_osyczka2(x1, x2, x3, x4, x5, x6):\n return -1 * ((25 * pow(x1 - 2, 2)) +\n pow((x2 - 2), 2) +\n pow(x3 - 1, 2) * pow(x4 - 4, 2) +\n pow(x5 - 1, 2))", "def _clenshaw_curtis_weights(n):\n from scipy.fftpack import ifft\n\n N = np.arange(start=1, stop=n, step=2)[:, None]\n l = N.size\n m = n - l\n\n v0 = np.vstack([2. / N / (N-2), 1. 
/ N[-1]] + [0] * m)\n v2 = -v0[:-1] - v0[:0:-1]\n\n g0 = -np.ones((n, 1))\n g0[l] += n\n g0[m] += n\n g = g0 / (n ** 2 - 1 + n % 2)\n\n wcc = ifft((v2 + g).flatten()).real\n wcc = np.hstack([wcc, wcc[0]])\n\n return wcc * np.pi / (n / 2 + 1)", "def bjs(l, c):\n if len(l) == 4:\n l = mbvector(l)\n elif len(l) == 3:\n pass\n else:\n return 0\n v = np.array([1, pi, e])\n r = l / np.linalg.norm(l)\n m = np.cross(r, v)\n n = np.cross(r, m)\n m = m / np.linalg.norm(m)\n n = n / np.linalg.norm(n)\n w = np.arange(0, 2 * pi, 0.001)\n s = len(w)\n\n mm = vect_contract(m, c, m)\n mn = vect_contract(m, c, n)\n nm = vect_contract(n, c, m)\n nn0 = vect_contract(n, c, n)\n nn = np.linalg.inv(nn0)\n\n val1 = mm - np.dot(np.dot(mn, nn), nm)\n R = BB = np.zeros(shape=(3, 3))\n for i in range(1, s):\n t = 1 - cos(w[i])\n CO = cos(w[i])\n SI = sin(w[i])\n R[0, 0] = t * r[0] ** 2 + CO\n R[0, 1] = t * r[0] * r[1] - SI * r[2]\n R[0, 2] = t * r[0] * r[2] + SI * r[1]\n R[1, 0] = t * r[0] * r[1] + SI * r[2]\n R[1, 1] = t * r[1] ** 2 + CO\n R[1, 2] = t * r[1] * r[2] - SI * r[0]\n R[2, 0] = t * r[0] * r[2] - SI * r[1]\n R[2, 1] = t * r[1] * r[2] + SI * r[0]\n R[2, 2] = t * r[2] ** 2 + CO\n\n mr = np.dot(R, np.transpose(m))\n nr = np.dot(R, np.transpose(n))\n\n mm = vect_contract(mr, c, mr)\n mn = vect_contract(mr, c, nr)\n nm = vect_contract(nr, c, mr)\n nn0 = vect_contract(nr, c, nr)\n nn = np.linalg.inv(nn0)\n val2 = mm - np.dot(np.dot(mn, nn), nm)\n BB = BB + 0.5 * (val2 + val1) * (w[i] - w[i - 1])\n val1 = val2\n B = BB / (8 * pi**2)\n return B", "def king(r, n0, rc, b):\n return n0 * (1. + (r/rc)**2)**b", "def trueDiversity(blau):\n\n return - np.sqrt(1/(blau-1))", "def binomC(k,n):\n return np.double( comb(n, k, exact=1) )", "def Tinker05(self,dc,nu):\n if len(self.bias_par.keys()) == 0:\n a = 0.707\n b = 0.35\n c = 0.8\n else:\n a = self.bias_par['a']\n b = self.bias_par['b']\n c = self.bias_par['c']\n sa = a**0.5\n return 1.+(sa*(a*nu**2) + sa*b*(a*nu**2)**(1.-c) - (a*nu**2)**c/((a*nu**2)**c + \\\n b*(1.-c)*(1.-c/2.)))/(dc*sa)", "def maclaurin_binomial(value,m,k):\n global first_value\n first_value = 0.0\n error(value)\n\n #attempt to Approximate (1+x)^m for given values \n try:\n \n for item in xrange(1,k):\n next_value =m*(value**item)/factorial(item)\n \n for i in range(2,item+1): \n next_second_value =(m-i+1)\n next_value *= next_second_value\n first_value += next_value\n\n return first_value + 1\n \n #Raise TypeError if input is not within\n #the interval of convergence\n except TypeError,exception:\n print exception\n\n #Raise OverflowError if an over flow occur \n except OverflowError:\n print '\\n<Please enter a lower k value to avoid the Over flow\\n '", "def _get_ucb_beta_th(dim, time_step):\n return np.sqrt(0.5 * dim * np.log(2 * dim * time_step + 1))", "def combinations(n) -> float:\r\n c = math.factorial(n) / (math.factorial(2) * math.factorial(n - 2))\r\n return c", "def f1_score(confusion):\n sens = sensitivity(confusion)\n prec = precision(confusion)\n return 2 * sens * prec / (sens + prec)", "def binomial_coefficient(n, k):\n if 0 <= k <= n:\n return reduce(lambda a, b: a * (n - b) / (b + 1), xrange(k), 1)\n else:\n return 0", "def factor_circulant_multiplication(u, x, k=1):\n n = len(u) \n D_k = (k**(1/n))**np.arange(0,n)\n Lambda = fft(D_k*x)\n return (1/D_k)*real(ifft(Lambda*fft(D_k*u))) # y", "def bayesian_UCB(deques: List[deque],\n model: BetaBernoulli,\n mode: str,\n topk: int = 1,\n ucb_c: int = 1,\n **kwargs) -> Union[int, List[int]]:\n metric_val = model.eval\n if mode == 'max':\n 
metric_val += ucb_c * model.variance\n ranked = np.argsort(metric_val)[::-1]\n elif mode == 'min':\n metric_val -= ucb_c * model.variance\n ranked = np.argsort(metric_val)\n\n if topk == 1:\n for j in range(len(deques)):\n category = ranked[j]\n if len(deques[category]) != 0:\n return category\n else:\n categories_list = []\n candidates = set([i for i in range(len(deques)) if len(deques[i]) > 0])\n # when we go through 'ranked' and len(categories_list) < topk, topk sampling is reduced to top 1\n if len(candidates) < topk:\n return bayesian_UCB(deques, model, mode, topk=1)\n else:\n for category in ranked:\n if category in candidates:\n categories_list.append(category)\n if len(categories_list) == topk:\n return categories_list", "def NormU(x):\n return sum(0.5*x**2)", "def crootnxtest(n):\n b = bina(n)\n for bi in b:\n print(bi)\n q = QuantumRegister(n, 'q')\n circ = QuantumCircuit(q)\n for i in bi:\n circ.x(q[i])\n for _ in range(2 ** (n)):\n crootnx(circ, q, q[0], q[n - 1], 2 ** n, False)\n circ.barrier(q)\n launch2(circ)\n\n circ_m = measure(circ, q, [i for i in range(n)])\n\n return circ_m", "def algorithm_1_2(p, c, x):\n\n q = np.array(c, dtype=np.float64)\n\n for k in range(1, p + 1):\n for j in range(0, p - k + 1):\n q[j] = (1 - x) * q[j] + x * q[j + 1]\n return q[0]", "def perfectrefl(wavelength):\n return 1.0", "def new_binomial_prefactor(s,l1,l2,PAx,PBx):\n with loops.Scope() as L:\n L.total = 0.\n L.t = 0\n for _ in L.while_range(lambda: L.t < s + 1):\n #TEMP TODO rewrite this. The cond_range causes a huge overhead.\n # Try Valeev implementation\n for _ in L.cond_range(((s - l1) <= L.t) & (L.t <= l2)):\n L.total += binomials[l1,s-L.t] * binomials[l2,L.t] * PAx[l1-s + L.t] * PBx[l2 - L.t]\n L.t += 1\n return L.total", "def nw(n):\n return 4*n*n + 1", "def bin_cdf(n, p, x):\n\n # p C (bin_dist) ** 0 ) *(1-bin_dist)** p\n\n # n = (p)=20\n # x = x = 1 = r\n # nCr = n! 
/ r!(n-r)\n\n \n\n\n\n\n\n\n\n\n def bin_dist(n, p, x):\n \"\"\"\n Given n number of trials, p the probability of success,\n what is the probability of having x successes?\n\n Your function should raise a ValueError if x is higher\n than n.\n\n If you need to compute combinations, you can import the\n function \"comb\" from the package \"scipy.special\"\n\n :param n: number of trials (int)\n :param p: probability of success\n :param x: number of successes (int)\n :return: probability of having x successes\n :rtype: float\n :raise ValueError: if x > n\n \"\"\"\n def factorial(x):\n if x >= 0:\n \n factorial = 1\n\n for i in range(1, x + 1):\n factorial = float(factorial * i)\n # print(f' The factorial of {x} is {factorial}') \n return factorial\n\n else:\n raise ValueError(\"Sorry x cannot be a negative number\")\n\n def combination(n, r):\n \"\"\"\n Given n total number of items,\n what is the number of possible ways\n to choose r items from it?\n\n :param n: total number of items (integer)\n :param r: number of items to arrange (int)\n :return: number of combinations\n :rtype: integer\n \"\"\"\n\n \n\n \n numerator = factorial(n)\n denominator = factorial(r)\n subtracted_answer = factorial(n-r)\n \n\n answer = numerator/(denominator * subtracted_answer)\n print(answer)\n return answer \n\n # from scipy.special import comb\n if x > n:\n raise ValueError(\"Error, x must be less than n\")\n else:\n\n\n prob_success = float((combination(n, x)) * ((p**x)*((1-p)**(n-x))))\n\n print(prob_success)\n return prob_success \n \n # an= 1-bin_dist(n,p,x)\n # print(f'word{an}')\n # n= 12\n # p=0.25\n # # x=0??\n # ((n!)/ (x!*(n-x)!)) * (p**x) * (1-p)**(n-x)\n sum_prob = []\n for i in range(x+1):\n print(i)\n prob = bin_dist(n,p,x=i)\n sum_prob.append(prob)\n print(sum_prob)\n total =sum(sum_prob)\n print(total)", "def magic_sample(self, ys):\n\n #for each non-zero element in y\n #we want to multiply the initial state by HGate(i) SGate(i) HGate(i)\n #this turns out to be equivalent to multiplying the whole final state by\n #U H_k S_k H_k U^\\dagger\n #but H_k S_k H_k = e^{i\\pi/4} \\frac{1}{\\sqrt{2}} (I -i X_k)\n #so now we evolve identity forward by U (trivial)\n #and evolve X_k forward by U (using the AGState)\n #then we have to send the resulting Pauli through UC and UH\n #giving a third Pauli\n #then the state is of the form (we^{i\\pi/4}) UC UH (I + i^d P)/sqrt(2) |s>\n #then we apply Bravyi et al's prop. 
4 to turn this into a new ch form\n \n\n chCopy = deepcopy(self.chState) #we update this copy as we go\n\n for i, y in enumerate(ys):\n if y:\n #we want to know what U_c^\\dagger U X_i U^\\dagger U_c is\n #firstly we use the A-G info\n # U X_i U^\\dagger is the i'th destabiliser\n x = self.agState.x[self.n+i]\n z = self.agState.z[self.n+i]\n r = self.agState.r[self.n+i]\n\n #print(x,z,r)\n x_col = np.array([x]).T\n z_col = np.array([z]).T\n \n #now we apply U_c to this using the CH-form info\n x_mat = chCopy.F * x_col\n z_mat = (chCopy.M * x_col + chCopy.G*z_col) % np.uint8(2)\n r = (r + util.sort_pauli_string(x_mat, z_mat)) % np.uint8(2)\n\n u = (x @ chCopy.F) % np.uint8(2)\n h = (x @ chCopy.M + z @ chCopy.G) % np.uint8(2)\n\n g = (x @ (z + chCopy.g)) % np.uint8(4)\n\n #now U_c^dag U X_i U^dag U_C = (-1)^r i^g prod_j Z_j^{h_j} X_j^{u_j}\n #we want to conjugate this by U_H\n #everywhere chCopy.v == 1 we flip a z to an x and an x to a z\n #everywhere chCopy.v == 1 and u == 1 and h == 1 we need to swap the order of our x and z so we get a minus sign\n\n u2 = u*(np.uint8(1) ^ chCopy.v) ^ (h*chCopy.v)\n h2 = (u*chCopy.v) ^ (h*(np.uint8(1) ^ chCopy.v))\n\n r = (r + (u*h*chCopy.v).sum()) % np.uint8(2)\n \n \n #now U_H^dag U_c^dag U X_i U^dag U_C U_H = (-1)^r i^g prod_j Z_j^{h2_j} X_j^{u2_j}\n\n t = u2 ^ chCopy.s\n r = (r + h2 @ t) % np.uint8(2)\n\n #now we have w UC UH |s> = w (-1)^r (i)^g UC UH |t>\n\n if all(t == chCopy.s):\n chCopy.w *= np.exp(1j*np.pi/4) * (1 + (1j)**(g+2*r -1) )/ np.sqrt(2)\n else:\n phase, VCList, v, s = util.desuperpositionise(chCopy.s, t, (g+2*r -1)%np.uint8(4), chCopy.v)\n\n chCopy.w *= phase*np.exp(1j*np.pi/4)/np.sqrt(2)\n chCopy.v = v\n chCopy.s = s\n\n for gate in VCList:\n gate.rightMultiplyC(chCopy)\n \n return chCopy", "def test_trapezoidal_conv_rate():\r\n\tfrom math import exp\r\n\tf = lambda x: 6E8*x-4E6\r\n\tF = lambda x: 3E8*x**2 - 4E6*x #Anti-derivative\r\n\ta = 1.1; b = 1.1002\r\n\tr = convergence_rates(f, F, a, b, 14)\r\n\tprint(r)\r\n\ttol = 0.01\r\n\tmsg = str(r[-4:]) # show last 4 estimated rates\r\n\tassert (abs(r[-1]) - 2) < tol, msg", "def calculate_bleu(output_lns, refs_lns, **kwargs) -> dict:\n return {\"bleu\": round(corpus_bleu(output_lns, [refs_lns], **kwargs).score, 4)}", "def calculate_bleu(output_lns, refs_lns, **kwargs) -> dict:\n return {\"bleu\": round(corpus_bleu(output_lns, [refs_lns], **kwargs).score, 4)}", "def getIoU(bbx_benchmark,bbx_detect):\r\n \r\n # get the cordinates of intersecting square\r\n x_inter_1=max(bbx_benchmark[1],bbx_detect[1])\r\n y_inter_1=max(bbx_benchmark[0],bbx_detect[0])\r\n x_inter_2=min(bbx_benchmark[3],bbx_detect[3])\r\n y_inter_2=min(bbx_benchmark[2],bbx_detect[2])\r\n# =============================================================================\r\n# x_inter_1=max(bbx_benchmark['xmin'],bbx_detect['xmin'])\r\n# y_inter_1=max(bbx_benchmark['ymin'],bbx_detect['ymin'])\r\n# x_inter_2=min(bbx_benchmark['xmax'],bbx_detect['xmax'])\r\n# y_inter_2=min(bbx_benchmark['ymax'],bbx_detect['ymax'])\r\n# =============================================================================\r\n \r\n # get intersect area\r\n inter_area = max(0, x_inter_2 - x_inter_1) * max(0, y_inter_2 - y_inter_1)\r\n \r\n # get bbx area\r\n benchmark_area = (bbx_benchmark[2]-bbx_benchmark[0]) * (bbx_benchmark[3]-bbx_benchmark[1])\r\n detect_area=(bbx_detect[2]-bbx_detect[0]) * (bbx_detect[3]-bbx_detect[1])\r\n \r\n # calculate IoU\r\n iou = inter_area / float(benchmark_area + detect_area - inter_area)\r\n \r\n return iou" ]
[ "0.63876575", "0.633093", "0.6154798", "0.6075605", "0.5960223", "0.58998066", "0.58998066", "0.5878511", "0.582961", "0.5794877", "0.5789813", "0.5785925", "0.57034636", "0.5690178", "0.5688557", "0.56797487", "0.56768996", "0.5637333", "0.5628463", "0.56043136", "0.5594184", "0.55809665", "0.5553268", "0.55522966", "0.55449504", "0.5533451", "0.55187905", "0.54977477", "0.5445191", "0.5439713", "0.5438795", "0.5426767", "0.5423738", "0.54113364", "0.5409563", "0.5402943", "0.540067", "0.5393388", "0.53858536", "0.53838027", "0.53810966", "0.53657883", "0.53571206", "0.53525835", "0.5350899", "0.5350899", "0.5349145", "0.5341573", "0.5341573", "0.5338482", "0.53237206", "0.53178585", "0.53096694", "0.5308999", "0.5295057", "0.5295057", "0.5291883", "0.52742374", "0.52682763", "0.5263422", "0.5253696", "0.52482396", "0.5241163", "0.5230126", "0.5219648", "0.521566", "0.521379", "0.5209815", "0.5203138", "0.51997596", "0.5199681", "0.51987123", "0.51931465", "0.5193054", "0.5191619", "0.51914996", "0.5187396", "0.5180341", "0.5180292", "0.5177242", "0.5174451", "0.5171418", "0.5169482", "0.51646966", "0.51634204", "0.51627755", "0.5162617", "0.5153838", "0.5152702", "0.51507264", "0.51475", "0.5145265", "0.5142237", "0.51396614", "0.51319665", "0.51318455", "0.51316375", "0.5130367", "0.5130367", "0.5128911" ]
0.55336875
25
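The bin_dist negative in the preceding record computes the binomial probability mass function by hand and mentions scipy.special.comb; as a point of comparison, a minimal sketch of the same formula using that helper (assuming SciPy is available; the name binomial_pmf is illustrative) is:

from scipy.special import comb

def binomial_pmf(n, p, x):
    # P(X = x) for X ~ Binomial(n, p): x successes in n trials, success probability p.
    if x > n:
        raise ValueError("x must not exceed n")
    return comb(n, x) * (p ** x) * ((1 - p) ** (n - x))

# Example: exactly 3 successes in 12 trials with p = 0.25.
print(binomial_pmf(12, 0.25, 3))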
Custom displayhook for the exec in default(), which prevents assignment of the _ variable in the builtins.
def displayhook(self, obj): # reproduce the behavior of the standard displayhook, not printing None if obj is not None: print >> self.stdout, repr(obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def displayhook(arg):\n if arg is not None:\n __builtin__._ = None\n print stringify_func(arg)\n __builtin__._ = arg", "def __displayhook__(*args, **kwargs): # real signature unknown\n pass", "def displayhook(p_object): # real signature unknown; restored from __doc__\n pass", "def start_displayhook(self):\n pass", "def _displayhook(self, result):\n\t\tif isinstance(result, ShellHelpModeCookie):\n\t\t\tself.helpmode = True\n\t\t\treturn\n\n\t\tif result is not None:\n\t\t\tself.outputbuffer.write(repr(result) + \"\\n\")", "def _format_default_functions(self):\n self._out_formatter = null_out_formatter\n self._f_default_names = lambda x: [0]", "def check_for_underscore(self):\n # If something injected a '_' variable in __builtin__, delete\n # ipython's automatic one so we don't clobber that. gettext() in\n # particular uses _, so we need to stay away from it.\n if '_' in __builtin__.__dict__:\n try:\n del self.shell.user_ns['_']\n except KeyError:\n pass", "def bad_default(var, default=unknown2): # [undefined-variable]\n print(var, default)\n print(xxxx) # [undefined-variable]\n augvar += 1 # [undefined-variable]\n del vardel # [undefined-variable]", "def _default(self):\n self.app.args.print_help()", "def hook_print():\n sys.stdout = PrintHook()", "def _redefine_print(is_main):\n import builtins as __builtin__\n\n builtin_print = __builtin__.print\n\n def print(*args, **kwargs):\n force = kwargs.pop(\"force\", False)\n if is_main or force:\n builtin_print(*args, **kwargs)\n\n __builtin__.print = print", "def test_override_builtin(self):\n PyLoader.register(override_builtins=True)\n self.assertIs(PRIORITY_HOOKS['.py'], PyLoader)", "def help_me():\n print(\"i'm trapped\")", "def _format_default_functions(self):\n self._out_formatter = count_out_formatter_general\n self._core_characterizer = characterizer_1sh_counter\n self._f_default_names = counter_featurenames", "def ipython_unmonkey(self):\n IP = self.IP\n if hasattr(IP, '_runsource'):\n IP.runsource = IP._runsource\n IP.handle_alias = IP._handle_alias\n IP.handle_help = IP._handle_help\n IP.handle_auto = IP._handle_auto\n IP.handle_magic = IP._handle_magic\n IP.handle_shell_escape = IP._handle_shell_escape\n IP.outputcache.update = IP.outputcache._update\n\n IP.esc_handlers = {IP.ESC_PAREN:IP._handle_auto,\n IP.ESC_QUOTE:IP._handle_auto,\n IP.ESC_QUOTE2:IP._handle_auto,\n IP.ESC_MAGIC:IP._handle_magic,\n IP.ESC_HELP:IP._handle_help,\n IP.ESC_SHELL:IP._handle_shell_escape,\n }", "def disp_x(self, *args, **kwargs) -> Any:\n pass", "def help_default_values():\n click.echo_via_pager(docgen.generate_default_value_help())", "def set_builtin(self, builtin):\n self.options['builtin'] = builtin", "def default():", "def do_display(self, arg):\n try:\n value = self._getval_or_undefined(arg)\n except:\n return\n self._get_display_list()[arg] = value", "def default_action(self):\n pass", "def default_command(self, function):\r\n if Inspection.find_calling_module() == '__main__':\r\n if None in self._commands:\r\n defaults = (self._commands[None].__name__, function.__name__)\r\n raise self.Error('Found two default commands: %s and %s' % defaults)\r\n self._commands[None] = function\r\n return function", "def repl_print_statements():\n pass", "def default(self, line):\n self.history.append(line)\n if line[:1] == '!':\n line = line[1:]\n locals = self.curframe_locals\n ns = self.curframe.f_globals.copy()\n ns.update(locals)\n try:\n code = compile(line + '\\n', '<stdin>', 'single')\n save_stdout = sys.stdout\n save_stdin = sys.stdin\n save_displayhook 
= sys.displayhook\n try:\n sys.stdin = self.stdin\n sys.stdout = self.stdout\n sys.displayhook = self.displayhook\n exec(code, ns, locals)\n finally:\n sys.stdout = save_stdout\n sys.stdin = save_stdin\n sys.displayhook = save_displayhook\n except:\n exc_info = sys.exc_info()[:2]\n self.error(traceback.format_exception_only(*exc_info)[-1].strip())", "def default(self, line):\n self.stdout.write(\"Unknown command: '{}'\\n\".format(line))\n self._help()", "def catch_interaction(\n noninteractive: Any, func: Callable, *args, _default: Any = \"\", **kwargs\n):\n if noninteractive:\n return _default\n return func(*args, **kwargs)", "def handle_op_general(self, op):\n\n self.change_ops_state(DISABLED)\n self.display.output_op(op)", "def _print_custom(self):\n pass", "def undefined(cmd, *args):\n return 'undefined %s called with %s' % (cmd, args)", "def _ipython_display_(self):\n with self._sc:\n self._box._ipython_display_()", "def prepost_hook_one(self) -> None:\n self.poutput(\"one\")", "def my_function(real_name, optional_display_name=None):\n optional_display_name = optional_display_name or real_name\n print(optional_display_name)", "def __call__(self, result=None):\n self.check_for_underscore()\n if result is not None and not self.quiet():\n self.start_displayhook()\n self.write_output_prompt()\n result, result_repr = self.compute_result_repr(result)\n self.write_result_repr(result_repr)\n self.update_user_ns(result)\n self.log_output(result)\n self.finish_displayhook()", "def disp(self, *args, **kwargs) -> Any:\n pass", "def _default_output(result):\n return str(result) if result is not None else None", "def _validate_builtin(_):\n pass", "def do_undisplay(self, arg):\n try:\n del self._get_display_list()[arg]\n except KeyError:\n print('** %s not in the display list **' % arg, file=self.stdout)", "def _(\n self,\n *args,\n **kwargs,\n ):\n return None # to be implemented", "def _(\n self,\n *args,\n **kwargs,\n ):\n return None # to be implemented", "def init_verbose_print(verbose=True, vfunc=print, nvfunc=None):\n global verboseprint\n if verbose:\n verboseprint = vfunc\n else:\n if not nvfunc:\n verboseprint = lambda *a, **k: None\n else:\n verboseprint = nvfunc\n return verboseprint", "def default(self, line):\n try:\n exec(line) in self._locals, self._globals\n except Exception as e:\n print(e.__class__, \":\", e)", "def add_help(text):\n global default_text\n default_text += text", "def render_defaults(stdscr):\n max_y = stdscr.getmaxyx()[0] - 1\n if superglobals.information_enabled:\n stdscr.addstr(0, 0, uname().system)\n stdscr.addstr(1, 0, uname().machine)\n \n for i in range(0, max_y + 1):\n stdscr.addstr(i, 43, \"│\") # Barrier that protects program from user input.", "def autodefaults (self):\r\n\r\n self.defaults_from_notes(identifying_key=EMPTYCHAR,\r\n mark=EQUAL,\r\n obj=self.default_dict['commands'],\r\n entrytext=COMMANDMACROSCRIPT)", "def process_cmd_default(ctx, tex, cmd, mode):\n if not (mode & MATH):\n # if cmd.name not in ctx.unprocessed_commands: print(ctx.outputfile, cmd)\n ctx.unprocessed_commands.add(cmd.name)\n return process_cmd_passthru(ctx, tex, cmd, mode)", "def _get_help_string(self, action):\n helptext = action.help\n if '%(default)' not in action.help:\n if action.default != '==SUPPRESS==' and action.default:\n # defaulting_nargs = ['?', '*']\n # if action.nargs in defaulting_nargs:\n helptext += colored(' [default: %(default)s]', 'cyan')\n return helptext", "def checkDefaultRenderGlobals(*args, q=True, query=True, e=True, edit=True,\n 
**kwargs)->Union[None, Any]:\n pass", "def useful():\n\n print('I do something.')", "def initDefaultCommand(self):\n pass", "def __def_function__():\n pass", "def event_beforehide(self):\n logging.warning('beforehide undefined')", "def prepost_hook_two(self) -> None:\n self.poutput(\"two\")", "def _(x):\n return x", "def _(x):\n return x", "def add_default_numeric_op(op_name):\n add_numeric_op(\"__%s__\"%op_name)", "def undefined(self, ident, args):\n return \"\"", "def f_default(self, default = 1) :\n pass", "def ignore_builtin_verification():\n return not current_space().skip_builtin_verification", "def __window_print(self):\n pass", "def flag_dec(func):\n def wrapfunc_and_call(*args, **kwargs):\n if func.__name__ == 'prompt':\n if PROMPT_ENABLED:\n return func(*args, **kwargs)\n else:\n return '' # use empty string to indicate default\n else:\n return None\n return wrapfunc_and_call", "def event_aftershow(self):\n logging.warning('aftershow undefined')", "def showUsage():\n None", "def default(self, line):\n print \"Command not found\\n\"", "def rec_default(self):\n pass", "def rec_default(self):\n self.new_func_triggers.setText('(0,5)')\n self.new_param.setText('1')", "def default(str):\n return str + ' [Default: %default]'", "def drop_if_default(self, default):\n # type: (...) -> DisplayDataItem\n self._default = default\n self._drop_if_default = True\n return self", "def cmd_noop(args):", "def _add_default_op(op_name):\n _add_op(\"__%s__\"%op_name, getattr(operator, op_name))", "def default(self, line):\n cmd,args,line = self.parseline(line)\n try:\n background = ampersand(line)\n if background:\n bg = BackProc(cmd,args)\n else:\n self.subproc(line)\n except:\n sys.stdout.write(\"Unknown command '{}' returning to shell...\\n\".format(line))", "def set_raw_output(val: int) -> None:\n print(val)", "def handle_repl(locals):\n dump_stacktraces()\n namespace = {\n 'dump_stacktraces': dump_stacktraces,\n 'sys': sys,\n 'os': os,\n 'socket': socket,\n 'traceback': traceback,\n }\n if locals:\n namespace.update(locals)\n try:\n ManholeConsole(namespace).interact()\n except SystemExit:\n pass\n finally:\n for attribute in ['last_type', 'last_value', 'last_traceback']:\n try:\n delattr(sys, attribute)\n except AttributeError:\n pass", "def test_applies_to_default(self):\n self.assertTrue(\n self.extension.template_hook_no_applies.applies_to(self.request))\n self.assertTrue(\n self.extension.template_hook_no_applies.applies_to(None))", "def inspect_builtin(obj):\n\n print_with_indent(\"+Builtin Function: %s\" % obj.__name__)\n indent()\n print_docstr(obj)\n dedent()\n print()", "def dummy_fn(self):\n\t\tpass", "def _hook(self):", "def display_help(self):\n pass", "def add_default_reverse_numeric_op(op_name):\n add_reverse_numeric_op(\"__r%s__\"%op_name)", "def magic_p(self, parameter_s=''):\n exec 'print ' + parameter_s in self.shell.user_ns", "def process_default(self, character):\n pass", "def standard(self) -> global___Snippet.Standard:", "def event_afterhide(self):\n logging.warning('afterhide undefined')", "def builtin(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"builtin\")", "def show_help():\n pass", "def init_emulation(self):\n if not self.skip_flag:\n _ = self.runtime.service(self, \"i18n\").ugettext\n # self.display_name = _(self.display_name)\n self.fields['display_name']._default = _(self.fields['display_name']._default)\n self.skip_flag = True", "def dox(p):\n if (p['v']['trace'] == 'on'):\n print('.X')\n #endif\n m = p['sy']['pop']()\n p['sy'][m](p) \n # verb adds 
ok/nok", "def _render_default_main_call_into(into):\n into.append(get_short_executable())\n into.append(' -m ')\n into.append(PACKAGE_NAME)\n return into", "def noop():", "def do_show(self, args):\n\n func = getattr(args, \"func\", None)\n\n if func is not None:\n func(self, args)\n else:\n self.do_help(\"show\")", "def shout():\n # Use echo_word in nonlocal scope\n nonlocal echo_word\n\n echo_word = echo_word + '!!!'", "def getDefault():", "def _init_display(self):\n raise NotImplementedError", "def default(self, line):\n line = line.split(' ')[0]\n self.PRINT.warning(line + \": command not found\")\n self.PRINT.newline()", "def exec_builtin(self, cmd):\r\n func = Builtin.builtins.get(cmd[0])\r\n if func is None:\r\n return False\r\n func(self, cmd)\r\n return True", "def testAddUnderscoreVar(self):\n pl = Pipeline(loadInitFile=False)\n repl = REPL(pl)\n repl.runCommandLine('7')\n repl.runCommandLine('_ + 10')\n self.assertEqual(17, pl.stdin)\n self.assertEqual(REPL.DEFAULT_PS1, repl.prompt)", "def test_as_default(self):\n self.assertEqual(render('{% default_as %}...{{ snake }}'), '...hisss')", "def ipython_monkeypatch(self, IP):\n\n # new input hook for Python source\n # also, trap stdout, stderr\n IP._runsource = IP.runsource\n def runsource(source, filename=\"<input>\", symbol=\"single\"):\n code = IP._runsource(source, filename=filename, symbol=symbol)\n if code == False:\n # it's complete\n number = IP.outputcache.prompt_count\n self.add_input(source, number)\n if (self.checkpoint is not None and \n not IP.outputcache.prompt_count % self.checkpoint):\n self.write()\n return code\n IP.runsource = runsource\n\n # new input hook for aliases\n IP._handle_alias = IP.handle_alias\n def handle_alias(line,continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n line_out = IP._handle_alias(line, continue_prompt, pre, iFun,\n theRest)\n number = IP.outputcache.prompt_count\n self.add_special_input(line, number)\n return line_out\n IP.handle_alias = handle_alias\n\n # new input hook for shell escapes\n IP._handle_shell_escape = IP.handle_shell_escape\n def handle_shell_escape(line, continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n line_out = IP._handle_shell_escape(line, continue_prompt, pre, \n iFun, theRest)\n number = IP.outputcache.prompt_count\n self.add_special_input(line, number)\n return line_out\n IP.handle_shell_escape = handle_shell_escape\n\n # new input hook for magics\n IP._handle_magic = IP.handle_magic\n def handle_magic(line, continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n line_out = IP._handle_magic(line, continue_prompt, pre, \n iFun, theRest)\n number = IP.outputcache.prompt_count\n self.add_special_input(line, number)\n return line_out\n IP.handle_magic = handle_magic\n\n # new input hook for autocall lines\n IP._handle_auto = IP.handle_auto\n def handle_auto(line, continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n line_out = IP._handle_auto(line, continue_prompt, pre, \n iFun, theRest)\n number = IP.outputcache.prompt_count\n self.add_special_input(line, number)\n return line_out\n IP.handle_auto = handle_auto\n\n # new input hook for helps\n IP._handle_help = IP.handle_help\n def handle_help(line, continue_prompt=None,\n pre=None,iFun=None,theRest=None):\n line_out = IP._handle_help(line, continue_prompt, pre, \n iFun, theRest)\n number = IP.outputcache.prompt_count\n self.add_special_input(line, number)\n return line_out\n IP.handle_help = handle_help\n\n # new output hook\n IP.outputcache._update = IP.outputcache.update\n def 
update(arg):\n IP.outputcache._update(arg)\n self.add_output(self.get_str(arg), IP.outputcache.prompt_count)\n IP.outputcache.update = update\n\n IP.esc_handlers = {IP.ESC_PAREN:handle_auto,\n IP.ESC_QUOTE:handle_auto,\n IP.ESC_QUOTE2:handle_auto,\n IP.ESC_MAGIC:handle_magic,\n IP.ESC_HELP:handle_help,\n IP.ESC_SHELL:handle_shell_escape,\n }\n\n self.IP = IP\n\n # I'm *so* going to Hell for this.", "def help(self, dummy):\r\n help = self.doc + \"\\n\"\r\n if help.find(\"%s\") > 0:\r\n help = help.replace(\"%s\", self.progname)\r\n print_function(help, end='', file=self.stdout)\r\n self.exit(0)", "def ugly():\n\n global _pretty\n _pretty = False", "def _default_vprint_worker(*args, **kwargs):\r\n print(*args, **kwargs)" ]
[ "0.7744383", "0.6542661", "0.6142525", "0.58426565", "0.578165", "0.5749669", "0.55891895", "0.55188674", "0.550801", "0.5497482", "0.54949677", "0.54631275", "0.53636926", "0.5337594", "0.53103167", "0.53094953", "0.52910274", "0.5272987", "0.5272368", "0.52542436", "0.52296096", "0.52180856", "0.5134219", "0.50993073", "0.50950295", "0.50809586", "0.5069147", "0.50590515", "0.5051723", "0.50498194", "0.5015312", "0.499888", "0.4998412", "0.49960443", "0.49768627", "0.4972944", "0.49712086", "0.49581522", "0.49581522", "0.49432716", "0.4942656", "0.49287796", "0.49261948", "0.49241343", "0.4921429", "0.49197143", "0.49122688", "0.48956186", "0.48904082", "0.48875067", "0.48827142", "0.48818266", "0.4879385", "0.4879385", "0.4869533", "0.4862049", "0.4853199", "0.48457187", "0.48392394", "0.4836053", "0.48355278", "0.4835206", "0.48274162", "0.48269612", "0.4825385", "0.48181665", "0.48115942", "0.48067838", "0.48061728", "0.4776917", "0.4768906", "0.47603133", "0.47589937", "0.4754584", "0.47527364", "0.4737092", "0.4731118", "0.4730467", "0.47261652", "0.47185835", "0.47184464", "0.47154617", "0.47141498", "0.47096005", "0.47092247", "0.47064507", "0.4702975", "0.47012693", "0.46917838", "0.46913096", "0.4686252", "0.46658567", "0.46623537", "0.46620017", "0.46559745", "0.46527314", "0.46470028", "0.46461928", "0.4643468", "0.46334532" ]
0.56435174
6
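The displayhook record above describes a hook that mirrors the default behaviour without binding the builtins _ variable; a minimal, hypothetical sketch of such a hook (the name quiet_displayhook is illustrative, not taken from any sample) is:

import sys

def quiet_displayhook(value):
    # Like the default hook: do nothing for None, otherwise show repr(value),
    # but never rebind builtins._ the way sys.__displayhook__ does.
    if value is not None:
        print(repr(value))

sys.displayhook = quiet_displayhook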
Produce a reasonable default.
def defaultFile(self): filename = _odb.getCurrentFrame().filename if filename == '<string>' and self.mainpyfile: filename = self.mainpyfile return filename
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getDefault():", "def Default():\n return _DEFAULT", "def default():", "def f_default(self, default = 1) :\n pass", "def default():\n return DefaultSwh.default()", "def _defaulted(cls, value, default):\n return default if value is None else value", "def default(str):\n return str + ' [Default: %default]'", "def default(self):\n return self.__default", "def _default_value(self):\n return None", "def _default_value(self):\n raise NotImplementedError", "def default(value, replacement):\n return value if value is not None else replacement", "def default(self):\n raise NotImplementedError", "def get_default_value(self):\n pass", "def testDefault():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n oldX = conf.x\n conf.default('x', None)\n conf.default('Z', 5)\n\n assert conf.x == oldX\n assert conf.Z == 5", "def default():\n return DefaultPvWattsv5.default()", "def default(self):\n # easy enough\n return self._default", "def default(self):\n return self.get(name='Unknown')", "def default(self):\n return self.__default", "def default(self):\r\n return self.default_value()", "def get_default(cls):\n raise NotImplementedError", "def _get_simple_default_value(simple):\n return _SIMPLE_DEFAULT_VALUES[simple]", "def default():\n return DefaultPvWattsv8.default()", "def rec_default(self):\n pass", "def default_arg(default):\n class DefaultArg(argparse.Action):\n def __call__(self, parser, namespace, value, option_string):\n if value is None:\n setattr(namespace, self.dest, default)\n else:\n setattr(namespace, self.dest, value)\n\n return DefaultArg", "def f_get_default(self):\n raise NotImplementedError(\"Should have implemented this.\")", "def get_default(self):\n\n\t\treturn self.__default", "def argument(arg, default):\n return \"{0}={1}\".format(arg, default) if default else arg", "def default_setter(value):\n return value if value and not force else default_value", "def help_default_values():\n click.echo_via_pager(docgen.generate_default_value_help())", "def get_default(cls):\n return cls.NONE", "def default(default_value, force=False):\n def default_setter(value):\n \"\"\"\n Sets the value to the given default value, assuming the original value\n is not set or the default value is set to forced.\n\n :param Any value: Injected by CKAN core\n :rtype: Any\n \"\"\"\n return value if value and not force else default_value\n\n return default_setter", "def default(self, value):\n # save {value} as the default\n self._default = value\n # all done\n return", "def default(self):\n return self._default", "def default(self):\n return self._default", "def test_with_default() -> None:\n soup = generate_case(\"with_default\")\n\n tests.html_schema_doc_asserts.assert_default_values(soup, ['\"Linux\"', '[\"white\", \"blue\"]', \"2\"])", "def default(self, value):\n # also easy\n self._default = value\n # all done\n return", "def default():\n raise NotImplementedError(\"Pvwattsv7 default file no longer exists!\")", "def default_value(self):\n if self.default:\n return copy.deepcopy(self.default)\n else:\n return None", "def input_with_default(prompt, default):\n response = raw_input(\"%s (Default %s) \"%(prompt, default))\n if not response:\n return default\n return response", "def default():\n return DefaultPvSamv1.default()", "def default(self, default):\n\n self._default = default", "def default(self, default):\n self._default = default\n return self", "def default_from(self):\n\n return \"\"", "def default_value_scalar(source=None):\n if not default:\n return None\n if not source:\n return default\n 
else:\n return source", "def default(self) -> Any:\n raise NotImplementedError()", "def default(self):\n # get my default value\n default = self._default\n # if it is still at its trivial value\n if default is schemata.component.default:\n # ask my protocol\n return self.protocol.pyre_default\n # otherwise, return it\n return default", "def setdefault(self, k, d=None): # real signature unknown; restored from __doc__\n pass", "def default(prompt, default, validator=(lambda x: True), hint=None):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n while not validator(user_input):\n user_input = input(\"{0} [{1}]\".format(prompt, default))\n return user_input or default", "def set_default(self, name, default, group=None):\n opt_info = self._get_opt_info(name, group)\n opt_info['default'] = self._get_enforced_type_value(\n opt_info['opt'], default)\n opt_info['location'] = LocationInfo(\n Locations.set_default,\n _get_caller_detail(3), # this function has a decorator to skip\n )", "def default_input(prompt, default_value):\r\n item = input(prompt + \"[Enter for \" + default_value + \"]: \").lower()\r\n if item == \"\":\r\n item = default_value\r\n return item", "def setdefault(self, value: Any) -> None: # type: ignore\n self.default_factory = value \n return", "def default(self) -> object:\n return self._default", "def _get_default(self):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def get_default(name, value):\n return os.environ.get('EXAMPLE_{}'.format(name.upper()), value)", "def default(self):\n raise Error(\"Missing mandatory setting:\", self.name)", "def add_default(line, default):\n if \"XUtils\" in default:\n return line\n return line.strip(\";\") + \" = \" + default + \";\"", "def default_arg(request, arg, typ=None, default=None):\n\tif arg not in request.values:\n\t\treturn default\n\tif typ is None:\n\t\treturn request.values[arg]\n\ttry:\n\t\treturn typ(request.values[arg])\n\texcept ValueError:\n\t\treturn default", "def _create_defaults(self):\n return DefaultCommandOptionValues(\n min_confidence=3, output_format='vs7')", "def default_value(number, pow=2):\n return number ** pow", "def setdefault(self, value: Any) -> None:\n self.default_factory = value \n return", "def get_default(field):\n return field.scheme.default is None and SKIP_VALUE or field.scheme.default # noqa", "def default_value(self) -> Optional[Any]:\n return self.get(\"/DV\")", "def getorelse(self, name, default=None):\n try:\n return self._defaults[name]\n except KeyError:\n return default", "def default_values():\n return pad_keys({}, default_basenames())", "def getDefaultName(self): # real signature unknown; restored from __doc__\n pass", "def bad_default(var, default=unknown2): # [undefined-variable]\n print(var, default)\n print(xxxx) # [undefined-variable]\n augvar += 1 # [undefined-variable]\n del vardel # [undefined-variable]", "def _default_value(self, addr, size, name=None, inspect=True, events=True, key=None, **kwargs):\n pass", "def _get_default_arg(args, defaults, arg_index):\n if not defaults:\n return DefaultArgSpec(False, None)\n\n args_with_no_defaults = len(args) - len(defaults)\n\n if arg_index < args_with_no_defaults:\n return DefaultArgSpec(False, None)\n else:\n value = defaults[arg_index - args_with_no_defaults]\n if (type(value) is str):\n value = '\"%s\"' % value\n return DefaultArgSpec(True, value)", "def default_value(self):\r\n if callable(self._default):\r\n return self._default()\r\n return self._default", "def test_default(Class, 
default_in, default_out):\n attribute = Class(\"test\", default=default_in)\n assert attribute.default == default_out", "def default(self):\n if callable(self._default):\n return self._default()\n\n return self._default", "def get_default(self):\r\n if self.has_default:\r\n if callable(self.default):\r\n return self.default()\r\n else:\r\n return self.default", "def default(self):\r\n if callable(self._default):\r\n return self._default()\r\n\r\n return self._default", "def raw_input_default_config(q, default=None, obj=None):\n if default is None:\n if callable(q['default']):\n f1 = q['default']\n try:\n default = f1(obj)\n except TypeError:\n pass\n else:\n default = q['default']\n if 'ask' in q and not q['ask']:\n return default\n if 'obfuscate' in q and q['obfuscate']:\n return raw_input_default(q['q'], default=default, obfuscate=True)\n else:\n return raw_input_default(q['q'], default=default, obfuscate=False)", "def format_default(reg):\n\t\tif reg.size == \"accum\":\n\t\t\treturn str(float(reg.default)) + \"k\"\n\t\telse:\n\t\t\treturn str(int(reg.default)) + \"L\"", "def override(self, default: Optional[str] = None) -> Optional[str]:\n return self.type_override if self.type_override else default", "def to_string_with_default(value: Any, default_value: str) -> str:\n result = StringConverter.to_nullable_string(value)\n return result if not (result is None) else default_value", "def without_defaults(self):\n ...", "def check_default(self, sec, name, default):\n if default is None:\n raise\n\n # print debug message saying default value was used\n if not default:\n default_text = 'empty string'\n else:\n default_text = default\n\n msg = \"Setting [{}] {} to default value ({})\".format(sec,\n name,\n default_text)\n if self.logger:\n self.logger.debug(msg)\n else:\n print('DEBUG: {}'.format(msg))\n\n # set conf with default value so all defaults can be added to\n # the final conf and warning only appears once per conf item\n # using a default value\n self.set(sec, name, default)", "def default():\n return DefaultGeothermal.default()", "def getdefault(self, option, type=str, default=None):\r\n return self.get(Config.DEFAULT_SECTION, option, type, default=default)", "def as_default_string(string):\n return same_string_type_as(compat.default_string_type(), string)", "def use_defaults(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"use_defaults\")", "def default(self, default):\n\n self._set_field(\"value\", default)", "def default():\n return \"FIRST PROJECT - we have \" + str(len(get_client_rates())) + \" clients in total.\"", "def default_prf():\n par = _par.ParameterFile.from_file(_os.path.dirname(__file__) + '/default_prf.prf')\n return par", "def setdefault(self, key, defval = None):\n if key in self:\n return self[key]\n else:\n self[key] = defval\n return defval", "def validate_default(self, value):\n return self.__validate(value, self.validate_default_element)", "def create_default_identifier():\n return random.randint(0, constants.UINT64_MAX)", "def get(value, default=\"\"):\n if value is None:\n return default\n\n return value", "def default(cls):\n return cls(0b011)", "def fix_default_param(defparam, classname):\n if (classname + '::') == defparam[0:len(classname)+2:]:\n return defparam[len(classname)+2::]\n #if defparam[len(defparam)-1] == \"f\":\n # return defparam[0:len(defparam)-1]\n return defparam", "def default():\n return DefaultTcsMoltenSalt.default()", "def _format_default_value(self, default):\n return json.dumps(default)", "def 
_get_iterative_default_value():\n return _ITERATIVE_DEFAULT_VALUE", "def default_value(self, val):\n self.set_property(\"DefaultValue\", val)", "def default_kind_val(prop_dict, context=None):\n########################################################################\n if 'type' in prop_dict:\n vtype = prop_dict['type'].lower()\n if vtype == 'real':\n kind = 'kind_phys'\n elif vtype == 'complex':\n kind = 'kind_phys'\n elif FORTRAN_DP_RE.match(vtype) is not None:\n kind = 'kind_phys'\n else:\n kind = ''\n # End if\n else:\n kind = ''\n if 'local_name' in prop_dict:\n lname = ' {}'.format(prop_dict['local_name'])\n else:\n lname = ''\n # End if\n ctxt = context_string(context)\n raise CCPPError('No type to find default kind for {}{}'.format(lname, ctxt))\n # End if\n return kind", "def initial(self):\n from setman import settings\n return getattr(settings, self.name, self.default)", "def bootstrap_default():\n\treturn default_configuration", "def get_default(cls, opt):\n try:\n return cls._OPTS[opt].default\n except KeyError:\n raise ValueError('unknown option name %r' % (opt,))", "def _get_complex_default_value(complex):\n return _COMPLEX_DEFAULT_VALUE" ]
[ "0.77582496", "0.7588288", "0.7476828", "0.7380406", "0.7205692", "0.70597076", "0.6892593", "0.686635", "0.68469757", "0.6833807", "0.67760617", "0.67025155", "0.6625156", "0.65530145", "0.65388095", "0.65117794", "0.65041345", "0.64939255", "0.64847153", "0.64734393", "0.64676666", "0.6452955", "0.6442221", "0.6415938", "0.64078355", "0.63899547", "0.6385692", "0.6370577", "0.63576907", "0.6333686", "0.6330411", "0.62622833", "0.6254882", "0.6254882", "0.62446666", "0.62402004", "0.6232438", "0.6213518", "0.6212321", "0.61951053", "0.61943775", "0.61902225", "0.618168", "0.61804545", "0.6180435", "0.614914", "0.6139393", "0.61300397", "0.61073744", "0.6084751", "0.6083485", "0.60740274", "0.60717857", "0.60698813", "0.60668796", "0.60576135", "0.60320055", "0.6031311", "0.60074127", "0.60031044", "0.59976953", "0.5990526", "0.5987208", "0.5972092", "0.59719497", "0.5971245", "0.5965758", "0.5962356", "0.59578025", "0.5933441", "0.5925196", "0.59233594", "0.59093076", "0.5899052", "0.58921367", "0.58899623", "0.58855844", "0.5882161", "0.58779913", "0.5876189", "0.58761084", "0.58665806", "0.5863123", "0.58426017", "0.5839017", "0.58331", "0.5827793", "0.5825174", "0.5819893", "0.5819513", "0.5815068", "0.5811038", "0.58094794", "0.5799823", "0.57996047", "0.5797022", "0.5796566", "0.57956094", "0.57939136", "0.57915384", "0.57911116" ]
0.0
-1
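The record above pairs the query "Produce a reasonable default." with a debugger-style defaultFile(); a small hypothetical sketch of the same fallback pattern (the names and the "app.py" path are illustrative) is:

def default_file(frame_filename, main_script=None):
    # Prefer the frame's real filename; fall back to the main script
    # when the frame came from an exec'd string.
    if frame_filename == "<string>" and main_script:
        return main_script
    return frame_filename

print(default_file("<string>", main_script="app.py"))  # -> app.py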
Helper function for break/clear parsing; may be overridden. lookupmodule() translates a (possibly incomplete) file or module name into an absolute file name.
def lookupmodule(self, filename): if os.path.isabs(filename) and os.path.exists(filename): return filename f = os.path.join(sys.path[0], filename) if os.path.exists(f) and self.canonic(f) == self.mainpyfile: return f root, ext = os.path.splitext(filename) if ext == '': filename = filename + '.py' if os.path.isabs(filename): return filename for dirname in sys.path: while os.path.islink(dirname): dirname = os.readlink(dirname) fullname = os.path.join(dirname, filename) if os.path.exists(fullname): return fullname return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lookup_module(filename):\r\n\r\n # stolen from pdb\r\n import os\r\n import sys\r\n\r\n if os.path.isabs(filename) and os.path.exists(filename):\r\n return filename\r\n f = os.path.join(sys.path[0], filename)\r\n if os.path.exists(f): # and self.canonic(f) == self.mainpyfile:\r\n return f\r\n root, ext = os.path.splitext(filename)\r\n if ext == '':\r\n filename = filename + '.py'\r\n if os.path.isabs(filename):\r\n return filename\r\n for dirname in sys.path:\r\n while os.path.islink(dirname):\r\n dirname = os.readlink(dirname)\r\n fullname = os.path.join(dirname, filename)\r\n if os.path.exists(fullname):\r\n return fullname\r\n return None", "def lookup_module(filename):\r\n if filename is None:\r\n return None\r\n\r\n if os.path.isabs(filename) and os.path.exists(filename):\r\n return os.path.realpath(filename)\r\n _, ext = os.path.splitext(filename)\r\n filename_ = filename\r\n if ext == '':\r\n filename_ = filename + '.py'\r\n if os.path.isabs(filename_):\r\n return os.path.realpath(filename_)\r\n for dir_name in sys.path:\r\n if dir_name is not None:\r\n while os.path.islink(dir_name):\r\n dir_name = os.readlink(dir_name)\r\n fullname = os.path.join(dir_name, filename_)\r\n if os.path.exists(fullname):\r\n return fullname\r\n return filename_", "def findModule(name):", "def _parse_module_name(program_param):\n if program_param and program_param.endswith(\".py\"):\n return program_param[:-3]\n return program_param", "def _get_module(self, filename, base):\n if not filename or not filename.endswith('.py'):\n utils._log('Cannot get module for non python-source file: ', filename)\n return '' # only pytnon modules are supported\n base = base or os.path.join(\n self.window.extract_variables().get('project_path', ''),\n self.window.extract_variables().get('project_base_name', ''))\n utils._log('Getting module for file %s relative to base %s' % (filename, base))\n if not filename.startswith(base):\n utils._log('Cannot determine module path outside of directory')\n return ''\n return filename.replace(base, '').replace(os.path.sep, '.')[:-3].strip('.')", "def resolve_name(name):\n parts = name.split('.')\n cursor = len(parts)\n module_name, rest = parts[:cursor], parts[cursor:]\n\n while cursor > 0:\n try:\n ret = __import__('.'.join(module_name))\n break\n except ImportError:\n if cursor == 0:\n raise\n cursor -= 1\n module_name = parts[:cursor]\n rest = parts[cursor:]\n ret = ''\n\n for part in parts[1:]:\n try:\n ret = getattr(ret, part)\n except AttributeError:\n raise ImportError\n\n return ret", "def resolve_relative_name(package, module, relative):\n\n if relative.startswith('.'):\n \n # Add a dummy module onto the end if this is a package. It will be\n # pulled off in the loop below.\n if package == module:\n module += '.dummy'\n \n parts = module.split('.')\n while relative.startswith('.'):\n relative = relative[1:]\n parts.pop(-1)\n relative = '.'.join(parts) + ('.' if relative else '') + relative\n\n return relative", "def _get_module_name(filename: str) -> str:\n return \".\".join(_get_relative(filename).split(os.path.sep)[2:]).replace(\".pyi\", \"\").replace(\".__init__\", \"\")", "def resolve_address(self, offset):\n symbol = self.get_name(offset)\n module = self.get_segment_name(offset)\n\n if not module and \"_\" in symbol:\n # No module name for the segment, try to determine from the symbol name\n symbol_split = symbol.split(\"_\")\n\n # Given a symbol, i.e. 
ws2_32_WSAStartup, can we find ws2_32.dll in the list of segments?\n for segment in idautils.Segments():\n segment_name = idc.get_segm_name(segment).lower()\n\n if segment_name.startswith(symbol_split[0].lower()):\n new_name = \"\"\n for i in range(0, len(symbol_split)):\n new_name = \"{}.dll\".format(\"_\".join(names[0:i]))\n if new_name == segment_name:\n break\n\n if new_name == segment_name:\n module = new_name\n break\n\n # Still nothing?!\n if not module and \"_\" in symbol:\n symbol_split = symbol.split(\"_\")\n\n j = 1\n if symbol_split[0] == \"ws2\":\n j += 1\n module = \"{}.dll\".format(\"_\".join(symbol_split[0:j]))\n else:\n module = \"{}.dll\".format(symbol_split[0])\n\n # Strip module name from symbol name\n if module:\n module_name = module.split(\".\")[0].lower()\n\n if symbol[:len(module_name)].lower().startswith(module_name):\n symbol = symbol[len(module_name) + 1:]\n\n if not symbol:\n symbol = \"{:x}\".format(offset)\n\n self.ret = (module, symbol)\n return self.ret", "def _resolve_name(name, package, level):\r\n if not hasattr(package, 'rindex'):\r\n raise ValueError(\"'package' not set to a string\")\r\n dot = len(package)\r\n for x in xrange(level, 1, -1):\r\n try:\r\n dot = package.rindex('.', 0, dot)\r\n except ValueError:\r\n raise ValueError(\"attempted relative import beyond top-level \"\r\n \"package\")\r\n return \"%s.%s\" % (package[:dot], name)", "def _resolve_name(path, package, start):\n\n if not hasattr(package, 'rindex'):\n raise ValueError(\"'package' not set to a string\")\n dot = len(package)\n for _ in range(start, 1, -1):\n try:\n dot = package.rindex('.', 0, dot)\n except ValueError:\n raise ValueError(\"attempted relative import beyond top-level \"\n \"package\")\n return \"{}.{}\".format(package[:dot], path)", "def _resolve_name(name, package, level):\n if not hasattr(package, 'rindex'):\n raise ValueError(\"'package' not set to a string\")\n dot = len(package)\n for x in xrange(level, 1, -1):\n try:\n dot = package.rindex('.', 0, dot)\n except ValueError:\n raise ValueError(\"attempted relative import beyond top-level package\")\n return \"%s.%s\" % (package[:dot], name)", "def getMangledName(self, name, module=None):\n if module is os.path:\n return \"os.path\"\n if isinstance(name, str) and (name.startswith(self.start) or name == self.package):\n return self.prefix + name\n return name", "def resolve(fname):\n if os.path.dirname(__file__):\n return os.path.dirname(__file__) + \"/../common/\" + fname\n else:\n return \"/common/\" + fname", "def get_module_name(module_path):\n return ntpath.split(module_path)[1].split(\".\")[0]", "def get_module_name(module_path):\n return ntpath.split(module_path)[1].split(\".\")[0]", "def testFindAASpamAbs(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, 'spam')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('_fake.a.aa.spam', aaeggs))", "def resolve_import(self, item):\n name = item.name\n # The last part in `from a.b.c import d` might be a symbol rather than a\n # module, so we try a.b.c and a.b.c.d as names.\n short_name = None\n if item.is_from and not item.is_star:\n if '.' 
in name.lstrip('.'):\n # The name is something like `a.b.c`, so strip off `.c`.\n rindex = name.rfind('.')\n else:\n # The name is something like `..c`, so strip off just `c`.\n rindex = name.rfind('.') + 1\n short_name = name[:rindex]\n\n if import_finder.is_builtin(name):\n filename = name + '.so'\n return Builtin(filename, name)\n\n filename, level = convert_to_path(name)\n if level:\n # This is a relative import; we need to resolve the filename\n # relative to the importing file path.\n filename = os.path.normpath(\n os.path.join(self.current_directory, filename))\n\n if not short_name:\n try_filename = True\n try_short_filename = False\n elif item.source:\n # If the import has a source path, we can use it to eliminate\n # filenames that don't match.\n source_filename, _ = os.path.splitext(item.source)\n dirname, basename = os.path.split(source_filename)\n if basename == \"__init__\":\n source_filename = dirname\n try_filename = source_filename.endswith(filename)\n try_short_filename = not try_filename\n else:\n try_filename = try_short_filename = True\n\n files = []\n if try_filename:\n files.append((name, filename))\n if try_short_filename:\n short_filename = os.path.dirname(filename)\n files.append((short_name, short_filename))\n\n for module_name, path in files:\n for fs in self.fs_path:\n f = self._find_file(fs, path)\n if not f or f == self.current_module.path:\n # We cannot import a file from itself.\n continue\n if item.is_relative():\n package_name = self.current_module.package_name\n if package_name is None:\n # Relative import in non-package\n raise ImportException(name)\n module_name = get_absolute_name(package_name, module_name)\n if isinstance(self.current_module, System):\n return System(f, module_name)\n return Local(f, module_name, fs)\n\n # If the module isn't found in the explicit pythonpath, see if python\n # itself resolved it.\n if item.source:\n prefix, ext = os.path.splitext(item.source)\n mod_name = name\n # We need to check for importing a symbol here too.\n if short_name:\n mod = prefix.replace(os.path.sep, '.')\n mod = utils.strip_suffix(mod, '.__init__')\n if not mod.endswith(name) and mod.endswith(short_name):\n mod_name = short_name\n\n if ext == '.pyc':\n pyfile = prefix + '.py'\n if os.path.exists(pyfile):\n return System(pyfile, mod_name)\n elif not ext:\n pyfile = os.path.join(prefix, \"__init__.py\")\n if os.path.exists(pyfile):\n return System(pyfile, mod_name)\n return System(item.source, mod_name)\n\n raise ImportException(name)", "def resolve_name(name):\n parts = name.split('.')\n used = parts.pop(0)\n found = __import__(used)\n for part in parts:\n used += '.' 
+ part\n try:\n found = getattr(found, part)\n except AttributeError:\n __import__(used)\n found = getattr(found, part)\n return found", "def resolve_full_name(base, name, level):\n if level == 0:\n return name\n bits = base.rsplit(\".\", level - 1)\n base = bits[0]\n return f\"{base}.{name}\" if name else base", "def get_mod_name():\n return sys.argv[0].split(\"/\")[-1].split(\".py\")[0]", "def resolve_name(name, *additional_parts):\n additional_parts = \".\".join(additional_parts)\n\n if additional_parts:\n name = name + \".\" + additional_parts\n\n parts = name.split(\".\")\n\n if len(parts) == 1:\n # No dots in the name--just a straight up module import\n cursor = 1\n fromlist = []\n else:\n cursor = len(parts) - 1\n fromlist = [parts[-1]]\n\n module_name = parts[:cursor]\n\n while cursor > 0:\n try:\n ret = __import__(\".\".join(module_name), fromlist=fromlist)\n break\n except ImportError:\n if cursor == 0:\n raise\n cursor -= 1\n module_name = parts[:cursor]\n fromlist = [parts[cursor]]\n ret = \"\"\n\n for part in parts[cursor:]:\n try:\n ret = getattr(ret, part)\n except AttributeError:\n raise ImportError(name)\n\n return ret", "def __get_non_python_library_module_file(module_name, environment=sys.path):\n found = None\n\n # Use the longer paths first\n paths = reversed(sorted(environment))\n for path in paths:\n base_path = path.replace(\"\\\\\", \"/\")\n if stypy_parameters_copy.type_inference_file_directory_name in path:\n base_path = base_path.replace(\"/\" + stypy_parameters_copy.type_inference_file_directory_name, \"\")\n\n temp = base_path + \"/\" + module_name.replace('.', '/') + \".py\"\n if os.path.isfile(temp):\n found = temp\n # Module (__init__) names have precedence over file names\n temp = base_path + \"/\" + module_name.replace('.', '/') + \"/__init__.py\"\n if os.path.isfile(temp):\n found = temp\n break\n if found is None:\n pass\n\n return found", "def infer_module_name(filename, fspath):\n filename, _ = os.path.splitext(filename)\n for f in fspath:\n short_name = f.relative_path(filename)\n if short_name:\n # The module name for __init__.py files is the directory.\n if short_name.endswith(os.path.sep + \"__init__\"):\n short_name = short_name[:short_name.rfind(os.path.sep)]\n return short_name.replace(os.path.sep, '.')\n # We have not found filename relative to anywhere in pythonpath.\n return ''", "def _build_lookup(tree: dict, stdlib_lookup: bool = False) -> None:\n def _apply(item: dict, python_stdlib: set) -> None:\n if item[\"type\"] == \"module\" and item[\"imports\"]:\n package = item[\"fullname\"].partition(\".\")[0]\n for import_module in item[\"imports\"].values():\n import_module[\"lookup\"] = None\n name, level, relative = _get_name_level_relative_import_module(import_module)\n # So we first try to find a module with the expected name in the same directory\n # We look the parent item of the current module\n target = _look_in_package(tree, item[\"path\"], name, level=level)\n if target:\n import_module[\"lookup\"] = target\n else:\n # We now look if a package or module has the same name (within the same package)\n target = find_tree(\n tree,\n lambda x, n, p: (x[\"fullname\"] == n) and (x[\"fullname\"].partition(\".\")[0] == p),\n args=(name, package)\n )\n if target:\n import_module[\"lookup\"] = target[\"path\"]\n elif relative:\n # We haven't found so it might be a symbol imported by a package in __init__.py\n # We don't want to let an internal reference as not found\n import_module[\"lookup\"] = \"@internal\"\n elif name.partition(\".\")[0] == 
item[\"fullname\"].partition(\".\")[0]:\n # This is in case a module from within the same package has not been found\n # We don't want to let an internal reference as not found\n import_module[\"lookup\"] = \"@internal\"\n else:\n # In last resort, we look for the package in the standard library\n if name in python_stdlib:\n import_module[\"lookup\"] = \"@stdlib\"\n apply_tree(tree, _apply, args=(_build_python_stdlib(stdlib_lookup),))", "def gethandlername(URL):\n match = re.search(\"/([a-zA-Z0-9_-]+)\\.prog($|/|\\?)\", URL)\n if not match:\n # Couldn't find the requested module\n raise404(\"Couldn't find a module name in URL \" + URL)\n return match.group(1)", "def getmodulename(path):\r\n info = getmoduleinfo(path)\r\n if info: return info[0]", "def get_module_reference_name(a_module):\n return a_module.__name__.split('.')[-1]", "def import_module(self, location, name):", "def modulename():\n from inspect import getmodulename,getfile\n return getmodulename(getfile(lambda x:x))", "def get_full_path_of_import(import_module_reference):\n f = inspect.getfile(import_module_reference)\n p = os.path.split(f)\n return p[0]", "def get_module_base_address(pid, module_name):\n address_to_return = None\n h_module_snap = None\n try:\n h_module_snap = kernel32.create_tool_help32snapshot(\n kernel32.TH32CS_SNAPMODULE, pid\n )\n except OSError:\n while kernel32.get_last_error() == ERROR_BAD_LENGTH:\n try:\n h_module_snap = kernel32.create_tool_help32snapshot(\n kernel32.TH32CS_SNAPMODULE, pid\n )\n except OSError:\n pass\n\n if h_module_snap:\n try:\n me32 = kernel32.module32first(h_module_snap)\n while me32:\n if module_name.encode('ascii') == me32.sz_module:\n address_to_return = me32.mod_base_addr\n break\n me32 = kernel32.module32next(h_module_snap, me32)\n kernel32.close_handle(h_module_snap)\n return address_to_return\n except OSError:\n kernel32.close_handle(h_module_snap)\n return None", "def get_module_name_from_entry_point(entry_point):\n if 'module_name' in dir(entry_point):\n return entry_point.module_name\n elif 'module' in dir(entry_point):\n return entry_point.module\n\n raise exception.SysinvException(_(\n \"Module name for entry point {} \"\n \"could not be determined.\".format(entry_point)))", "def test_get_module_qualname_from_path_invalid_path(self):\n\n name = b_utils.get_module_qualname_from_path(\"/a/b/c/d/e.py\")\n self.assertEqual(\"e\", name)", "def find_module_file(base_directory, path):\n return os.path.join(base_directory, path)", "def find_local_module(root, name):\n\n return pm.find_local_module(root, name)", "def normalize_module_name(layer_name):\n modules = layer_name.split('.')\n try:\n idx = modules.index('module')\n except ValueError:\n return layer_name\n del modules[idx]\n return '.'.join(modules)", "def module_name(self):\n return \"py{0:s}\".format(self.library_name[3:])", "def get_module_filename(module) -> Optional[str]:\n if hasattr(module, '__file__'):\n filename = module.__file__\n if not filename or not os.path.exists(filename):\n return None\n\n program_dir = _get_program_dir()\n if filename[:len(program_dir)] == program_dir:\n return filename\n return None", "def edit_url(\n modulename: str, is_package: bool, mapping: Mapping[str, str]\n) -> str | None:\n for m, prefix in mapping.items():\n if m == modulename or modulename.startswith(f\"{m}.\"):\n filename = modulename[len(m) + 1 :].replace(\".\", \"/\")\n if is_package:\n filename = f\"{filename}/__init__.py\".lstrip(\"/\")\n else:\n filename += \".py\"\n return f\"{prefix}{filename}\"\n return None", 
"def name_from_file(pth = getattr(modules['__main__'], '__file__', 'optimize.default')):\n\treturn '{0:s}'.format(splitext(basename(pth))[0])", "def find_main_module(self):\n\n if self.type == 'passthrough':\n return None\n directory, basename = os.path.split(self.main_module)\n module, ext = os.path.splitext(basename)\n if ext:\n # if the module include the extension, just return its absolute\n # path\n return os.path.join(self.code_dir, self.main_module)\n\n # Otherwise, try to find the proper module, by assuming that there\n # is only one file with such name. Note that this may fail if\n # there are other files such as byte-compiled binaries, etc.\n found = glob.glob(os.path.join(self.code_dir, directory, module+'.*'))\n if not found:\n raise APIException('module not found: {}'\n .format(self.main_module), 400)\n\n return found[0]", "def find_module(modulename, filename=None):\n\n \n full_path = []\n if filename:\n full_path.append(os.path.dirname(os.path.abspath(filename)))\n full_path += sys.path\n fname = imp.find_module(modulename, full_path)\n return fname[1]", "def abs__file__():\r\n for m in sys.modules.values():\r\n if hasattr(m, '__loader__'):\r\n continue # don't mess with a PEP 302-supplied __file__\r\n try:\r\n m.__file__ = os.path.abspath(m.__file__)\r\n except AttributeError:\r\n continue", "def sanitize_module_name(module_name):\n module_name = module_name.replace('-', '_').replace('.', '_')\n if module_name[0] not in string.ascii_letters:\n module_name = \"a\" + module_name\n return module_name", "def resolve(impmod, nameparts):\n if not nameparts:\n return None\n\n m = impmod\n for nname in nameparts:\n m = getattr(m, nname, None)\n if m is None:\n break\n\n return m", "def _get_uml_filename(module_filename) -> str:\n return Path(module_filename).stem", "def lookup(self, name):\n try:\n return self._baseLookup(name)\n except ImportError:\n raise ImportError(\"No module named %r in mapper %r\" % (name, self))", "def module_path():\n return os.path.dirname(unicode(__file__, sys.getfilesystemencoding( )))", "def _find_module(self, name, path, parent=None):\n\n if parent is not None:\n # assert path is not None\n fullname = parent.identifier + '.' 
+ name\n else:\n fullname = name\n\n node = self.findNode(fullname)\n if node is not None:\n self.msg(3, \"find_module: already included?\", node)\n raise ImportError(name)\n\n if path is None:\n if name in sys.builtin_module_names:\n return (None, BUILTIN_MODULE)\n\n path = self.path\n\n return self._find_module_path(fullname, name, path)", "def _find_corresponding_module_for_location(self, location):\r\n try:\r\n return self.descriptor.system.load_item(location)\r\n except ItemNotFoundError:\r\n # The linked problem doesn't exist.\r\n log.error(\"Problem {0} does not exist in this course.\".format(location))\r\n raise\r\n except NoPathToItem:\r\n # The linked problem does not have a path to it (ie is in a draft or other strange state).\r\n log.error(\"Cannot find a path to problem {0} in this course.\".format(location))\r\n raise", "def test_normal_module_name(assert_errors, filename, default_options):\n visitor = WrongModuleNameVisitor(default_options, filename=filename)\n visitor.run()\n\n assert_errors(visitor, [])", "def load_module(module_name):\n try:\n module = resolve_name(module_name)\n except ImportError:\n raise error.NotFound(msg=module_name)\n\n return module", "def _find_module(name, path=None):\n\n parts = name.split('.')\n\n for part in parts:\n if path is not None:\n path = [path]\n\n fh, path, descr = imp.find_module(part, path)\n\n return fh, path, descr", "def module_file(module):\n ...", "def get_module_name(self, module_name):\r\n if module_name in self.aliases:\r\n return self.aliases[module_name]\r\n return module_name", "def get_kmodule(mod, dep_map):\n\n\t_mod = mod\n\tif mod in dep_map: return mod\n\n\tmod = mod.replace('_', '-')\n\tif mod in dep_map: return mod\n\n\traise ValueError, 'unable to get module name \"%s\"' % _mod", "def _get_module(module):\n try:\n return sys.modules[module]\n except KeyError:\n raise ValueError(\n module + \"is not a valid module name or it is not loaded\"\n )", "def get_name(name, file: str) -> str:\n return os.path.basename(file) if name == \"__main__\" else name", "def test_find_module_py33():\n assert find_module_py33('_io') == (None, '_io', False)", "def _get_info(self, fullmodname):\n parts = fullmodname.split('.')\n submodname = parts[-1]\n modpath = '/'.join(parts)\n for suffix, is_package in _SEARCH_ORDER:\n relpath = modpath + suffix\n try:\n self.datablocks[relpath]\n except KeyError:\n pass\n else:\n return submodname, is_package, relpath\n msg = ('Can\\'t find module %s in .blend %r' %\n (fullmodname, self.path_entry))\n ##logging.debug(msg)\n raise BlendImportError(msg)", "def lookup(name):", "def lookup(name):", "def is_module(filename):\n if not os.path.exists(filename):\n return None\n\n if filename.endswith('.py'):\n # Assume the file is a module file\n return PY_MODULEFILE\n regex = re.compile(re.escape(r'#%Module'))\n if regex.search(open(filename).readline()):\n return TCL_MODULEFILE\n return None", "def getfullnameof(mod, xtrapath=None):\n pywin32_paths = []\n if is_win:\n pywin32_paths = [os.path.join(get_python_lib(), 'pywin32_system32')]\n if is_venv:\n pywin32_paths.append(\n os.path.join(base_prefix, 'Lib', 'site-packages',\n 'pywin32_system32')\n )\n\n epath = (sys.path + # Search sys.path first!\n pywin32_paths +\n winutils.get_system_path() +\n compat.getenv('PATH', '').split(os.pathsep))\n if xtrapath is not None:\n if type(xtrapath) == type(''):\n epath.insert(0, xtrapath)\n else:\n epath = xtrapath + epath\n for p in epath:\n npth = os.path.join(p, mod)\n if os.path.exists(npth) and 
matchDLLArch(npth):\n return npth\n return ''", "def test_get_module_qualname_from_path_abs_syms(self):\n\n name = b_utils.get_module_qualname_from_path(\n os.path.join(\n self.tempdir, \"syms\", \"a\", \"bsym\", \"c\", \"test_typical.py\"\n )\n )\n self.assertEqual(\"syms.a.bsym.c.test_typical\", name)", "def lookup_imports_tree(tree: dict, stdlib_lookup: bool = False) -> None:\n _build_fullname(tree)\n _build_imports(tree)\n _build_lookup(tree, stdlib_lookup)", "def search(cls, name, lookup=[]):\r\n if os.path.isfile(name): return name\r\n for spath in lookup:\r\n fname = os.path.join(spath, name)\r\n if os.path.isfile(fname):\r\n return fname\r\n for ext in cls.extensions:\r\n if os.path.isfile('%s.%s' % (fname, ext)):\r\n return '%s.%s' % (fname, ext)", "def link_to_module(self,name,full_path=True,relpath=None):\n i = self.modules.index(name)\n link = \"#M%d\" % i\n if full_path:\n link = self.html_report() + link\n if relpath:\n link = os.path.relpath(link,relpath)\n return link", "def denormalize_module_name(parallel_model, normalized_name):\n fully_qualified_name = [mod_name for mod_name, _ in parallel_model.named_modules() if\n normalize_module_name(mod_name) == normalized_name]\n if len(fully_qualified_name) > 0:\n return fully_qualified_name[-1]\n else:\n return normalized_name # Did not find a module with the name <normalized_name>", "def module_path():\n from sys import path\n from os import getcwd\n from os.path import basename,exists\n from inspect import getmodulename,getfile\n from logging import warn\n # 'getfile' retreives the source file name name compiled into the .pyc file.\n pathname = getfile(lambda x: None)\n if exists(pathname): return pathname\n # The module might have been compiled on a different machine or in a\n # different directory.\n pathname = pathname.replace(\"\\\\\",\"/\")\n filename = basename(pathname)\n dirs = [dir for dir in [getcwd()]+path if exists(dir+\"/\"+filename)]\n if len(dirs) == 0: warn(\"pathname of file %r not found\" % filename)\n dir = dirs[0] if len(dirs) > 0 else \".\"\n pathname = dir+\"/\"+filename\n return pathname", "def _get_name_level_relative_import_module(import_module: dict) -> Tuple:\n level = None\n name = None\n relative = False\n if import_module[\"type\"] == \"import\":\n # We start with import using only import keyword, it can be an import of the form:\n # import module\n # import package.module\n name = import_module[\"name\"]\n elif import_module[\"type\"] == \"from-import\":\n # Now we deal with from keyword like in:\n # from package import module\n # from module import func\n # from .. import module\n if import_module[\"module\"] is None:\n # This is the case for the following types of imports\n # from . import module (level 1)\n # from .. import module (level 2)\n name = import_module[\"name\"]\n relative = True\n else:\n # This is the case for the following types of imports\n # from .module import func (level 1)\n # from ..module import func (level 2)\n name = import_module[\"module\"]\n level = import_module[\"level\"]\n return name, level, relative", "def getModulePath(*args, moduleName: AnyStr=\"\", **kwargs)->AnyStr:\n pass", "def python_like_mod_finder(import_line, alt_path=None,\r\n stop_token=None):\r\n if stop_token and '.' 
in stop_token:\r\n stop_token = stop_token.split('.')[-1]\r\n tokens = re.split(r'\\W', import_line)\r\n if tokens[0] in ['from', 'import']:\r\n # find the base location\r\n try:\r\n _, path, _ = imp.find_module(tokens[1])\r\n except ImportError:\r\n if alt_path:\r\n path = osp.join(alt_path, tokens[1])\r\n else:\r\n path = None\r\n if path:\r\n path = osp.realpath(path)\r\n if not tokens[1] == stop_token:\r\n for part in tokens[2:]:\r\n if part in ['import', 'cimport', 'as']:\r\n break\r\n path = osp.join(path, part)\r\n if part == stop_token:\r\n break\r\n # from package import module\r\n if stop_token and not stop_token in path:\r\n for ext in python_like_exts():\r\n fname = '%s%s' % (stop_token, ext)\r\n if osp.exists(osp.join(path, fname)):\r\n return osp.join(path, fname)\r\n # from module import name\r\n for ext in python_like_exts():\r\n fname = '%s%s' % (path, ext)\r\n if osp.exists(fname):\r\n return fname\r\n # if it is a file, return it\r\n if osp.exists(path) and not osp.isdir(path):\r\n return path\r\n # default to the package file\r\n path = osp.join(path, '__init__.py')\r\n if osp.exists(path):\r\n return path", "def linkcode_resolve(domain, info):\n if domain != 'py' or not info['module']:\n return None\n filename = info['module'].replace('.', '/')\n return \"https://github.com/mathcamp/flywheel/blob/%s/%s.py\" % (version_data['ref'], filename)", "def get_name(internal: str):\n if '__' in internal:\n return ': '.join(get_name(s) for s in internal.split('__'))\n *path, name = internal.split('::')\n current = config.utils.names\n look_in = [current]\n try:\n for k in path:\n current = current[k]\n look_in.append(current)\n except KeyError:\n # noinspection PyUnboundLocalVariable\n logging.warning('invalid namespace {!r} of {!r}'.format(k, internal))\n look_in.reverse()\n for ns in look_in:\n try:\n val = ns[name]\n if isinstance(val, str):\n return val\n elif isinstance(val, dict):\n return val['*this*']\n else:\n raise TypeError('{!r} is neither dict nor str'.format(val))\n except KeyError:\n pass\n logging.warning('Name \"{}\" was not found in the namefile'.format('::'.join(path+[name])))\n return '::'.join(path+[name])", "def identifyIfExternalModuleExists(moduleIn, workingDir):\n if moduleIn.endswith('.py'):\n moduleToLoadString = moduleIn[:-3]\n else:\n moduleToLoadString = moduleIn\n workingDirModule = os.path.abspath(os.path.join(workingDir,moduleToLoadString))\n if os.path.exists(workingDirModule + \".py\"):\n moduleToLoadString = workingDirModule\n path, filename = os.path.split(workingDirModule)\n os.sys.path.append(os.path.abspath(path))\n else:\n path, filename = os.path.split(moduleToLoadString)\n if (path != ''):\n abspath = os.path.abspath(path)\n if '~' in abspath:\n abspath = os.path.expanduser(abspath)\n if os.path.exists(abspath):\n os.sys.path.append(abspath)\n else:\n raise IOError('The file \"{}\" provided does not exist!'.format(moduleIn))\n return moduleToLoadString, filename", "def path_for_import(name):\n return os.path.dirname(os.path.abspath(import_module(name).__file__))", "def module_filename(module, filename):\n path, _ = os.path.split(module.__file__)\n return os.path.join(path, filename)", "def command_from_module_name(module_name):\n if module_name == '__main__':\n return os.path.basename(TAUCMDR_SCRIPT)\n return ' '.join(_command_as_list(module_name))", "def resolve(name):\n arg = Path(name)\n return str(arg.resolve())", "def testRelativeImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, 'spam')\r\n aaeggs 
= os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('spam', aaeggs))", "def _unspecify_name(self, name):\n unspec = None\n path = name.split('.')[0]\n for module in messages.MESSAGES:\n if self._fuzzy_module_name_eq(module, path):\n prefix = module.__name__.split('.')[-1]\n return self._hash_name(prefix + name[len(path)+1:])", "def testRelativeToModule(self):\n # Define modules.\n a = self.DefineModule('a')\n b = self.DefineModule('a.b')\n c = self.DefineModule('a.b.c')\n\n # Define messages.\n A = self.DefineMessage('a', 'A')\n B = self.DefineMessage('a.b', 'B')\n C = self.DefineMessage('a.b.c', 'C')\n D = self.DefineMessage('a.b.d', 'D')\n\n # Find A, B, C and D relative to a.\n self.assertEquals(A, messages.find_definition(\n 'A', a, importer=self.Importer))\n self.assertEquals(B, messages.find_definition(\n 'b.B', a, importer=self.Importer))\n self.assertEquals(C, messages.find_definition(\n 'b.c.C', a, importer=self.Importer))\n self.assertEquals(D, messages.find_definition(\n 'b.d.D', a, importer=self.Importer))\n\n # Find A, B, C and D relative to b.\n self.assertEquals(A, messages.find_definition(\n 'A', b, importer=self.Importer))\n self.assertEquals(B, messages.find_definition(\n 'B', b, importer=self.Importer))\n self.assertEquals(C, messages.find_definition(\n 'c.C', b, importer=self.Importer))\n self.assertEquals(D, messages.find_definition(\n 'd.D', b, importer=self.Importer))\n\n # Find A, B, C and D relative to c. Module d is the same case as c.\n self.assertEquals(A, messages.find_definition(\n 'A', c, importer=self.Importer))\n self.assertEquals(B, messages.find_definition(\n 'B', c, importer=self.Importer))\n self.assertEquals(C, messages.find_definition(\n 'C', c, importer=self.Importer))\n self.assertEquals(D, messages.find_definition(\n 'd.D', c, importer=self.Importer))", "def resolve_module_attr(name):\n hier, attr_name = name.rsplit('.', 1)\n mod = resolve_module(hier)\n attr_val = getattr(mod, attr_name)\n return attr_val", "def module_name_from_filepath(path: str) -> str:\n name = osp.splitext(osp.basename(path))[0]\n if name == '__init__':\n name = osp.basename(osp.dirname(path))\n return name", "def _find_module(model, mod_name):\n for name, module in model.named_modules():\n if name == mod_name:\n return module\n return None", "def _get_name(name):\n if \"::\" in name:\n return name.split(\"::\")[1]\n return name", "def _get_name(name):\n if \"::\" in name:\n return name.split(\"::\")[1]\n return name", "def derive_mod_name(self):\n\n # a) if we're lucky, this is a Fomod install w/ a modname attr\n # TODO: some non-Fomod mods still include an \"info.xml\" file\n if self.has_fomod:\n fname = self.fomod.modname.name\n # fix: the fomod name often includes a version number on the end (like \"Soul Gem Things v1.4.5\")\n vmatch = _version_format.search(fname)\n if vmatch:\n fname = fname[:vmatch.start()].strip()\n\n print(\"fomod found:\")\n print(\" orig:\", self.fomod.modname.name)\n print(\" name:\", fname)\n\n # return self.fomod.modname.name\n return fname\n\n # if not, we'll have to get clever\n\n # b) if the mod includes esp/bsa/etc. 
files, they're often\n # labeled with the mod's \"real\" name\n bname = os.path.basename\n split = os.path.splitext\n\n # check top 2 levels\n # accumulate names\n _names = []\n ext_re = re.compile(r\".*\\.(es[pm]|bsa)$\")\n for f in filter(lambda s: ext_re.search(s.lower()),\n self.archive_files):\n # if re.search(r\".*\\.(es[pm]|bsa)$\", f.lower()):\n _names.append(split(bname(f))[0])\n\n print(f\"names from esp/bsa ({len(_names)}):\")\n for n in _names:\n print(f\" {n}\")\n\n # c) see if we can figure it out from the archive name;\n # try to ignore the version numbers\n archive_name = self.arc_path.stem\n\n # archives downloaded from the nexus generally have\n # the mod name, then a hyphen followed by the modid, then\n # (optionally) another hyphen and version info\n m = _nexus_archive_name_format.match(archive_name)\n\n if m:\n name = m['name']\n\n # TODO: if we can get the modid, we should be able to look up the mod info on the nexus...though that would of course require writing an async web-request module...\n modid = m['modid']\n ver = m['version']\n\n if name:\n # ==> eventually, this should pull the name from the nexus\n\n # sometimes there's some extra stuff like (redundant)\n # version info on the end of the name\n exm = _extra_stuff.search(name)\n if exm:\n name = name[:exm.start()]\n\n if ver:\n ver = ver.replace(\"-\", \".\")\n\n print(\"Derived from archive name:\")\n print(\" name:\", name)\n print(\" modid:\", modid)\n print(\" version:\", ver)\n return name\n\n return \"\"", "def module_name_from_dir(dirname, err=True, files=None):\r\n if files is None:\r\n files = os.listdir(dirname)\r\n names = [file for file in files\r\n if file.endswith('.so') or file.endswith('.pyd')]\r\n if len(names) == 0 and not err:\r\n return None\r\n elif len(names) == 1:\r\n return os.path.join(dirname, names[0])\r\n else:\r\n raise ValueError(\"More than 1 compiled module in this directory:\" +\r\n dirname)", "def _fullname(obj):\n if obj is None:\n return None\n return _modname(obj, True)", "def lookup_module(id):\n return _registry[id]", "def lookup(match):\n word = match.group(0)\n return symtab[unbase(word)] or word", "def _find_module_path(self, fullname, module_name, search_dirs):\n self.msgin(4, \"_find_module_path <-\", fullname, search_dirs)\n\n # Top-level 2-tuple to be returned.\n path_data = None\n\n # List of the absolute paths of all directories comprising the\n # namespace package to which this module belongs if any.\n namespace_dirs = []\n\n try:\n for search_dir in search_dirs:\n # PEP 302-compliant importer making loaders for this directory.\n importer = pkgutil.get_importer(search_dir)\n\n # If this directory is not importable, continue.\n if importer is None:\n # self.msg(4, \"_find_module_path importer not found\", search_dir)\n continue\n\n # Get the PEP 302-compliant loader object loading this module.\n #\n # If this importer defines the PEP 302-compliant find_loader()\n # method, prefer that.\n if hasattr(importer, 'find_loader'):\n loader, loader_namespace_dirs = importer.find_loader(\n module_name)\n namespace_dirs.extend(loader_namespace_dirs)\n # Else if this importer defines the Python 2-specific\n # find_module() method, fall back to that. 
Despite the method\n # name, this method returns a loader rather than a module.\n elif hasattr(importer, 'find_module'):\n loader = importer.find_module(module_name)\n # Else, raise an exception.\n else:\n raise ImportError(\n \"Module %r importer %r loader unobtainable\" % (module_name, importer))\n\n # If this module is not loadable from this directory, continue.\n if loader is None:\n # self.msg(4, \"_find_module_path loader not found\", search_dir)\n continue\n\n # Absolute path of this module. If this module resides in a\n # compressed archive, this is the absolute path of this module\n # after extracting this module from that archive and hence\n # should not exist; else, this path should typically exist.\n pathname = None\n\n # If this loader defines the PEP 302-compliant get_filename()\n # method, preferably call that method first. Most if not all\n # loaders (including zipimporter objects) define this method.\n if hasattr(loader, 'get_filename'):\n pathname = loader.get_filename(module_name)\n # Else if this loader provides a \"path\" attribute, defer to that.\n elif hasattr(loader, 'path'):\n pathname = loader.path\n # Else, raise an exception.\n else:\n raise ImportError(\n \"Module %r loader %r path unobtainable\" % (module_name, loader))\n\n # If no path was found, this is probably a namespace package. In\n # such case, continue collecting namespace directories.\n if pathname is None:\n self.msg(4, \"_find_module_path path not found\", pathname)\n continue\n\n # Return such metadata.\n path_data = (pathname, loader)\n break\n # Else if this is a namespace package, return such metadata.\n else:\n if namespace_dirs:\n path_data = (namespace_dirs[0],\n NAMESPACE_PACKAGE(namespace_dirs))\n except UnicodeDecodeError as exc:\n self.msgout(1, \"_find_module_path -> unicode error\", exc)\n # Ensure that exceptions are logged, as this function is typically\n # called by the import_module() method which squelches ImportErrors.\n except Exception as exc:\n self.msgout(4, \"_find_module_path -> exception\", exc)\n raise\n\n # If this module was not found, raise an exception.\n self.msgout(4, \"_find_module_path ->\", path_data)\n if path_data is None:\n raise ImportError(\"No module named \" + repr(module_name))\n\n return path_data", "def private_name(self, module: str, partial_name: Optional[str] = None) -> str:\n # TODO: Support unicode\n if partial_name is None:\n return exported_name(self.module_map[module].rstrip('.'))\n if (module, partial_name) in self.translations:\n return self.translations[module, partial_name]\n if module in self.module_map:\n module_prefix = self.module_map[module]\n elif module:\n module_prefix = module + '.'\n else:\n module_prefix = ''\n actual = exported_name('{}{}'.format(module_prefix, partial_name))\n self.translations[module, partial_name] = actual\n return actual", "def relative_link(current_module: str, target_module: str) -> str:\n if current_module == target_module:\n return \"\"\n return _relative_link(\n current_module.split(\".\")[:-1],\n target_module.split(\".\"),\n )", "def get_mod_name(part_family_name):\n try:\n mod_name = _RM_PART_FAMILY_MAP[part_family_name]\n except KeyError:\n raise KeyError(\"Invalid family name '{}'. 
\"\n \"Available: {}\".format(part_family_name,\n ','.join(RM_ALL_PART_FAMILY_NAMES)))\n return mod_name", "def get_module_short_name(klass):\n return klass.__module__.rsplit('.', 1)[-1]", "def search_path():\n next_part = relative_to\n for node in name_path:\n # Look for attribute first.\n attribute = getattr(next_part, node, None)\n\n if attribute is not None:\n next_part = attribute\n else:\n # If module, look for sub-module.\n if (next_part is None or\n isinstance(next_part, types.ModuleType)):\n if next_part is None:\n module_name = node\n else:\n module_name = '%s.%s' % (next_part.__name__, node)\n\n try:\n fromitem = module_name.split('.')[-1]\n next_part = importer(module_name, '', '',\n [str(fromitem)])\n except ImportError:\n return None\n else:\n return None\n\n if not isinstance(next_part, types.ModuleType):\n if not (isinstance(next_part, type) and\n issubclass(next_part, (Message, Enum))):\n return None\n\n return next_part" ]
[ "0.6935571", "0.6808267", "0.62503785", "0.620861", "0.60548043", "0.5981422", "0.59169036", "0.58730257", "0.58359164", "0.5832401", "0.58213973", "0.5811231", "0.5761079", "0.5698767", "0.5639836", "0.5639836", "0.56236005", "0.55848193", "0.5536372", "0.5535091", "0.550816", "0.5484442", "0.54712915", "0.5442625", "0.54187393", "0.54092216", "0.5389702", "0.5383725", "0.5377162", "0.53701556", "0.5368243", "0.5313961", "0.5313516", "0.5305095", "0.53020203", "0.5296452", "0.5291877", "0.52809125", "0.5280394", "0.52772826", "0.5268113", "0.5261874", "0.5260472", "0.52587473", "0.52497405", "0.52355534", "0.5230711", "0.5222505", "0.5221379", "0.52153474", "0.5213014", "0.5204913", "0.5199102", "0.51954174", "0.5192526", "0.51785356", "0.51680386", "0.51600736", "0.5156306", "0.51525176", "0.5148818", "0.5148292", "0.5148292", "0.5141493", "0.51373065", "0.5136166", "0.5116678", "0.5116518", "0.5115146", "0.51108205", "0.5101632", "0.50902647", "0.5089106", "0.5083406", "0.5067384", "0.50603527", "0.5050651", "0.5048954", "0.5047585", "0.5041237", "0.50355744", "0.50311124", "0.50246626", "0.5021177", "0.5015568", "0.5012491", "0.5011108", "0.5002769", "0.5002769", "0.500033", "0.4996303", "0.49962947", "0.4995602", "0.49955022", "0.49918935", "0.4977236", "0.49643275", "0.49629483", "0.4959994", "0.49578762" ]
0.7024272
0
Pure implementation of the heap sort algorithm in Python
def heap_sort(collection):
    n = len(collection)
    for i in range(n // 2 - 1, -1, -1):
        heapify(collection, i, n)
    for i in range(n - 1, 0, -1):
        collection[0], collection[i] = collection[i], collection[0]
        heapify(collection, 0, i)
    return collection
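The document above calls a heapify helper that is not included in this record. Below is a minimal sketch of such a helper, assuming the standard max-heapify (sift-down) routine with the same argument order used by the calls above, heapify(arr, root_index, heap_size); the helper itself is not part of the dataset row.

# Hypothetical helper, not taken from the record above: a standard
# max-heapify (sift-down) matching the call pattern heapify(arr, root, size).
def heapify(arr, root, size):
    largest = root
    left = 2 * root + 1
    right = 2 * root + 2
    # Pick the larger child, if any, within the active heap of length `size`.
    if left < size and arr[left] > arr[largest]:
        largest = left
    if right < size and arr[right] > arr[largest]:
        largest = right
    # If a child is larger than the root, swap and continue sifting down.
    if largest != root:
        arr[root], arr[largest] = arr[largest], arr[root]
        heapify(arr, largest, size)

With this helper in place, heap_sort([3, 1, 2]) returns [1, 2, 3].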
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def heap_sort(list):\n pass", "def heap_sort(arr):\n if not isinstance(arr, list) or len(arr) == 0:\n return\n for i in range(len(arr) // 2 - 1, -1, -1):\n heapify(arr, i, len(arr))\n for i in range(len(arr) - 1, 0, -1):\n arr[0], arr[i] = arr[i], arr[0]\n heapify(arr, 0, i)", "def heap_sort(arr):\n build_heap(arr)\n for i in range(len(arr) -1 , -1, -1):\n arr[i], arr[0] = arr[0], arr[i]\n \"\"\"Here we swap current element with root as it is already sorted and apply heapify. Index = 0 represents root\"\"\"\n heapify(arr, 0, i)", "def heap_sort(array):\n highest_index = len(array)-1\n Heap.heapify(array, highest_index)\n for end in range(highest_index, 0, -1):\n array[end], array[0] = array[0], array[end]\n Heap.sift_down(array, 0, end-1)", "def heapsort(A):\n \n buildHeap(A)\n for i in range(len(A)-1, 0, -1):\n A[0],A[i] = A[i],A[0]\n heapify(A, 0, i)", "def heap_sort(self, A):\n pass", "def heap_sort(self, A):\n pass", "def heapsort(a):\r\n\r\n build_max_heap(a)\r\n\r\n for i in range(len(a) - 1, 0, -1):\r\n # exchange a[0] with a[i]\r\n a[0], a[i] = a[i], a[0]\r\n a.heap_size = a.heap_size - 1\r\n max_heapify(a, 0)\r\n\r\n a.heap_size = len(a)", "def heap_sort(A):\n build_max_heap(A)\n for i in range(len(A) - 1, 1, -1):\n A[1], A[i] = A[i], A[1]\n A[0] -= 1\n max_heapify(A, 1)\n del A[0]", "def heap_sort(items):\n heapq.heapify(items)\n items[:] = [heapq.heappop(items) for i in range(len(items))]", "def heap_sort(items):\n heapq.heapify(items)\n items[:] = [heapq.heappop(items) for i in range(len(items))]", "def heapsort(array):\n # Turn the entire array into a heap\n heapify(array)\n\n # Repeatedly extract the root from the heap into a sorted sublist\n n = len(array)\n while n > 1:\n array[0], array[n - 1] = array[n - 1], array[0]\n n -= 1\n sift_down(array, 0, n)", "def heapSort(sequence):\n _buildHeap(sequence)\n for i in range(len(sequence) - 1, 0, -1):\n sequence[0], sequence[i] = sequence[i], sequence[0]\n _shiftDown(sequence, i - 1, 0)", "def heapsort(heap):\n aux = heap.tamanio\n for i in range(0, heap.tamanio-1):\n quitar(heap)\n heap.tamanio = aux", "def build_heap(arr):\n for i in range(len(arr)-1, -1, -1):\n down_heapify(arr, len(arr), i)", "def heap_sort(array):\n p = PriorityHeap(min == False)\n n = len(array)\n for i in range(n):\n p.push(array[i], array[i])\n for j in range(n - 1, -1, -1):\n item = p.pop().value\n array[j] = item\n return array", "def heapify(x):\n pass", "def _heapify(self):\n for _ in range(len(self.elements)):\n for i in range(len(self.elements)-1, 0, -1):\n parentPosition = (i-1)/2 # defaults to int i.e. 7/2=3, and 6/2=3\n if parentPosition < 0:\n parentPosition = 0\n \n # change this condition to '>' if coding for max-heap. 
This is for min-heap.\n if self.elements[i] < self.elements[parentPosition]:\n self.elements[i], self.elements[parentPosition] = self.elements[parentPosition], self.elements[i]", "def heap_sort(self, data, draw, speed):\n \n # building max-heap\n # first index of a non-leaf node → len(data)//2 - 1 \n for i in range(len(data) // 2 - 1, -1, -1):\n self.heapify(data, len(data), i)\n \n # extract elements (remove root and heapify)\n for i in range(len(data)-1, 0, -1):\n \n # swap root with last element\n data[i], data[0] = data[0], data[i]\n \n # heapify root\n self.heapify(data, i, 0)\n draw(data, [\"Orange\" if x == i or x == self.largest else \"#a871e3\" for x in range(len(data))])\n time.sleep(speed)", "def sort(self):\n def siftDown(start, count):\n \"\"\"\n This method tries to swap down the children's of the branch\n given by index 'start', making the lowest.\n \"\"\"\n root = start\n while root * 2 + 1 < count:\n child = root * 2 + 1 # 'child' is the left children of the current node\n if child < count - 1 and self.data[child] > self.data[child + 1]:\n # Verify that right sibling is lower than the left one, if so,\n # let 'child' be the right sibling\n child += 1\n if self.data[root] > self.data[child]:\n # Swap the current child and the parent if the parent is higher than the child\n self.data[root], self.data[child] = self.data[child], self.data[root]\n root = child\n else:\n return\n\n start = self.size / 2 - 1\n end = self.size - 1\n\n # Is this really necessary? If the structure is already ordered by \"heap-way\"...\n while start >= 0:\n # This is necessary to verify that we end-up with a correct min-heap structure,\n # because we can sort the structure at any time and end up with a max-heap.\n siftDown(start, self.size)\n start -= 1\n \n while end > 0:\n # With a 'min-heap' structure, it only takes swapping the first and the\n # \"last\" element in the heap to order it, and then reorder the heap\n # from the beginning to the \"end\"\n self.data[end], self.data[0] = self.data[0], self.data[end]\n siftDown(0, end)\n end -= 1", "def heap_sort(array: list) -> list:\n\n def down_heap(array, k, n):\n \"\"\"Build the heap in array so that largest value is at the root\"\"\"\n new_elem = array[k]\n while k <= n / 2:\n child = 2 * k\n if child < n and array[child] < array[child + 1]:\n child += 1\n if new_elem >= array[child]:\n break\n array[k] = array[child]\n k = child\n array[k] = new_elem\n\n size = len(array)\n for i in range(round(size / 2 - 1), -1, -1):\n down_heap(array, i, size - 1)\n for i in range(size - 1, 0, -1):\n array[i], array[0] = array[0], array[i]\n down_heap(array, 0, i - 1)\n\n return array", "def build_heap(data):\n # The following naive implementation just sorts the given sequence\n # using selection sort algorithm and saves the resulting sequence\n # of swaps. 
This turns the given array into a heap, but in the worst\n # case gives a quadratic number of swaps.\n #\n # TODO: replace by a more efficient implementation\n swaps = []\n for position in range(len(data)//2, 0, -1):\n curr = position - 1 \n while curr < len(data):\n # print(\"curr:\", curr, data[curr])\n left = 2*curr + 1\n right = 2*curr + 2\n min_index = curr\n if (left<len(data)) and (data[min_index] > data[left]):\n min_index = left\n if (right<len(data)) and (data[min_index] > data[right]):\n min_index = right\n \n if min_index != curr:\n swaps.append((curr, min_index))\n data[curr], data[min_index] = data[min_index], data[curr]\n curr = min_index\n print(data)\n else:\n # print(\"break==>\", data)\n break\n \n # print(data)\n\n return swaps", "def build_heap(arr):\n for i in range((len(arr)//2), -1, -1):\n heapify(arr,index=i, size=len(arr)-1)", "def sort(self,arr):\n\t\tself.heapify(arr)\n\t\tfor i in range(len(arr)-1,0,-1):\n\t\t\t#swap the first and last elements of the heap\n\t\t\tarr[i],arr[0] = arr[0],arr[i]\n\t\t\tself.bubbleDown(arr,0,i)", "def heap_sort_nip(A):\n build_max_heap(A)\n res = []\n for i in range(len(A) - 1, 1, -1):\n res.insert(0, A[1])\n A[1], A[i] = A[i], A[1]\n del A[i]\n A[0] -= 1\n max_heapify(A, 1)\n A[:] = [A[1]] + res", "def build_heap(data):\n size = len(data)\n for i in range(size//2, -1,-1):\n shiftDown(data, i)", "def min_heap(self): \n \n for pos in range(self.size//2, 0, -1): \n self.min_heapify(pos)", "def sort(items):\n heapq.heapify(items)\n items[:] = [heapq.heappop(items) for i in range(len(items))]", "def heap_sort(l):\r\n h = SMinHeap()\r\n for el in l:\r\n h.push(el)\r\n sorted_list = [h.pop() for x in range(len(h.array))]\r\n return sorted_list", "def build_heap(data):\n # The following naive implementation just sorts the given sequence\n # using selection sort algorithm and saves the resulting sequence\n # of swaps. This turns the given array into a heap, but in the worst\n # case gives a quadratic number of swaps.\n #\n data_ = [0] * (len(data) + 1)\n data_[1:] = data\n n = len(data)\n swaps = []\n for i in reversed(range(n // 2 + 1)):\n if i == 0:\n break\n sift_down(data_, i, swaps)\n\n return swaps", "def build_heap(data):\n n = len(data) # elements 0 .. 
n-1\n swaps = []\n def swap(i, j):\n t = data[i]\n data[i] = data[j]\n data[j] = t\n swaps.append((i,j))\n def sift_down(i):\n # 3-way comparison to restore heap property to i\n new_i = i\n l = left(i); r = right(i)\n if l < n and data[l] < data[new_i]: new_i = l\n if r < n and data[r] < data[new_i]: new_i = r\n if not i == new_i:\n # i did not satsify heap property, swap and carry on down\n swap(i, new_i)\n sift_down(new_i)\n # starting from end, parent of n-1 is first that may break heap condition\n for i in range(parent(n - 1), -1, -1):\n sift_down(i)\n return swaps", "def buildHeap(A):\n n = len(A)\n for i in range(n/2-1, -1, -1):\n heapify(A, i, n)", "def example_eight():\n a = []\n heapq.heappush(a, 5)\n heapq.heappush(a, 3)\n heapq.heappush(a, 7)\n heapq.heappush(a, 4)\n\n assert a[0] == heapq.nsmallest(1, a)[0] == 3\n\n print('Before:', a)\n a.sort()\n print('After: ', a)", "def heapify(seq):\n minheap = [0] + seq\n for i in range(len(seq)//2, 0, -1): #len(seq)//2 -= 1 to index 1\n minHeapify(minheap, i, seq)\n seq[:] = minheap[1:]\n return seq", "def _heapify(self):\n start = self._parent(len(self) - 1)\n for i in range(start, -1, -1):\n self._down_heap(i)", "def sort(array):\n\tn = len(array)\n\theap = heapify(array)\n\tres = [pop(heap) for i in range(n)]\n\treturn res", "def heapify(array, highest_index):\n first = (highest_index-1)//2\n for start in range(first, -1, -1):\n Heap.sift_down(array, start, highest_index)", "def sort_down(self, i):\n while ((i + 1) * 2) <= len(self._heap) + 1:\n mc = self.max_child(i)\n if self._heap[i] < self._heap[mc]:\n tmp = self._heap[i]\n self._heap[i] = self._heap[mc]\n self._heap[mc] = tmp\n i = mc", "def build_heap(data: List[int]) -> List[Tuple[int, int]]:\n swaps: List[Tuple[int, int]] = []\n\n n = len(data)\n start = ceil(n/2) - 1\n for i in range(start, -1, -1):\n swaps = sink_down(i, data, swaps)\n\n return swaps", "def heapsort(iterable):\n queue = []\n\n [heapq.heappush(queue, item) for item in iterable]\n\n return [heapq.heappop(queue) for i in range(len(queue))]", "def heap_sort(alist: list, key=None) -> list:\n newList = List()\n hp = BinaryHeap(func=key)\n\n for item in alist:\n hp.heappush(item)\n\n for _ in range(len(alist)):\n newList.append(hp.heappop())\n\n return newList", "def build_heap(self, arr):\n i = len(arr) // 2\n self.size = len(arr)\n self.heap_list = [-1] + arr[:]\n while i > 0:\n self.percolate_down(i)\n i = i - 1", "def test_heap_sort(self):\n integers = heap_sort(self.actual)\n self.assertEqual(self.expected, integers)", "def Shell_sort(arr):\n\n sub_count = len(arr)//2\n while sub_count > 0:\n for start in range(sub_count):\n gap_insertion_sort(arr, start, sub_count)\n sub_count = sub_count//2", "def heapify(array):\n # Start by sifting down the first parent node\n n = len(array)\n node = (n - 2) // 2\n\n # Sift down all nodes, finishing with the root\n while node >= 0:\n sift_down(array, node, n)\n node -= 1", "def heap_sort(unsorted):\n heap = BinaryMinHeap()\n\n # add items to heap\n for item in unsorted:\n heap.heap_push(item)\n\n # new list for things to go in\n sorted_list = [0] * (heap.get_size())\n\n # add sorted elements\n for position in range(heap.get_size()):\n sorted_list[position] = heap.pop_min()\n return sorted_list", "def sort(self):\n out = []\n while len(self.x) > 0:\n self.x[0], self.x[-1] = self.x[-1], self.x[0]\n out.append(self.x.pop())\n BinaryMaxHeap.max_heapify(self.x, 0)\n\n return out", "def heapSort(lst):\n heap = createEmptyHeap(len(lst), less)\n for n in lst:\n add(heap, n)\n 
sortedLst = []\n while heap.size > 0:\n sortedLst.append(removeMin(heap))\n return sortedLst", "def build_heap(self):\n n = int((len(self.array) / 2) - 1)\n\n while n >= 0:\n self.heapify_top_down(n)\n n -= 1", "def build_heap(self, A: list):\n self.size = len(A)\n med = (self.size // 2) - 1 #Mid point of array\n for i in range(0, med + 1): #Reverse iteration\n self.heapify(A, med - i) #Reverse iteration", "def heapsort(self) -> Generator[T, None, None]:\n h = [e for e in self.priority_queue]\n while h:\n entry = heapq.heappop(h)[-1][0]\n if entry is not None:\n yield cast(T, entry)", "def heapify(self, A: list, i: int):\n left = 2*i\n right = (2*i) + 1\n heapIndex = i\n\n if self.isMax == True:\n #Max heap\n if left < self.size and A[left] > A[heapIndex]:\n heapIndex = left #New largest\n if right < self.size and A[right] > A[heapIndex]:\n heapIndex = right #New largest\n else:\n #Min heap\n if left < self.size and A[left] < A[heapIndex]:\n heapIndex = left #New smallest\n if right < self.size and A[right] < A[heapIndex]:\n heapIndex = right #New smallest\n \n if heapIndex != i:\n A[i], A[heapIndex] = A[heapIndex], A[i] #Swap heap index with current iteration index\n self.heapify(A, heapIndex)", "def sift_down(heap, start, end):\n # Swap first node with children until no longer smaller.\n i = start\n heaped = False\n while not heaped:\n left = i * 2 + 1\n right = i * 2 + 2\n largest = i\n\n # Find largest of i, left and right\n if left < end and compare(heap[left], heap[largest]) > 0:\n largest = left\n if right < end and compare(heap[right], heap[largest]) > 0:\n largest = right\n\n # If left or right is larger than i, swap and repeat\n if largest == i:\n heaped = True\n else:\n heap[i], heap[largest] = heap[largest], heap[i]\n i = largest", "def heapify(arr, n, i):\n\tlargest = i\n\tl = 2*i +1\n\tr = 2*i + 2\n\n\tif l < n and arr[l] > arr[largest]: # See if left child of root exists and is greater than root\n\t\tlargest = l\n\tif r < n and arr[r] > arr[largest]: # See if right child of root exists and is greater than root\n\t\tlargest = r\n\tif largest != i: # Change root, if needed\n\t\tarr[i], arr[largest] = arr[largest], arr[i]\n\t\theapify(arr, n, largest) # Heapify the root", "def heapify_down(self):\n index = 0\n while self.has_left_child(index):\n smaller_child_index = self.get_left_child_index(index)\n if self.has_right_child(index) and self.get_right_child(index) < self.get_left_child(index):\n smaller_child_index = self.get_right_child_index(index)\n if self.heap[index] < self.heap[smaller_child_index]:\n break\n else:\n self.swap_values(index, smaller_child_index)\n index = smaller_child_index", "def heap_sort_increase(alist):\r\n heap = MaxHeap()\r\n heap.build_heap(alist)\r\n originalSize = heap.size\r\n for i in range(heap.size):\r\n maxVal = heap.items[1]\r\n heap.del_max()\r\n heap.items[originalSize-i] = maxVal\r\n return heap.items[1:originalSize+1]", "def build_max_heap(a):\r\n for i in range(math.floor((len(a) - 1)/2), -1, -1):\r\n max_heapify(a, i)", "def heapdown(h, k):\n\n #put this value in the correct place\n v = h[k]\n\n while 2 * k < len(h):\n\n #assign j to be the left child\n j = 2 * k\n\n #is there a child to the right\n if j + 1 < len(h):\n\n #is the left child smaller than the right child\n if h[j] < h[j+1]:\n j = j + 1\n\n #if v is greater than its larger child\n if v >= h[j]:\n break\n else:\n h[k] = h[j]\n k = j\n\n h[k] = v", "def sift_down(self, start, end):\n i, j = start, 2*start+1\n # Temporary variable to decrease exchange times\n temp = 
self.heap_list[start]\n # end is equal to len(self.heap_list)-1\n while j <= end:\n # compare left child node with right child node\n if j<end and self.heap_list[j]<self.heap_list[j+1]:\n j += 1\n if temp >= self.heap_list[j]:\n break\n else:\n #self.heap_list[i], self.heap_list[j] = self.heap_list[j], self.heap_list[i]\n self.heap_list[i] = self.heap_list[j]\n i = j\n j = 2*j+1\n self.heap_list[i] = temp", "def heapify(arr, n, i):\n\n largest = i\n l = 2 * i + 1\n r = 2 * i + 2\n\n # see if left child of root exists and is \n # greater than root\n if l < n and arr[largest] < arr[l]:\n largest = l\n \n # see if right child of root exists and is \n # greater than root\n if r < n and arr[largest] < arr[r]:\n largest = r\n \n # change root, if needed\n if largest != i:\n arr[i], arr[largest] = arr[largest], arr[i]\n \n # heapift the root\n heapify(arr, n, largest)", "def build_max_heap(A):\n A.insert(0, len(A))\n for i in range(len(A)//2, 0, -1):\n max_heapify(A, i)", "def minHeapify(Array,i):\n\tleft = 2*i + 1\n\tright = 2*i + 2\n\tlargest = i\n\tif left < len(Array) and Array[left] < Array[i]:\n\t\tlargest = left\n\tif right < len(Array) and Array[right] < Array[largest]:\n\t\tlargest = right\n\tif largest !=i:\n\t\tArray[largest],Array[i] = Array[i],Array[largest]\n\t\tminHeapify(Array,largest)", "def percolate_up(self, i):\n while i // 2 > 0:\n if self.heap_list[i] > self.heap_list[i // 2]:\n tmp = self.heap_list[i // 2]\n self.heap_list[i // 2] = self.heap_list[i]\n self.heap_list[i] = tmp\n i = i // 2", "def sift_up(heap, start, end):\n # Swap last node with parents until no longer greater.\n i = end - 1\n heaped = False\n while i > start and not heaped:\n parent = (i - 1) // 2\n if compare(heap[i], heap[parent]) > 0:\n heap[i], heap[parent] = heap[parent], heap[i]\n i = parent\n else:\n heaped = True", "def testSorting(self):\n hd = HeapDict(size=2)\n hd.push('a', 1)\n hd.push('a', 2)\n hd.push('b', 3)\n hd.push('b', 2)\n self.assertEqual(hd.get_result(), {'a': [2, 1], 'b': [3, 2]})", "def heapify(self, i):\n left = 2*i + 1\n right = 2*i + 2\n #find the smallest element of A[i], A[left], A[right]\n if left < self.n and self.ar[left] < self.ar[i]:\n smallest = left\n else:\n smallest = i\n \n if right < self.n and self.ar[right] < self.ar[i]:\n smallest = right\n else:\n smallest = i\n \n #If smallest is not already the parent then swap\n if smallest != i:\n self.ar[i], self.ar[smallest] = self.ar[smallest], self.ar[i]\n self.heapify(smallest)", "def heapify(self):\r\n if self._size:\r\n start = self._parent(len(self._data)-1) # who'se the last parent?\r\n for index in range(start, -1, -1): # for all parents\r\n self.down_heap(index) # fix your heap\r", "def heappop(heap):\n pass", "def heapify(self, l):\n if not l:\n return\n self.h = [None]\n for i in xrange(0, len(l)):\n self.push(l[i])", "def heapify(self, l):\n if not l:\n return\n self.h = [None]\n for i in xrange(0, len(l)):\n self.push(l[i])", "def build_max_heap(A):\r\n i = int((len(A)-2)//2)\r\n while i >= 0:\r\n max_heapify(A, i)\r\n i -= 1\r\n return A", "def heapify_subtree(x, idx):\n n = len(x)\n n2 = n >> 1\n\n def body_func(state):\n x, idx, _ = state\n largest = largest_plr(x, idx)\n change = largest != idx\n x = lax.cond(change,\n lambda x: vec_swap_entries(x, largest, idx),\n lambda x: x,\n x)\n return x, largest, change\n\n def cond_func(state):\n x, idx, change = state\n return jnp.logical_and(idx < n2, change)\n\n state = x, idx, True\n state = lax.while_loop(cond_func, body_func, state)\n x, idx, change = state\n 
return x", "def heap_sort(data_list, draw_data, time_value):\n\n # heapifies the list\n for i in range((len(data_list) // 2) - 1, -1, -1):\n heapify(data_list, len(data_list), i, draw_data, time_value)\n\n # draw the heapified list as blue before starting the popping from the heap\n draw_data(data_list, [\"blue\" for i in range(len(data_list))])\n time.sleep(time_value)\n\n for i in range(len(data_list) - 1, 0, -1):\n _swap(data_list, i, 0)\n\n # generate the color list to be visualized\n color_list = [\"red\" for x in range(len(data_list))]\n\n # color the two elements being swapped green\n for x in range(len(color_list)):\n if (x == i) or (x == 0):\n color_list[x] = \"green\"\n\n # visualize the swap and wait the specified amount of time\n draw_data(data_list, color_list)\n time.sleep(time_value)\n\n # heapify the remaining portion of the list\n heapify(data_list, i, 0, draw_data, time_value)\n\n # color the whole list as green after the sort\n draw_data(data_list, [\"green\" for i in range(len(data_list))])", "def heapify(data_list, size, root_index, draw_data, time_value):\n\n # declare and locate the largest index and the children of the root\n largest_index = root_index\n left_index = (2 * root_index) + 1\n right_index = (2 * root_index) + 2\n\n # change the largest if the root is smaller than the left child\n if (left_index < size) and (data_list[root_index] < data_list[left_index]):\n largest_index = left_index\n\n # change the largest if the largest is smaller than the right child\n if (right_index < size) and (data_list[largest_index] < data_list[right_index]):\n largest_index = right_index\n\n # only changes if either the left or right child is larger than the root\n if largest_index != root_index:\n _swap(data_list, root_index, largest_index)\n\n # generate the color list to be visualized\n color_list = [\"red\" for x in range(len(data_list))]\n\n # color the two elements being swapped as blue\n for x in range(len(color_list)):\n if (x == root_index) or (x == largest_index):\n color_list[x] = \"blue\"\n\n # visualize the step and wait for the specified amount of time\n draw_data(data_list, color_list)\n time.sleep(time_value)\n\n # recurse again so that it is a complete heap\n heapify(data_list, size, largest_index, draw_data, time_value)", "def sink_down(index: int, data: List[int], swaps: List[Tuple[int, int]]) -> List[Tuple[int, int]]:\n while index * 2 + 1 < len(data):\n j = index * 2 + 1\n # the other child exist and is smaller than the current one.\n if (j+1 < len(data)) and data[j+1] < data[j]:\n j += 1\n # heap order already satisfied.\n if data[index] <= data[j]:\n return swaps\n else:\n swap(index, j, data)\n swaps.append((index, j))\n index = j\n return swaps", "def heapify (A, idx, maxIdx):\n left = 2*idx+1\n right = 2*idx+2\n if left < maxIdx and A[left] > A[idx]:\n largest = left\n else: \n largest = idx\n if right < maxIdx and A[right] > A[largest]:\n largest = right\n\n if largest != idx:\n A[idx],A[largest] = A[largest],A[idx]\n heapify(A, largest, maxIdx)", "def max_heapify_unrecursive(heap, i):\n while True:\n left_child = left(i)\n right_child = right(i)\n largest = i\n if left_child < len(heap) and heap[left_child] > heap[i]:\n largest = left_child\n if right_child < len(heap) and heap[right_child] > heap[largest]:\n largest = right_child\n if largest == i:\n return\n swap(heap, i, largest)\n i = largest", "def max_heapify(lst, n, root):\n\n largest = root\n l = 2 * root + 1\n r = 2 * root + 2\n\n if l < n and lst[l] > lst[largest]:\n largest = l\n if r < n 
and lst[r] > lst[largest]:\n largest = r\n if largest != root:\n lst[root], lst[largest] = lst[largest], lst[root]\n max_heapify(lst, n, largest)", "def down_heapify(arr, n, index):\n \n assert(index < n)\n \n left_index = 2 * index + 1\n right_index = 2 * index + 2\n \n max_node_idx = index\n if left_index < n and arr[left_index] > arr[index]:\n max_node_idx = left_index\n \n if right_index < n and arr[right_index] > arr[max_node_idx]:\n max_node_idx = right_index\n \n if max_node_idx != index:\n arr[max_node_idx], arr[index] = arr[index], arr[max_node_idx]\n down_heapify(arr, n, max_node_idx)", "def __init__(self, data, draw, speed):\n self.heap_sort(data, draw, speed)", "def heapify(self, index):\n left = self._left(index)\n right = self._right(index)\n\n if self.is_max:\n # `largest` points to the index of `index`, `left(index)`, `right(index)`\n # with largest value \n largest = (left if left < self.size and \n self.array[left][0] > self.array[index][0] else index)\n\n largest = (right if right < self.size and \n self.array[right][0] > self.array[largest][0] else largest)\n if largest != index:\n self._swap(index, largest)\n self.heapify(largest)\n\n else:\n # `smallest` points to the index of `index`, `left(index)`, `right(index)`\n # with smallest value\n smallest = (left if left < self.size and \n self.array[left][0] < self.array[index][0] else index)\n\n smallest = (right if right < self.size and\n self.array[right][0] < self.array[smallest][0] else smallest)\n if smallest != index:\n self._swap(index, smallest)\n self.heapify(smallest)", "def heapify(self, data, n, i):\n # initialize largest as root\n self.largest = i\n left, right = (2 * i + 1), (2 * i + 2)\n \n # if left child is greater than root\n if left < n and data[self.largest] < data[left]:\n self.largest = left\n \n # if right child is greater than root \n if right < n and data[self.largest] < data[right]:\n self.largest = right\n \n # if root is not largest, swap with largest and continue heapifying \n if self.largest != i:\n data[i], data[self.largest] = data[self.largest], data[i]\n self.heapify(data, n, self.largest)", "def quick_sort(partition_list, low, high):\n if low >= high:\n return\n part_point = get_partition(partition_list, low, high)\n quick_sort(partition_list, low, part_point - 1)\n quick_sort(partition_list, part_point + 1, high)", "def down_heap(array, k, n):\n new_elem = array[k]\n while k <= n / 2:\n child = 2 * k\n if child < n and array[child] < array[child + 1]:\n child += 1\n if new_elem >= array[child]:\n break\n array[k] = array[child]\n k = child\n array[k] = new_elem", "def restructureHeap(self):\n\n self.i = 1\n # Storing the elements that already exist in a temporary list\n tempList = []\n for heapElement in self.heap:\n if heapElement != \"NaN\" :\n tempList.append( heapElement )\n\n # Initializing new heap\n self.heap = [\"NaN\"] * self.noOfElements\n\n # Storing all the elements in the temporary list in a continuous fashion in the new heap\n for element in tempList:\n self.insertElement(element, self.i)", "def example_seven():\n a = []\n heapq.heappush(a, 5)\n heapq.heappush(a, 3)\n heapq.heappush(a, 7)\n heapq.heappush(a, 4)\n\n print(heapq.heappop(a), heapq.heappop(a), heapq.heappop(a), heapq.heappop(a))", "def flotar(heap, indice):\n padre = (indice-1)//2\n while (padre >= 0) and (heap.vector[padre] > heap.vector[indice]):\n heap.vector[padre], heap.vector[indice] = heap.vector[indice], heap.vector[padre]\n indice = padre\n padre = (padre - 1) // 2", "def heapify_up(self):\n index = 
len(self.heap) - 1\n while self.has_parent(index) and self.get_parent(index) > self.heap[index]:\n self.swap_values(self.get_parent_index(index), index)\n index = self.get_parent_index(index)", "def sift_up(self, i):\n #While the element is not the min value (top) or the second value in the min heap\n while i // 2 > 0:\n # Swap the values if the current value is less than it's parent value\n if self.heap_list[i][0] < self.heap_list[i // 2][0]:\n self.heap_list[i], self.heap_list[i // 2] = self.heap_list[i // 2], self.heap_list[i]\n # Move the index to the parent value (moving up the tree)\n i = i // 2", "def insert(self, k): \r\n self.heap_array.append(k)\r\n\r\n current_index = len(self.heap_array) - 1\r\n while (current_index > 0):\r\n parent_index = ((current_index-1)//2)\r\n\r\n if int(self.heap_array[current_index]) > int(self.heap_array[parent_index]): # if no vialation of the min heap property \r\n return\r\n else: # if heap property is broken then swap the parent and child that are breaking the prop \r\n self.heap_array[parent_index], self.heap_array[current_index] = self.heap_array[current_index], self.heap_array[parent_index]\r\n current_index = parent_index", "def heap_up(self, index):\n # how can we do this recursively?\n parent_node_index = (index - 1)//2\n while self.store[index].key < self.store[parent_node_index].key and index > 0:\n self.swap(index, parent_node_index)\n index = parent_node_index\n parent_node_index = (index - 1)//2\n else:\n return self.store", "def percUp(self, i):\n while i // 2 > 0:\n if self.heapList[i] < self.heapList[i // 2]:\n tmp = self.heapList[i // 2]\n self.heapList[i // 2] = self.heapList[i]\n self.heapList[i] = tmp\n i = i // 2", "def _sift_up(self, i):\n while i > 0:\n p = (i-1)//2\n if self._heap[i] < self._heap[p]:\n self._swap(i, p)\n i = p\n else:\n break", "def construct_heap(self, elems):\n for e in elems:\n self.n += 1\n self.A.append(e)\n self.pos[e[0]] = self.n\n for i in range(self.n // 2, 0, -1):\n self.combine(i)", "def flotar(heap, indice):\n padre = (indice-1)//2\n while(indice > 0) and (heap.vector[padre][0] > heap.vector[indice][0]):\n heap.vector[padre], heap.vector[indice] = heap.vector[indice], heap.vector[padre]\n indice = padre\n padre = (padre-1)//2", "def _upheap(self, node):\n parent = self.parent(node)\n while parent is not None and node.element() < parent.element():\n self._swap(node, parent) # Move node upward while key\n parent = self.parent(node) # smaller than parent's key", "def heapify_up(self, index):\n if index == 0:\n return\n parent_index = int((index - 1) / 2)\n if self.data[index] < self.data[parent_index]:\n self.data[index], self.data[parent_index] = self.data[parent_index], self.data[index]\n self.heapify_up(parent_index)", "def _heapify_after_remove(self,ele):\r\n \r\n if self._chk_left(ele):\r\n left = self._left(ele)\r\n find_small_child = left\r\n # below to find which child has small integer\r\n if self._chk_right(ele):\r\n right = self._right(ele)\r\n if self._data[left] > self._data[right]:\r\n find_small_child = right\r\n \r\n if self._data[find_small_child] < self._data[ele]:\r\n self.swap(ele, find_small_child)\r\n self._heapify_after_remove(find_small_child)", "def minHeap(self):\n for pos in range(self.size // 2, 0, -1):\n self.minHeapify(pos)", "def __sift_up(self, i: int):\n while i > 0:\n parent = (i - 1) // 2\n if self.__heap[i][0] < self.__heap[parent][0]:\n tmp = self.__heap[parent]\n self.__heap[parent] = self.__heap[i]\n self.__heap[i] = tmp\n i = parent" ]
[ "0.82783955", "0.8089301", "0.8086552", "0.8035019", "0.8015355", "0.7985793", "0.7985793", "0.7959204", "0.7954446", "0.7736345", "0.7736345", "0.7707003", "0.77046126", "0.77045786", "0.7695088", "0.7671767", "0.76464075", "0.7600847", "0.75952286", "0.7574346", "0.7545443", "0.7541781", "0.75366426", "0.7519305", "0.7481488", "0.74228895", "0.7352846", "0.7338973", "0.73305243", "0.7313575", "0.7302911", "0.7294413", "0.7264926", "0.7090785", "0.7082337", "0.70403457", "0.7003723", "0.6999349", "0.6986472", "0.6980686", "0.697801", "0.6978004", "0.6973408", "0.6944832", "0.6916832", "0.69090015", "0.6829255", "0.6812486", "0.680352", "0.67788845", "0.67735136", "0.6771239", "0.6759629", "0.6756227", "0.67559", "0.67394155", "0.6736634", "0.67338914", "0.6714776", "0.66929615", "0.6684747", "0.6678961", "0.6676631", "0.6670031", "0.6661225", "0.66452944", "0.6644855", "0.6621191", "0.66083056", "0.66083056", "0.6598982", "0.65854925", "0.6576759", "0.65710723", "0.65561", "0.6551811", "0.6545121", "0.6544766", "0.65340596", "0.6532604", "0.6514214", "0.6508664", "0.6492105", "0.6491998", "0.64914864", "0.6462574", "0.64514583", "0.64427257", "0.64411", "0.6438635", "0.6436966", "0.6430558", "0.64284027", "0.642542", "0.6420424", "0.6410712", "0.64083517", "0.6406544", "0.6402108", "0.6398943" ]
0.7628612
17
Wait n seconds before returning ok
def timeout(n):
    time.sleep(int(n))
    return 'ok', 200
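The ('ok', 200) tuple return suggests this document is a web view function in the Flask style (body, status code), although the record does not say so. A minimal sketch under that assumption follows; the app object, route path, and imports are illustrative and not taken from the dataset.

import time

from flask import Flask

app = Flask(__name__)

@app.route('/timeout/<int:n>')
def timeout(n):
    # Block the request handler for n seconds, then respond with body 'ok'
    # and HTTP status 200 (Flask treats the tuple as (body, status)).
    time.sleep(int(n))
    return 'ok', 200

Under this sketch, a GET request to /timeout/5 would block for five seconds and then return a 200 response with body 'ok'.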
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait(n=3):\n sleep(n)", "def waitUntilSuccess():", "def functionThatWillTimeOut():\n time.sleep(5)", "def wait_for(test, timeout_seconds=DEFAULT_TIMEOUT):\n start = time.time()\n while True:\n if test():\n return True\n if time.time() - start > timeout_seconds:\n return False\n time.sleep(0.5)", "def wait(delay=2):\n time.sleep(delay)", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait_for_time():\n while rospy.Time().now().to_sec() == 0:\n pass", "def wait(wait_time=WAIT_TIME):\n # time.sleep(wait_time)\n pass", "def __wait(min_sec, max_sec):\n time.sleep(randint(min_sec, max_sec))", "def wait(self, timeoout=None, state=\"C-completed\"):", "def wait(self, seconds):\n time.sleep(seconds)", "def wait():\n time.sleep(1)", "def waitFor(self,duration=2):\n time.sleep(duration)\n print('Done waiting for ',duration)\n return", "def WaitForAction(self, action):\n start_time = time.time()\n while time.time() - start_time < 20:\n if action():\n return True\n time.sleep(1)\n\n return False", "def wait(self, ms=None):\r\n util.raiseNotDefined()", "def wait_for_completion(self, timeout=10):\n cur_status = self.runtime_status()\n while cur_status not in ['FAILED', 'KILLED', 'FINISHED']:\n time.sleep(0.2)\n timeout -= 0.2\n cur_status = self.runtime_status()\n if timeout < 0:\n break\n\n return timeout > 0", "def wait(t):\n message = \"WAIT:\" + str(t) + '\\n'\n sock.sendall(message)\n time.sleep(t)\n return", "def assert_timeout(self) -> None:", "def wait_to_succeed(name, namespace, timeout):\n return watch.wait_to_succeed(name=name, namespace=namespace,\n timeout=timeout, group=GROUP, plural=PLURAL,\n version=VERSION)", "def wait(self, timeout=600):\n s = datetime.datetime.now()\n status = json.loads(self.get())\n while status['status'] != 'COMPLETE':\n status = self.get()\n e = datetime.datetime.now()\n if (e - s).seconds > timeout:\n raise RuntimeError('timeout')\n return status", "def do_wait(self):\n pass", "def wait(self, timeout):\n raise NotImplementedError(\n u\"%s: Method not implemented\", self.__class__.__name__)", "def waitUntilFinished():", "def waitUntilFinished():", "def waitUntilFinished():", "def waitUntilFinished():", "def wait_fluently(condition: Callable, timeout: TimeoutType, err_msg: str):\n if timeout is None:\n timeout = 0\n start_time = time.time()\n while True:\n res = condition()\n if res:\n return res\n if time.time() - start_time >= timeout:\n raise TimeoutException(err_msg)\n time.sleep(0.3)", "def sleep(self):\n for i in range(10):\n if cancelled: return False\n time.sleep(1)\n return True", "def wait(self, timeout=None):\n if timeout is None:\n timeout = self.timeout\n started = time.time()\n while True:\n if self.get_ip():\n self.state = State.RUNNING\n return True\n else:\n time.sleep(1)\n if timeout != 0:\n if time.time() - started > timeout:\n return False", "def check_completion(self):\n\n time.sleep(3)\n while self.status == 0:\n pass", "def wait(self, timeout):\n log.info(u\"%s %s(%r)\", self.request_params['method'],\n self.request_params['url'],\n self.request_params['params'])\n req = requests.Request(**self.request_params)\n self.apply_authentication(req)\n return self.session.send(self.session.prepare_request(req),\n timeout=timeout)", "def wait(self):\n time.sleep(0.010)", "def sleep(timeout=1):\n if os.environ.get('JS9_TEST_TIMEOUT'):\n timeout = 
int(os.environ.get('JS9_TEST_TIMEOUT'));\n time.sleep(timeout)", "def test_timeout(self):\n start = time.time()\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.03)\n # be a little lenient for slow computers:\n self.assertTrue(abs(time.time() - start) < 0.05)", "def wait_forever(self):\r\n while True:\r\n time.sleep(0.5)", "def wait (self, seconds=0.0):\r\n\t\tstart_time = time.time()\r\n\t\twhile time.time() < start_time + seconds:\r\n\t\t\tself.receive()", "def wait(interval):\n time.sleep(interval/1000.0)", "def waitfor(self):\r\n finished = False\r\n while finished == False:\r\n time.sleep(5)\r\n finished = self.isFinished()", "def _wait_for_response(self, millis):\n loops = millis / 10\n while not self.__atresponse_received:\n time.sleep(0.01)\n loops -= 1\n if loops == 0:\n return False\n return True", "def wait(wait_time):\n\n time.sleep(wait_time)", "def cmd_wait(con, run_cmd):\n # May take up to 5 minutes\n sleep(5)\n ret = False\n for _ in range(25):\n try:\n result = con.run(run_cmd, hide=True)\n if result.return_code == 0:\n ret = True\n break\n except (ConnectionError, NoValidConnectionsError):\n sleep(10)\n\n return ret", "def wait():\n pass", "def user_wait(self, duration):\n self.enqueue(lambda t: sleep(duration + int(PY3)))", "def waitForFullExec(annarInterface, id, timeout = -1):\r\n\r\n ret = False\r\n actionState = 0\r\n \r\n if timeout == -1: \r\n \r\n while actionState != 1 and actionState != 2:\r\n ret = annarInterface.checkActionExecState(id)\r\n if ret:\r\n actionState = annarInterface.getActionExecState()\r\n return actionState\r\n \r\n else:\r\n \r\n start_time = time.time()\r\n dur = 0 \r\n while (actionState == 0) and dur < timeout:\r\n dur = time.time() - start_time\r\n ret = annarInterface.checkActionExecState(id)\r\n if ret:\r\n actionState = annarInterface.getActionExecState()\r\n \r\n if(dur >= timeout):\r\n print \"Timeout\"\r\n\r\n return actionState", "def _waitConnected(self):\n if not self.isConnected.wait(5.0): # timeout after 5 seconds\n raise Exception(\"Timed out waiting for connection completion\")", "def wait(self):\n\t\twhile True:\n\t\t\tr1 = self.zaberSend(self.translation[\"hor\"], self.cmd[\"returnStatus\"], data=0)\n\t\t\tr2 = self.zaberSend(self.translation[\"ver\"], self.cmd[\"returnStatus\"], data=0)\n\t\t\tif r1[2] == 0 and r2[2] == 0:\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\ttime.sleep(.01)", "def test_wait_for_page_in_timeout(self):\n start_time = datetime.now()\n with self.assertRaises(SpdbError):\n csdb = CacheStateDB(self.config_data)\n ch = csdb.create_page_in_channel()\n\n csdb.wait_for_page_in([\"MY_TEST_KEY1\", \"MY_TEST_KEY2\"], ch, 1)\n\n assert (datetime.now() - start_time).seconds < 3", "def wait(self, sleep_time):\n time.sleep(sleep_time)", "def wait(self, timeout=None):\n assert False, \"Deriving class must implement\"", "def _wait_what(self, expected):\r\n \r\n self._msg_server(cb.WAITWHATSERVER % (expected))", "def timeout(self):\n self._status_update(\"Pyloton: Timeout\")\n time.sleep(3)", "def test_finished_new_keycard_scan_timed_out(self):\n t_info('Creating NewKeycardScan object; setting creation '\n 'time to 1 sec more than the time-out time......', 3)\n new_nks_obj = NewKeycardScan.objects.create(\n rfid='1111111111', assigner_user_id=self.staff_only_user.pk)\n min_till_timeout = 2.0\n time_delta = timedelta(minutes=min_till_timeout, seconds=1)\n fake_time_initiated = (datetime.datetime.now() - time_delta)\n new_nks_obj.time_initiated = fake_time_initiated\n 
new_nks_obj.save()\n\n t_info('Getting response..........', 3)\n response = self.client.get('/done_scan/%d/' % new_nks_obj.pk)\n\n t_info('Check response status code', 4)\n self.assertEqual(response.status_code, 200)\n\n t_info('Check response content type', 4)\n self.assertEqual(response['content-type'], 'application/json')\n\n t_info('Check response content', 4)\n self.assertEqual(simplejson.loads(response.content)['success'], False)\n # self.assertEqual(simplejson.loads(response.content)['error_mess'],\n # 'Sorry, the system timed out. You have %d minutes to scan the card,\n # then hit 'Done.' ' % default_timeout_minutes)\n self.assertEqual(simplejson.loads(response.content)['error_mess'],\n 'Sorry, the system timed out. You have {} minutes to scan the card, '\n 'then hit \"Done.\" '.format(min_till_timeout))", "def wait_for(self, timeout):\n ready = False\n # Dividing sleep time by 300 instead of 30 double CPU load but cuts\n # IMU timestamp variation from about 20% to less than 1%\n sleep_time = (timeout / 1000.0) / 30\n stop_time = time.monotonic_ns() + (timeout * 1000000.0)\n while not ready and time.monotonic_ns() < stop_time:\n ready = GPIO.input(self.gpio_pin)\n time.sleep(sleep_time)\n return ready", "def wait(self, seconds):\n self.driver.implicitly_wait(seconds)", "def await(self, timeout=None):\n if timeout is not None:\n end_time = time.time() + max(0, timeout)\n else:\n end_time = None\n # Use the same/similar scheme that the python condition class uses.\n delay = 0.0005\n while self.state not in (states.FAILURE, states.SUCCESS):\n time.sleep(delay)\n if end_time is not None:\n remaining = end_time - time.time()\n if remaining <= 0:\n return False\n delay = min(delay * 2, remaining, 0.05)\n else:\n delay = min(delay * 2, 0.05)\n return True", "def wait_for_test_action(test_action, timeout=0.5, **kargs):\n # use decorator 'timeout_decorator'\n @timeout_decorator.timeout(timeout)\n def _wait_for_action():\n while test_action(**kargs):\n pass\n\n # call method with decorator\n _wait_for_action()", "def waitForExec(annarInterface, id, timeout = -1):\r\n\r\n ret = False\r\n actionState = 0\r\n \r\n if timeout == -1:\r\n while actionState == 0:\r\n ret = annarInterface.checkActionExecState(id)\r\n if ret:\r\n actionState = annarInterface.getActionExecState()\r\n return actionState\r\n \r\n else:\r\n \r\n start_time = time.time()\r\n dur = 0 \r\n while (actionState == 0) and dur < timeout:\r\n dur = time.time() - start_time\r\n ret = annarInterface.checkActionExecState(id)\r\n if ret:\r\n actionState = annarInterface.getActionExecState()\r\n \r\n if(dur >= timeout):\r\n print \"Timeout\"\r\n\r\n return actionState", "def wait_15_minutes():\r\n # Waiting:\r\n for i in range(15):\r\n if i == 14:\r\n print(\"1 minute of waiting left.\")\r\n else:\r\n print(\"{} minutes of waiting left.\".format(15 - i))\r\n time.sleep(60)\r\n\r\n # Just in case:\r\n time.sleep(5)\r\n\r\n # After waiting:\r\n print(\"Waiting ended!\")\r\n return None", "def delay():\r\n time.sleep(2)", "def test_long_run_case_that_we_want_to_skip():\n time.sleep(30)\n assert 0", "def test_timeoutReset(self):\n for i in range(3):\n self.circuit_breaker.failure()\n self.time.advance(29.0)\n available29sec = self.circuit_breaker.available()\n self.time.advance(1.1)\n available30sec = self.circuit_breaker.available()\n self.assertEqual((available29sec, available30sec),\n (False, True))", "def await_condition(condition, timeout=2000):\n\n for _ in range(timeout):\n if condition():\n return True\n time.sleep(0.001)\n return 
False", "def testSessionTimeout(self):\n\n def testTimeout(res):\n self.failUnlessEqual(res.value.args[0], b'404')\n\n def testCBTimeout(res):\n # check for terminate if we expire\n terminate = res[0].getAttribute('type',False)\n self.failUnlessEqual(terminate, 'terminate')\n\n def sendTest():\n sd = self.send()\n sd.addCallback(testCBTimeout)\n sd.addErrback(testTimeout)\n return sd\n\n def testResend(res):\n self.failUnless(res[0].name=='body', 'Wrong element')\n s = self.b.service.sessions[self.sid]\n self.failUnless(s.inactivity==2,'Wrong inactivity value')\n self.failUnless(s.wait==2, 'Wrong wait value')\n return task.deferLater(reactor, s.wait+s.inactivity+1, sendTest)\n\n def testSessionCreate(res):\n self.failUnless(res[0].name=='body', 'Wrong element')\n self.failUnless(res[0].hasAttribute('sid'),'Not session id')\n self.sid = res[0]['sid']\n\n # send and wait\n sd = self.send()\n sd.addCallback(testResend)\n return sd\n\n\n\n BOSH_XML = \"\"\"<body content='text/xml; charset=utf-8'\n hold='1'\n rid='%(rid)i'\n to='localhost'\n route='xmpp:127.0.0.1:%(server_port)i'\n ver='1.6'\n wait='2'\n ack='1'\n inactivity='2'\n xml:lang='en'\n xmlns='http://jabber.org/protocol/httpbind'/>\n \"\"\"% { \"rid\": self.rid, \"server_port\": self.server_port }\n\n return self.proxy.connect(BOSH_XML).addCallbacks(testSessionCreate)", "def timedOut(self):\n return self.result() == TIMEOUT", "def _wait_for_server():\n start_time = time.time()\n\n while True:\n try:\n urllib.request.urlopen('http://localhost:5000/')\n break\n except Exception:\n time.sleep(.1)\n if time.time() - start_time > 2:\n raise", "async def timeout(self, failed: bool = False) -> None:\n raise NotImplementedError()", "def sleep(self, timeout):\n try:\n self._wait_in_process_loop(lambda: (False,None),timeout=timeout)\n except threadprop.TimeoutThreadError:\n pass", "def wait(self, options):\n self.socketIO.wait(seconds=options)", "def wait_for_reply(timeout=3):\n i = 0\n reply = Networking.get_instance().client.get_server_reply()\n while not reply:\n reply = Networking.get_instance().client.get_server_reply()\n time.sleep(1)\n i += 1\n if i > timeout:\n raise TimeoutError\n return reply", "def wait_on_function ( func, func_args, func_wait_value, sleep_time, max_wait ) :\n func_response = func( *func_args )\n while func_response != func_wait_value :\n if max_wait <= 0 :\n break\n time.sleep( sleep_time )\n max_wait -= sleep_time\n func_response = func( *func_args )\n\n return func_response == func_wait_value", "def timeout(self):\n pf.debug(\"TIMEOUT\")\n self.acceptData(TIMEOUT)", "def wait_for(old_html, timeout=60):\n\tstart_time = time.time() \n\twhile time.time() < start_time + timeout: \n\t\tif check_new_page_loaded(old_html): \n\t\t\treturn time.time() - start_time \n\t\telse: \n\t\t\ttime.sleep(0.1) \n\traise Exception('WebPage Load Timeout')", "def ensure_always(test, timeout_seconds=DEFAULT_TIMEOUT):\n start = time.time()\n while True:\n if not test():\n return False\n if time.time() - start > timeout_seconds:\n return True\n time.sleep(0.5)", "def test_success_result(self):\n dr = EventualResult(succeed(123), None)\n self.assertEqual(dr.wait(0.1), 123)", "def sleep(seconds):\r\n time.sleep(seconds)", "def _wait_before_call(self):\n while (dt.datetime.now() - self._last_call_ts) <= dt.timedelta(\n seconds=self.api_timeout\n ):\n time.sleep(0.5)\n self._last_call_ts = dt.datetime.now()", "def SendTimeout(self) -> int:", "def SendTimeout(self) -> int:", "def request_until_succeed(url):\n \n req = urllib2.Request(url)\n 
success = False\n while success is False:\n try: \n response = urllib2.urlopen(req)\n if response.getcode() == 200:\n success = True\n except Exception, e:\n print e\n time.sleep(5)\n\n return response.read()", "def waitForCompletion(self):\n\n while(json.loads(self.robot.device())['state']!=0):\n time.sleep(0.1)\n continue\n\n return", "def test_timeout_loop(self):\n\n self.assertIsNone(self.state.becomeCandidateTimeout)\n # This should do nothing\n self.state.cancelBecomeCandidateTimeout()\n self.assertIsNone(self.state.becomeCandidateTimeout)\n\n results = self.state.begin()\n\n self.state.resetElectionTimeout()\n self.assertTrue(self.state.electionTimeout > 0.150)\n self.assertTrue(self.state.electionTimeout < 0.350)\n self.assertTrue(isinstance(self.state.becomeCandidateTimeout,\n base.DelayedCall))", "def blip(wait=0.01):\n yield from asyncio.sleep(wait)\n return True", "def test_wait(self):\n skill = create_skill()\n\n expected_response = 'Yes I do, very much'\n\n converser = Thread(target=create_converse_responder(expected_response,\n skill))\n converser.start()\n validator = mock.Mock()\n validator.return_value = True\n is_cancel = mock.Mock()\n is_cancel.return_value = False\n on_fail = mock.Mock()\n\n response = skill._wait_response(is_cancel, validator, on_fail, 1)\n self.assertEqual(response, expected_response)\n converser.join()", "def wait_for_seconds(self, seconds, sleeptime=0.001):\n self.listen_until_return(timeout=seconds, sleeptime=sleeptime)", "def wait(self):\n for _ in range(15):\n time.sleep(10)\n if self.ready:\n break\n else:\n raise RuntimeError('timeout, lease failed to start')", "def process_request(t):\n time.sleep(t)", "def process_request(t):\n time.sleep(t)", "def process_request(t):\n time.sleep(t)", "def wait(self, _id):\n while not self._actions[_id].done:\n sleep(1e-3)", "def wait_for_result(self, timeout=10, interval=0.1):\n end = time.time() + timeout\n while time.time() <= end:\n ev = self.get_event()\n if ev.id == ID_OK:\n return\n elif ev.id in (ID_ERROR, ID_ALARM):\n raise GrblEventError(ev)\n time.sleep(interval)\n raise GrblHostError(\"Timeout\")", "def test_wait_for_predicate_timeout(self):\n predicate_mock = mock.MagicMock(side_effect=[True, True, True])\n with self.assertRaises(TimeoutError):\n train_utils.wait_for_predicate(predicate_mock, num_retries=3)", "def testSleeping(self):\n time.sleep(2 * 60)\n raise AssertionError('Test case should have timed out.')", "def nanny(self): \n while not self.started and not self.failed:\n eventlet.sleep(.1)\n return not self.failed", "def sleep(secs: float = 0.02) -> bool:\n run(asyncio.sleep(secs))\n return True", "def waitTillReachable(self, sleep_per_try_secs=120, timeout=1200):\n elapsed_time = 0\n while elapsed_time < timeout:\n if self.isReachable():\n logger.info(\"Machine pingable. Reconnecting after 30 secs..\")\n time.sleep(30)\n self.connect()\n return True\n else:\n logger.info(\"Machine not yet pingable. Waiting for %s secs before retrying..\" % sleep_per_try_secs)\n time.sleep(sleep_per_try_secs)\n elapsed_time += sleep_per_try_secs\n logger.warning(\"TIMEOUT: Waited for %d secs, but machine still not reachable\" % elapsed_time)\n return False", "def wait_on_job(self, delay=10):\n while self.isJobRunning() == True:\n time.sleep(delay)\n return self.ofile_exists()", "def post(self):\n sleep(pow((self.unit * self.timeout), self.count))", "def sleep(seconds):\n time.sleep(seconds)" ]
[ "0.71733505", "0.7147747", "0.68212134", "0.6718219", "0.6702025", "0.6661689", "0.6661689", "0.6661689", "0.6635981", "0.65762943", "0.65646064", "0.6553791", "0.65345186", "0.649197", "0.6484155", "0.64481616", "0.6447762", "0.6443559", "0.6374297", "0.63630664", "0.62902665", "0.6289753", "0.6286876", "0.62640446", "0.6259149", "0.6259149", "0.6259149", "0.6259149", "0.62353873", "0.62310284", "0.6211044", "0.62073964", "0.6205052", "0.619819", "0.61916703", "0.61909354", "0.61862034", "0.6182978", "0.61767805", "0.61620164", "0.61502457", "0.6138006", "0.6135227", "0.6121755", "0.61116046", "0.6110484", "0.6089428", "0.60718346", "0.60659677", "0.6048276", "0.604236", "0.6039126", "0.60323864", "0.603182", "0.6030877", "0.6029538", "0.6027403", "0.6025515", "0.6020408", "0.60103565", "0.59940344", "0.59884095", "0.59798074", "0.5979255", "0.5968787", "0.59677607", "0.5956349", "0.59399605", "0.591925", "0.5913127", "0.5910324", "0.59081876", "0.5907372", "0.59073126", "0.5904642", "0.59034365", "0.59017664", "0.5900651", "0.58940345", "0.58940345", "0.589147", "0.5877019", "0.5869672", "0.58695084", "0.5858327", "0.585611", "0.5844144", "0.5840994", "0.5840994", "0.5840994", "0.5839802", "0.5839656", "0.58377624", "0.5824524", "0.5823685", "0.58198", "0.581331", "0.58125514", "0.580652", "0.57902443" ]
0.8150463
0
/menu should return information about accessible menu elements
def test_normal(self, fake_app): result = fake_app.get(self.url) assert result.json == { 'menu': [{ 'elements': [{ 'is_active': False, 'name': 'not_logged_home', 'text': 'Home', 'url': '#/' }], 'name': 'Menu' }] }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_menu_items():\n\n pass", "def get_menus():\n\n pass", "def get_all_menu():", "def get_menu(menu_name):\n\n pass", "def present_menu (self, menu, groupName = 'main'):\n \n if not hasattr (cherrypy.request, 'nav'):\n cherrypy.request.nav = {}\n\n if not groupName in cherrypy.request.nav:\n cherrypy.request.nav [groupName] = []\n \n for item in menu.items:\n cherrypy.request.nav [groupName].append (item)", "def menus(self):\r\n return []", "def create_menu():", "def get_menu ( self, object ):\n return self.menu", "def menu(self):\n menu = list()\n \n \n menu.extend([\n {\n 'title': 'Bootstrap Demo',\n 'href': self.request.route_url('bootstrap_demo'),\n 'icon': \"fa fa-twitter-square\"\n },\n {\n 'title': 'Jade Demo',\n 'href': self.request.route_url('jade_demo'),\n 'icon': \"fa fa-indent\"\n },\n ])\n if self.user:\n menu.extend([\n {\n 'title': 'Entities',\n 'icon': \"fa fa-bar-chart\",\n 'dropdown': [\n {\n 'title': 'All entities',\n 'href': self.request.route_url(\n 'entities',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n ),\n 'icon': \"fa fa-bar-chart\"},\n {\n 'title': 'CPTs',\n 'href': self.request.route_url(\n 'cpts',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'columnsearch'\n }\n ),\n }\n ]\n }\n ]),\n if self.user.has_admin:\n menu.append(\n {\n 'title': \"User Management\",\n 'icon': \"fa fa-users\",\n 'dropdown': [\n {\n 'title': 'User Overview',\n 'href': self.request.route_url(\n 'users',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n ),\n 'icon': 'fa fa-users',\n },\n {\n 'title': 'Add User',\n 'href': self.request.route_url('user_create'),\n 'icon': 'fa fa-user-plus',\n }\n ]\n }\n )\n\n return menu", "def get_menu() -> str:\n date = datetime.date.today()\n urls = generate_urls(date)\n menu_json = fetch_menu(urls)\n menu = extract_menu(menu_json, date)\n\n return menu", "def ask_for_full_menu():\n res = requests.get(url='http://127.0.0.1:5000/full_menu')\n return res.text", "def get_menu_items(self):\n url = self.build_url(\"menus/\")\n res = get(url)\n if res.ok:\n return [x[\"item_name\"] for x in res.json()]\n return None", "def menu(self):\n return self._menu", "def get_menu_items(self) -> typing.List[typing.Tuple[str, typing.List[typing.Tuple[str, typing.Callable[[], None]]]]]: #this method is to be queried by the root frame when it is creating the menu bar at the top of the screen and needs options to put in it\n return []", "def main_menu(self):\n return self.sitemap", "def create_menus( self ):", "def test_menu():\n response = app.test_client().get('/v1/resources/menu/all')\n\n assert response.status_code == 200\n assert response.content_type == 'application/json'", "def main_menu(self):\n menu_string = \"Main menu\\n\"\n menu_string += \"\\t1. Modify a list\\n\"\n menu_string += \"\\t2. Grade submenu\\n\"\n menu_string += \"\\t3. Search for something\\n\"\n menu_string += \"\\t4. Get a statistic\\n\"\n menu_string += \"\\t5. Undo/Redo\\n\"\n menu_string += \"\\t0. 
Exit\\n\"\n stop = False\n\n while not stop:\n command_list = \\\n {'0': self.__no_command,\n '1': self.__modify_submenu,\n '2': self.__grade_submenu,\n '3': self.__search_submenu,\n '4': self.__statistics_submenu,\n '5': self.__undo_submenu\n }\n command = self.__ui_read_command(menu_string)\n\n if command in command_list.keys():\n if command == '0':\n return\n else:\n command_list[command]()\n\n else:\n print(\"Invalid command!\")", "def get_one_menu_option():", "def print_menu():\r\n print(\"==============================================\")\r\n print(\"What do you want to do now? \")\r\n print(\"==============================================\")\r\n print(\"Available options:\")\r\n i = 1\r\n for a in available_actions:\r\n if current_state in a[\"valid_states\"]:\r\n # Only hint about the action if the current state allows it\r\n print(\" %i) %s\" % (i, a[\"description\"]))\r\n i += 1\r\n print()", "def navigate_mainMenu():\r\n msg, flag = \"\", False\r\n try: \r\n 'Click on the main menu item in OMM home page'\r\n \r\n flag = ui_controls.button(get_obj_identifier('mnu_btn'))\r\n if flag:\r\n print \"Main menu icon in home page is clicked\"\r\n\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return flag, msg", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values. Python is cool.\n # Please feel free to change the menu and add options.\n print(\"\\n *** MENU ***\") \n menu = {\"n\": (\"Navigate\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"o\": (\"Obstacle count\", self.obstacle_count),\n \"s\": (\"Shy\", self.shy),\n \"f\": (\"Follow\", self.follow),\n \"c\": (\"Calibrate\", self.calibrate),\n \"q\": (\"Quit\", self.quit)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = str.lower(input(\"Your selection: \"))\n # activate the item selected\n menu.get(ans, [None, self.quit])[1]()", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values. 
Python is cool.\n # Please feel free to change the menu and add options.\n print(\"\\n *** MENU ***\") \n menu = {\"n\": (\"Navigate\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"o\": (\"Obstacle count\", self.obstacle_count),\n \"s\": (\"Shy\", self.shy),\n \"f\": (\"Follow\", self.follow),\n \"c\": (\"Calibrate\", self.calibrate),\n \"q\": (\"Quit\", self.quit)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = str.lower(input(\"Your selection: \"))\n # activate the item selected\n menu.get(ans, [None, self.quit])[1]()", "def render(self):\n menu = etree.Element('openbox_pipe_menu')\n \n walk(self.menuItems, menu)\n \n print etree.tostring(menu)", "def getMenuOption():\n return menu_option", "def menu():\n return render_template('menu.html')", "def get_app_menu(self): # real signature unknown; restored from __doc__\n pass", "def accessoriesMenu():\n pref = QtGui.QAction(mw)\n pref.setText(\"Command panel\")\n pref.setObjectName(\"CommandPanel\")\n pref.triggered.connect(onPreferences)\n try:\n import AccessoriesMenu\n AccessoriesMenu.addItem(\"CommandPanel\")\n except ImportError:\n a = mw.findChild(QtGui.QAction, \"AccessoriesMenu\")\n if a:\n a.menu().addAction(pref)\n else:\n mb = mw.menuBar()\n action = QtGui.QAction(mw)\n action.setObjectName(\"AccessoriesMenu\")\n action.setIconText(\"Accessories\")\n menu = QtGui.QMenu()\n action.setMenu(menu)\n menu.addAction(pref)\n\n def addMenu():\n \"\"\"Add accessories menu to the menu bar.\"\"\"\n toolsMenu = mb.findChild(QtGui.QMenu, \"&Tools\")\n if toolsMenu:\n toolsMenu.addAction(action)\n\n addMenu()\n mw.workbenchActivated.connect(addMenu)", "def get_items(self):\n options = \"\"\n for item in self.menu:\n options += f\"{item.name}/\"\n return options", "def createMenu():\n mType = -1\n if auth.is_logged_in() and auth.has_membership('administrador',auth.user.id):\n return menuAdmin\n elif auth.is_logged_in():\n return menuUser\n else:\n return menuPublic", "def addMenu():\n mb.addAction(actionAccessories)\n actionAccessories.setVisible(True)", "def top_menu_items(request):\n return {\n 'top_menu_items': settings.TOP_MENU_ITEMS\n }", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values\n # You may change the menu if you'd like to add an experimental method\n menu = {\"n\": (\"Navigate forward\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"c\": (\"Calibrate\", self.calibrate),\n \"t\": (\"test restore\", self.calibrate),\n \"s\": (\"Check status\", self.status),\n \"q\": (\"Quit\", quit_now)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = raw_input(\"Your selection: \")\n # activate the item selected\n menu.get(ans, [None, error])[1]()", "def loadMenu(self):\r\n show_empty_root_items = pos.config['menu', 'show_empty_root_items']\r\n show_disabled_items = pos.config['menu', 'show_disabled_items']\r\n self.mainToolbook.AssignImageList(pos.menu.il)\r\n \r\n for root in pos.menu.main.items:\r\n if not root.enabled and not show_disabled_items:\r\n continue\r\n enabled_children = [i for i in root.children if i.enabled]\r\n if show_disabled_items:\r\n children = root.children\r\n else:\r\n children = enabled_children\r\n # Hide empty menu root items\r\n if len(children) == 0 and not show_empty_root_items:\r\n continue\r\n page = self.getToolbookPage(children)\r\n self.mainToolbook.AddPage(imageId=root.image, page=page, 
select=False, text=root.label)\r\n page.Enable(root.enabled)# and len(enabled_children) != 0)\r", "def test_extractMenu(self):\n doc = lunchr.parseHtml(self.html)\n self.assertEquals(self.menu, lunchr.extractMenu(doc))", "def show_menu():\n if not GD.gui.menu.item('Tools'):\n create_menu()", "def do_menu(parser, token):\n bits = token.split_contents()\n return RenderMenuNode()", "def on_menu_connect():\n\n ServerSockets.get_menuitems_by_category(True)", "def GetMenu(self):\n return self._menu", "def get_menu_item(menu_item_name):\n\n pass", "def _MenuAboutToShow(self, menuName):\n menu = self._menus[menuName]\n context = self._contextProvider.GetMenuContext()\n for action in menu.actions():\n if action.isSeparator():\n continue\n actionData = action.data()\n if actionData and isinstance(actionData, MenuAction):\n actionData.Update(action, context)", "def menu_items():\n def show():\n form.show();\n form.activateWindow()\n form.raise_()\n\n lst = []\n lst.append((\"Import Programmableweb\", show))\n \n return tuple(lst)", "def main_menu(self):\n return [SitemapEntry(self.config.options.mount_label.title(), '.')]", "def main_menu(self):\n return [SitemapEntry(self.config.options.mount_label.title(), '.')]", "def publicMenu(restaurant_id):\n\n restaurant = session.query(Restaurant).filter_by(id = restaurant_id).first()\n menuItems = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)\n creator = getUserInfo(restaurant.user_id)\n\n return render_template( 'publicMenu.html',\n menuItems = menuItems,\n restaurant = restaurant,\n creator= creator )", "def get_menu ( self, object, row ):\n return self.menu", "def attributeMenu(*args, beginMenu: bool=True, editor: AnyStr=\"\", finishMenu: bool=True,\n inputs: bool=True, plug: name=None, regPulldownMenuCommand: AnyStr=\"\",\n unregPulldownMenuCommand: int=0, **kwargs)->AnyStr:\n pass", "def getMenuItems(self, context, request):\n\n results = []\n url = context.absolute_url()\n can_tag = not sdct.IStructuredDocument.providedBy(context)\n can_untag = not can_tag\n \n if can_tag:\n results.append(\n { 'title' : \"Mark as a structured document\",\n 'description' : 'Mark the content as a structured document',\n 'action' : \"%s/@@sd.tagging\" % url,\n 'selected' : False,\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_tag',\n 'separator': None,\n 'class': ''\n },\n 'submenu' : None,\n }\n )\n else:\n layout = context.getLayout()\n results.append(\n { 'title' : \"Remove structured document options\",\n 'description' : 'Restore the content normal behavior',\n 'action' : \"%s/@@sd.untagging\" % url,\n 'selected' : False,\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_untag',\n 'separator': None,\n 'class': ''\n },\n 'submenu' : None,\n }\n )\n \n results.append(\n { 'title' : \"Document on one page\",\n 'description' : 'Change the display of the document',\n 'action' : (\"%s/@@sd.options?layout=@@sd.document.onepage\"\n % url),\n 'selected' : layout == '@@sd.document.onepage',\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_document_onepage',\n 'separator': 'actionSeparator',\n 'class': ''\n },\n 'submenu': None,\n }\n )\n\n return results", "def menu():\n user_id = session[\"user_id\"]\n if not user_id:\n session.clear()\n redirect(\"/\")\n database = db.db_connect()\n user = g.user\n return render_template(\"menu.html\", username=user[\"username\"])", "def Infor_menu():\n \n import sys\n d = ''\n msg = '' \n while d == '':\n print('\\nINFORMATION MENU')\n print('1. Display coordinate sequence')\n print('2. Display SEQRES sequence')\n print('3. 
Display Alignment sequence')\n print('4. Display all non-water ligands in the protein(if any)')\n print('q. Quit')\n option = input('Select an option: ')\n if option.lower() == 'q':\n sys.exit()\n elif option == '1':\n msg = 'Option 1'\n d = display_cord_seq()\n elif option == '2':\n msg = 'Option 2'\n d = display_seqres_seq()\n elif option == '3':\n msg = 'Option 3'\n d = display_algn_seq()\n elif option == '4':\n msg = 'Option 4'\n d = display_all_nonwater_L()\n else:\n print ('Invalid selection!')\n return msg, d", "def __admin_menu(self):\n log.debug(\"Displaying __admin_menu\")\n self.menu = TelegramMenu(\"config/comunda_admin_menu.bpmn\", self, \"MenuStart\")\n self.menu.admin_menu(\"MenuStart\", \"menu_admin_main_txt\")\n return", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values. Python is cool.\n # Please feel free to change the menu and add options.\n print(\"\\n *** MENU ***\") \n menu = {\"c\": (\"Calibrate\", self.calibrate),\n \"d\": (\"Dance\", self.dance),\n \"h\": (\"Hold position\", self.hold_position),\n \"n\": (\"Navigate\", self.nav),\n \"o\": (\"Obstacle count\", self.obstacle_count),\n \"q\": (\"Quit\", self.quit),\n \"v\": (\"Veer\", self.slither)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = str.lower(input(\"Your selection: \"))\n # activate the item selected\n menu.get(ans, [None, self.quit])[1]()", "def getMenuItem(self, event):\n return self.GetMenuBar().FindItemById(event.GetId())", "def getMenu(self, name):\n if self.__object is not None:\n return self.__object.getMenu(name)\n else:\n return None", "def get_menus(id, api_key=API_KEY):\n path = \"/restaurant/\" + str(id) + \"/menuitems\"\n\n return request(API_HOST, path, api_key, url_params=None)['result']['data']", "def menu():\n logout_user()\n return render_template('menu.html')", "def display_menu(self):\n return ', '.join(menu.name for menu in self.menu.all()[:3])", "def showMenu():\n print( \"1. Create New User\" )\n print( \"2. Authorize\" )\n print( \"3. Send SMS\" )\n print( \"4. Send Email\" )\n print( \"5. Get Recently Sent Message\" )\n print( \"6. 
Exit\" )", "def navigate_mainMenu_settings():\r\n msg, flag = \"\", False\r\n try:\r\n 'click on home main menu button'\r\n flag1 = navigate_mainMenu()\r\n\r\n 'Click on the settings item in the list generated from OMM home page -> main menu'\r\n flag2 = ui_controls.button(get_obj_identifier('home_mainMenu_settings_lnk'))\r\n flag = flag1 and flag2\r\n\r\n if flag:\r\n print \"settings in the home page -> main menu button is clicked\"\r\n\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return flag, msg", "def navigation_menu(context):\n try:\n request = context['request']\n request_path = request.path\n except KeyError:\n request_path = ''\n return {\n 'shop_nav_enabled': settings.SHOP_NAV_ENABLED,\n 'request_path': request_path,\n }", "def OutputMenuItems():\r\n print('''\r\n Menu of Options\r\n 1) Show current data\r\n 2) Add a new item.\r\n 3) Save Data to File\r\n 4) Exit Program\r\n ''')\r\n print() # Add an extra line for looks\r", "def menu(request):\n user = request.user\n # fase = Fase.objects.filter(id_Proyecto__id_proyecto=1).order_by('id_Fase')\n\n\n if( user.usuario.esta_aprobado):\n if user.has_perm('gestion.es_administrador'):\n return render(request,'Menu/MenuAdministrador.html')\n else:\n return render(request, 'Menu/Menu.html')\n else:\n registrarAuditoria(request.user ,'Inicio Menu en espera de aprobacion')\n return render(request, 'Menu/MenuEnEspera.html')", "def initMenu(self, menu):\n menu.clear()\n \n self.subMenus = []\n \n adminMenu = QMenu(self.tr(\"Administration\"), menu)\n adminMenu.setTearOffEnabled(True)\n adminMenu.addAction(self.gitShowConfigAct)\n adminMenu.addAction(self.gitRepoConfigAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitReflogBrowserAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitCreateIgnoreAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitCreateArchiveAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitStatisticsAct)\n adminMenu.addAction(self.gitVerifyAct)\n adminMenu.addAction(self.gitHouseKeepingAct)\n self.subMenus.append(adminMenu)\n \n bundleMenu = QMenu(self.tr(\"Bundle Management\"), menu)\n bundleMenu.setTearOffEnabled(True)\n bundleMenu.addAction(self.gitBundleAct)\n bundleMenu.addSeparator()\n bundleMenu.addAction(self.gitBundleVerifyAct)\n bundleMenu.addAction(self.gitBundleListHeadsAct)\n bundleMenu.addSeparator()\n bundleMenu.addAction(self.gitBundleApplyFetchAct)\n bundleMenu.addAction(self.gitBundleApplyPullAct)\n self.subMenus.append(bundleMenu)\n \n patchMenu = QMenu(self.tr(\"Patch Management\"), menu)\n patchMenu.setTearOffEnabled(True)\n patchMenu.addAction(self.gitCheckPatchesAct)\n patchMenu.addAction(self.gitApplyPatchesAct)\n patchMenu.addSeparator()\n patchMenu.addAction(self.gitShowPatcheStatisticsAct)\n self.subMenus.append(patchMenu)\n \n bisectMenu = QMenu(self.tr(\"Bisect\"), menu)\n bisectMenu.setTearOffEnabled(True)\n bisectMenu.addAction(self.gitBisectStartAct)\n bisectMenu.addAction(self.gitBisectStartExtendedAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectGoodAct)\n bisectMenu.addAction(self.gitBisectBadAct)\n bisectMenu.addAction(self.gitBisectSkipAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectResetAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectLogBrowserAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectCreateReplayAct)\n bisectMenu.addAction(self.gitBisectEditReplayAct)\n bisectMenu.addAction(self.gitBisectReplayAct)\n 
self.subMenus.append(bisectMenu)\n \n tagsMenu = QMenu(self.tr(\"Tags\"), menu)\n tagsMenu.setIcon(UI.PixmapCache.getIcon(\"vcsTag.png\"))\n tagsMenu.setTearOffEnabled(True)\n tagsMenu.addAction(self.vcsTagAct)\n tagsMenu.addAction(self.gitTagListAct)\n tagsMenu.addAction(self.gitDescribeTagAct)\n self.subMenus.append(tagsMenu)\n \n branchesMenu = QMenu(self.tr(\"Branches\"), menu)\n branchesMenu.setIcon(UI.PixmapCache.getIcon(\"vcsBranch.png\"))\n branchesMenu.setTearOffEnabled(True)\n branchesMenu.addAction(self.gitBranchAct)\n branchesMenu.addSeparator()\n branchesMenu.addAction(self.gitBranchListAct)\n branchesMenu.addAction(self.gitMergedBranchListAct)\n branchesMenu.addAction(self.gitNotMergedBranchListAct)\n branchesMenu.addAction(self.gitShowBranchAct)\n branchesMenu.addSeparator()\n branchesMenu.addAction(self.gitDeleteRemoteBranchAct)\n self.subMenus.append(branchesMenu)\n \n changesMenu = QMenu(self.tr(\"Manage Changes\"), menu)\n changesMenu.setTearOffEnabled(True)\n changesMenu.addAction(self.gitUnstageAct)\n changesMenu.addAction(self.vcsRevertAct)\n changesMenu.addAction(self.vcsMergeAct)\n changesMenu.addAction(self.gitCommitMergeAct)\n changesMenu.addAction(self.gitCancelMergeAct)\n \n remotesMenu = QMenu(self.tr(\"Remote Repositories\"), menu)\n remotesMenu.setTearOffEnabled(True)\n remotesMenu.addAction(self.gitRemotesShowAct)\n remotesMenu.addAction(self.gitRemoteShowAct)\n remotesMenu.addSeparator()\n remotesMenu.addAction(self.gitRemoteAddAct)\n remotesMenu.addAction(self.gitRemoteRenameAct)\n remotesMenu.addAction(self.gitRemoteChangeUrlAct)\n remotesMenu.addAction(self.gitRemoteCredentialsAct)\n remotesMenu.addAction(self.gitRemoteRemoveAct)\n remotesMenu.addAction(self.gitRemotePruneAct)\n \n cherrypickMenu = QMenu(self.tr(\"Cherry-pick\"), menu)\n cherrypickMenu.setIcon(UI.PixmapCache.getIcon(\"vcsGraft.png\"))\n cherrypickMenu.setTearOffEnabled(True)\n cherrypickMenu.addAction(self.gitCherryPickAct)\n cherrypickMenu.addAction(self.gitCherryPickContinueAct)\n cherrypickMenu.addAction(self.gitCherryPickQuitAct)\n cherrypickMenu.addAction(self.gitCherryPickAbortAct)\n \n stashMenu = QMenu(self.tr(\"Stash\"), menu)\n stashMenu.setTearOffEnabled(True)\n stashMenu.addAction(self.gitStashAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashBrowserAct)\n stashMenu.addAction(self.gitStashShowAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashApplyAct)\n stashMenu.addAction(self.gitStashPopAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashBranchAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashDropAct)\n stashMenu.addAction(self.gitStashClearAct)\n \n submodulesMenu = QMenu(self.tr(\"Submodules\"), menu)\n submodulesMenu.setTearOffEnabled(True)\n submodulesMenu.addAction(self.gitSubmoduleAddAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesInitAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateInitAct)\n submodulesMenu.addAction(self.gitSubmodulesDeinitAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesUpdateAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateRemoteAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateOptionsAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesSyncAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesListAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesStatusAct)\n 
submodulesMenu.addAction(self.gitSubmodulesSummaryAct)\n \n act = menu.addAction(\n UI.PixmapCache.getIcon(\n os.path.join(\"VcsPlugins\", \"vcsGit\", \"icons\", \"git.png\")),\n self.vcs.vcsName(), self._vcsInfoDisplay)\n font = act.font()\n font.setBold(True)\n act.setFont(font)\n menu.addSeparator()\n \n menu.addAction(self.gitFetchAct)\n menu.addAction(self.gitPullAct)\n menu.addSeparator()\n menu.addAction(self.vcsCommitAct)\n menu.addAction(self.gitPushAct)\n menu.addSeparator()\n menu.addMenu(changesMenu)\n menu.addMenu(stashMenu)\n menu.addSeparator()\n menu.addMenu(cherrypickMenu)\n menu.addSeparator()\n menu.addMenu(bundleMenu)\n menu.addMenu(patchMenu)\n menu.addSeparator()\n menu.addMenu(remotesMenu)\n menu.addMenu(submodulesMenu)\n menu.addSeparator()\n menu.addMenu(tagsMenu)\n menu.addMenu(branchesMenu)\n menu.addSeparator()\n menu.addAction(self.gitLogBrowserAct)\n menu.addSeparator()\n menu.addAction(self.vcsStatusAct)\n menu.addSeparator()\n menu.addAction(self.vcsDiffAct)\n menu.addAction(self.gitExtDiffAct)\n menu.addSeparator()\n menu.addAction(self.vcsSwitchAct)\n menu.addSeparator()\n menu.addMenu(bisectMenu)\n menu.addSeparator()\n menu.addAction(self.vcsCleanupAct)\n menu.addSeparator()\n menu.addAction(self.vcsCommandAct)\n menu.addSeparator()\n menu.addMenu(adminMenu)\n menu.addSeparator()\n menu.addAction(self.gitEditUserConfigAct)\n menu.addAction(self.gitConfigAct)\n menu.addSeparator()\n menu.addAction(self.vcsNewAct)\n menu.addAction(self.vcsExportAct)", "def _createDisplayMenu(ned, menu):\n pass", "def menu(*args, allowOptionBoxes: bool=True, defineTemplate: AnyStr=\"\", deleteAllItems:\n bool=True, docTag: Union[AnyStr, bool]=\"\", enable: bool=True, exists: bool=True,\n familyImage: Union[AnyStr, bool]=\"\", helpMenu: bool=True, itemArray: bool=True, label:\n Union[AnyStr, bool]=\"\", ltVersion: Union[AnyStr, bool]=\"\", mnemonic: Union[AnyStr,\n bool]=\"\", numberOfItems: bool=True, parent: AnyStr=\"\", postMenuCommand: Script=None,\n postMenuCommandOnce: bool=True, scrollable: bool=True, tearOff: bool=True,\n useTemplate: AnyStr=\"\", version: Union[AnyStr, bool]=\"\", visible: bool=True, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def getMenuNames(self):\n if self.__object is not None:\n return list(self.__menus.keys())\n else:\n return []", "def menuItem(*args, allowOptionBoxes: bool=True, annotation: Union[AnyStr, bool]=\"\", boldFont:\n bool=False, checkBox: bool=True, collection: Union[AnyStr, bool]=\"\", command:\n Union[Script, bool]=None, data: Union[int, bool]=0, defineTemplate: AnyStr=\"\",\n divider: bool=True, dividerLabel: Union[AnyStr, bool]=\"\", docTag: Union[AnyStr,\n bool]=\"\", dragDoubleClickCommand: Union[Script, bool]=None, dragMenuCommand:\n Union[Script, bool]=None, echoCommand: bool=True, enable: bool=True,\n enableCommandRepeat: bool=True, exists: bool=True, familyImage: Union[AnyStr,\n bool]=\"\", image: Union[AnyStr, bool]=\"\", imageOverlayLabel: Union[AnyStr, bool]=\"\",\n insertAfter: AnyStr=\"\", isCheckBox: bool=True, isOptionBox: bool=True,\n isRadioButton: bool=True, italicized: bool=False, label: Union[AnyStr, bool]=\"\",\n longDivider: bool=True, ltVersion: Union[AnyStr, bool]=\"\", optionBox: bool=True,\n optionBoxIcon: Union[AnyStr, bool]=\"\", parent: AnyStr=\"\", postMenuCommand:\n Union[Script, bool]=None, postMenuCommandOnce: bool=True, radialPosition:\n Union[AnyStr, bool]=\"\", radioButton: bool=True, runTimeCommand: AnyStr=\"\",\n sourceType: Union[AnyStr, bool]=\"\", 
subMenu: bool=True, tearOff: bool=True,\n useTemplate: AnyStr=\"\", version: Union[AnyStr, bool]=\"\", visible: bool=True,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def print_menu():\r\n\r\n print('Menu\\n\\n[l] load Inventory from file\\n[a] Add CD\\n[i] Display Current Inventory')\r\n print('[d] Delete CD from Inventory\\n[s] Save Inventory to file\\n[x] exit\\n')", "def print_menu():\r\n\r\n print('Menu\\n\\n[l] load Inventory from file\\n[a] Add CD\\n[i] Display Current Inventory')\r\n print('[d] delete CD from Inventory\\n[s] Save Inventory to file\\n[x] exit\\n')", "def menu_inicial():\n clear_window()\n items = [\"Juego Nuevo\", \"Acerca de\", \"Salir\"]\n while True:\n show_title(\"____ Menu Inicial ____\")\n item = show_menu(items)\n clear_window()\n if item == 0 :\n juego_nuevo()\n clear_window()\n elif item==1 :\n mostrar_acerca_de()\n clear_window()\n elif item==2 :\n return\n else:\n print \"Opcion invalida\"", "def __init__(self, menu_name):\n self.menu_name = menu_name", "def checkMenuItem(self):\r\n self.eventID, self.parameter, res = self.receiver.getMenuItem()\r\n\r\n return res", "def check_menu_exists(menu_name):\n\n pass", "def create_menu():\n MenuData = [\n (\"&Draw Variables\",drawable.ask),\n (\"&Show Variables\",printall),\n (\"&Print Variables\",printval),\n (\"&Edit Variable\",edit),\n (\"&Rename Variable\",rename),\n (\"&Forget Variables\",forget),\n (\"---\",None),\n (\"&Create Plane\",\n [(\"Coordinates\", \n [(\"Point and normal\", createPlaneCoordsPointNormal),\n (\"Three points\", createPlaneCoords3Points),\n ]), \n (\"Visually\", \n [(\"Three points\", createPlaneVisual3Points),\n ]),\n ]),\n (\"&Select Plane\",planes.ask),\n (\"&Draw Selection\",planes.draw),\n (\"&Forget Selection\",planes.forget),\n (\"---\",None),\n (\"&Pick Actors\",pick_actors),\n (\"&Pick Elements\",pick_elements),\n (\"&Pick Points\",pick_points),\n (\"&Pick Edges\",pick_edges),\n (\"---\",None),\n ('&Selection',\n [('&Create Report',report_selection),\n ('&Set Property',setprop_selection),\n ('&Grow',grow_selection),\n ('&Partition',partition_selection),\n ('&Get Partition',get_partition),\n ('&Export',export_selection),\n ]),\n (\"---\",None),\n ('&Query',\n [('&Actors',query_actors),\n ('&Elements',query_elements),\n ('&Points',query_points),\n ('&Edges',query_edges),\n ('&Distances',query_distances),\n ]),\n (\"---\",None),\n (\"&Close\",close_menu),\n ]\n return widgets.Menu('Tools',items=MenuData,parent=GD.gui.menu,before='help')", "def __aboutToShowMenu(self):\n self.hgFetchAct.setEnabled(self.vcs.canPull())", "def MainMenu():\n\n # You have to open an object container to produce the icons you want to appear on this page.\n oc = ObjectContainer()\n main_list = [('New Videos', ''), ('Best Videos', '/best'), ('Pornstars', '/pornstars')]\n for pt, h in main_list:\n oc.add(DirectoryObject(\n key=Callback(ShowHTML, pTitle=pt, href=h),\n title=pt, thumb=Callback(GetThumb, url=BASE_URL)))\n oc.add(DirectoryObject(\n key=Callback(ShowHTML, pTitle=\"Pornstars\", href='/channels'),\n title=\"Channels\", thumb=Callback(GetThumb, url=BASE_URL)))\n\n oc.add(InputDirectoryObject(key=Callback(Search), title='Search Videos', prompt='Search Videos'))\n\n return oc", "def __populateMenu(self, name, menu):\n if name not in [\"Tools\", \"PluginTools\"]:\n return\n \n editor = e5App().getObject(\"ViewManager\").activeWindow()\n \n if name == \"Tools\":\n if not menu.isEmpty():\n menu.addSeparator()\n act = menu.addMenu(self.__menu)\n act.setEnabled(editor 
is not None)\n elif name == \"PluginTools\" and self.__mainActions:\n self.__mainActions[-1].setEnabled(editor is not None)", "def back_to_menu_info(cls):\n print(\n \"\"\"\n ________________________________________________\n\n HABITSBOX\n ________________________________________________\n Hint: Press 0 (zero) to return to the main menu\n ------------------------------------------------\"\"\")", "def printMenu():\n # tWelc = PrettyTable(['Welcome to the CLI-of the repository classifier'])\n print('Welcome to the CLI of the repository classifier')\n print(strStopper1)\n t = PrettyTable(['Action', ' Shortcut '])\n t.add_row(['Show Menu', '- m -'])\n t.add_row([' Predict repositories form txt-file ', '- i -'])\n t.add_row(['Input URL', '- u -'])\n t.add_row(['Show Info', '- f -'])\n t.add_row(['Train Model', '- t -'])\n t.add_row(['set GitHub-Token', '- g -'])\n t.add_row(['Help', '- h -'])\n t.add_row(['Quit', '- q -'])\n print(t)\n print('')", "def print_menu():\r\n \r\n print('Menu: \\n\\n[1] Load Inventory from File\\n[2] Add CD\\n[3] Display Current Inventory')\r\n print('[4] Delete CD from Inventory\\n[5] Save Inventory to file\\n[0] Exit Program\\n')", "def accessoriesMenu():\n pref = QtGui.QAction(mw)\n pref.setText(\"TabBar\")\n pref.setObjectName(\"TabBar\")\n pref.triggered.connect(onPreferences)\n try:\n import AccessoriesMenu\n AccessoriesMenu.addItem(\"TabBar\")\n except ImportError:\n a = mw.findChild(QtGui.QAction, \"AccessoriesMenu\")\n if a:\n a.menu().addAction(pref)\n else:\n mb = mw.menuBar()\n actionAccessories = QtGui.QAction(mw)\n actionAccessories.setObjectName(\"AccessoriesMenu\")\n actionAccessories.setIconText(\"Accessories\")\n menu = QtGui.QMenu()\n actionAccessories.setMenu(menu)\n menu.addAction(pref)\n\n def addMenu():\n \"\"\"Add accessories menu to the menu bar.\"\"\"\n mb.addAction(actionAccessories)\n actionAccessories.setVisible(True)\n\n addMenu()\n mw.workbenchActivated.connect(addMenu)", "def addMenu(menu=None):\n\n # Workbench\n if menu and \"workbench\" in menu:\n wb = menu[\"workbench\"]\n if \".\" in wb or \",\" in wb:\n wb = None\n else:\n wb = None\n\n # UUID\n if wb and \"uuid\" in menu:\n uid = menu[\"uuid\"]\n if \".\" in uid or \",\" in uid:\n uid = None\n else:\n uid = None\n\n if wb and uid:\n domain = \".\".join([\"CPMenu\", \"System\", wb, uid])\n group = cpc.findGroup(domain)\n if not group:\n group = cpc.newGroup(domain)\n if group:\n # UUID\n group.SetString(\"uuid\", uid)\n # Name\n if \"name\" in menu:\n group.SetString(\"name\", menu[\"name\"])\n # Commands\n if \"commands\" in menu:\n temp = []\n for cmd in menu[\"commands\"]:\n if cmd.startswith(\"CPMenu\") and \",\" not in cmd:\n temp.append(cmd)\n elif \".\" not in cmd and \",\" not in cmd:\n temp.append(cmd)\n else:\n pass\n group.SetString(\"commands\", \",\".join(temp))\n # Default\n if \"default\" in menu:\n base = p.GetGroup(\"System\").GetGroup(wb)\n base.SetString(\"default\", domain)\n else:\n domain = None\n else:\n domain = None\n\n return domain", "def menu_items():\r\n menu_name = \"Coloring\"\r\n algorithm_list = [[\"Brute Force\", board_brute_force_coloring],\r\n [\"separator\", \"separator\"],\r\n [\"Greedy Coloring\", board_greedy_coloring],\r\n [\"Tabu Coloring Search\", board_tabu_coloring],\r\n [\"Tabu Pre-Coloring Search\", board_tabu_precoloring]]\r\n \r\n return [menu_name, algorithm_list]", "def extract_menu(menu_json: dict, date: datetime.date) -> str:\n\n inner_menu = menu_json[-1]\n acf = inner_menu.get(\"acf\")\n\n date_string = f\"*Menu for 
{date}*\"\n story = prettify(acf.get(\"story\")).strip()\n menu_items = parse_menu_items(acf.get(\"menu_items\"))\n\n return \"\\n\".join([date_string] + [story] + menu_items)", "def _print_menu(self):\n # Create header line.\n header = \"%s Menu:\" % (self.__name)\n header = header.title()\n print(header)\n\n # Show the iterations counter.\n iterations = self._status.get_value(\"iterations\")\n print(\"(Iteration %d)\" % (iterations))\n\n self._print_custom()\n\n # Display the options alphabetically.\n option_names = list(self.__options.keys())\n option_names.sort()\n for option in option_names:\n desc, command = self.__options[option]\n print(\"\\t%s: %s\" % (option, desc))", "def print_menu():\r\n clear()\r\n print(\"Ratatouille Server\")\r\n print(\"---------------------------\")\r\n print(\"\")\r\n\r\n for (index, func) in MENU.items():\r\n print(\"%d - %s\" % (index, func.__name__))\r\n\r\n return raw_input(\"Choose an option: \").lstrip()", "def menu(stdscr):\n options = {}\n options[\"v\"] = (\"(V)iew user list\", view_users)\n options[\"r\"] = (\"(R)ead messages\", list_messages)\n options[\"s\"] = (\"(S)end a new message\", send_message)\n options[\"u\"] = (\"(U)pdate node status (slow!)\", update_status)\n while True:\n # Don't show the cursor or echo output.\n # These are inside the loop so menu items can unset them.\n curses.curs_set(0)\n curses.noecho()\n row = 4\n column = 5\n for o in sorted(options):\n safe_put(stdscr, options[o][0], (row, column))\n row += 1\n safe_put(stdscr, \"(Q)uit Taurus\", (row+1, column))\n stdscr.refresh()\n\n c = stdscr.getch()\n if not 0 < c < 255:\n continue\n if chr(c) == \"q\":\n break\n if chr(c) in options:\n options[chr(c)][1](stdscr)", "def printMenu():\n print(\"\\nBienvenido\")\n print(\"1- Cargar Datos\")\n print(\"2- Contar los elementos de la Lista\")\n print(\"3- Contar elementos filtrados por palabra clave\")\n print(\"4- Consultar elementos a partir de dos listas\")\n print(\"5- Consultar buenas peliculas\")\n print(\"0- Salir\")", "def test_get_menu_when_not_set(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\t\n\t\tresponse = self.client().get('/api/v2/menu',\n\t\t\theaders={\"x-access-token\": access_token})\n\t\tself.assertEqual(response.status_code, 404)", "def main_menu(self) -> str:\n print(\" ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n print(\" MENU PRINCIPALE \")\n print(\n \"\\n\"\n \" ● 1 - Créer un tournoi ●\\n\"\n \" ● 2 - Ajouter des joueurs à un tournoi ●\\n\"\n \" ● 3 - Débuté ou continué un tournoi ●\\n\"\n \" ● 4 - Ajouter un nouveau joueur ●\\n\"\n \" ● 5 - Modifier classement d'un joueur ●\\n\"\n \" ● 6 - Menu Secondaire ●\\n\"\n \" ● 7 - Quitter l'application ● \"\n\n )\n print(\" ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n response = input(\"Choississez un chiffre pour naviguer dans le menu : \")\n return response", "def index(self, *_, **__):\n # TODO: There has to be a better way of doing this...\n self.extension = [''.join(self.extensions)]\n _vars = {k: getattr(self,k) for k in dir(self) if not k.startswith('_')}\n _vars['path'] = self.buildpath()\n _vars['extension'] = '\\n\\t'.join(self.extensions)\n return self.menu % _vars", "def _createUtilityMenuItems(ned, node):\n pass", "def listen_for_menu():\n\n pubsub=pgpubsub.connect(**app.pubsub_conn_det)\n channel = ServerSockets._set_up_change_notifier(\n pubsub.conn, \"menuitems\", set(ServerSockets.DbActions))\n pubsub.listen(channel)\n while 
True:\n for event in pubsub.events(yield_timeouts=True):\n if event is None:\n pass\n else:\n with app.app_context():\n ServerSockets.get_menuitems_by_category()", "def __help_menu(self):\n log.debug(\"Displaying __help_menu\")\n # Create a keyboard with the user help menu\n keyboard = [[telegram.KeyboardButton(self.loc.get(\"menu_guide\"))],\n [telegram.KeyboardButton(self.loc.get(\"menu_contact_shopkeeper\"))],\n [telegram.KeyboardButton(self.loc.get(\"menu_all_cancel\"))]]\n # Send the previously created keyboard to the user (ensuring it can be clicked only 1 time)\n self.bot.send_message(self.chat.id,\n self.loc.get(\"conversation_open_help_menu\"),\n reply_markup=telegram.ReplyKeyboardMarkup(keyboard, one_time_keyboard=True))\n # Wait for a reply from the user\n selection = self.__wait_for_specific_message([\n self.loc.get(\"menu_guide\"),\n self.loc.get(\"menu_contact_shopkeeper\")\n ], cancellable=True)\n # If the user has selected the Guide option...\n if selection == self.loc.get(\"menu_guide\"):\n # Send them the bot guide\n self.bot.send_message(self.chat.id, self.loc.get(\"help_msg\"))\n # If the user has selected the Order Status option...\n elif selection == self.loc.get(\"menu_contact_shopkeeper\"):\n # Find the list of available shopkeepers\n shopkeepers = self.session.query(db.Admin).filter_by(display_on_help=True).join(db.User).all()\n # Create the string\n shopkeepers_string = \"\\n\".join([admin.user.mention() for admin in shopkeepers])\n # Send the message to the user\n self.bot.send_message(self.chat.id, self.loc.get(\"contact_shopkeeper\", shopkeepers=shopkeepers_string))\n # If the user has selected the Cancel option the function will return immediately", "def showMenu(restaurant_id):\n\n if 'access_token' not in flask_session:\n return logInRedirect()\n restaurant = session.query(Restaurant).filter_by(id = restaurant_id).first()\n user_id = getUserId(flask_session['email'],flask_session['google_plus_id'])\n if not restaurant.user_id == user_id:\n return redirect(url_for(\"publicMenu\",restaurant_id = restaurant_id))\n\n menuItems = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)\n creator = getUserInfo(restaurant.user_id)\n\n return render_template( 'showMenu.html',\n restaurant = restaurant,\n menuItems = menuItems,\n creator = creator )", "def render(self, menu):\n # Get the menu title.\n try:\n menu_title = menu[\"meta\"][\"title\"]\n except KeyError:\n # If menu title not found, set to a default title.\n menu_title = \"??????????\"\n\n render = \"\\n\" + menu_title\n render += \"\\n\" + \"=\" * len(menu_title) + \"\\n\"\n\n # Loop through each section\n item_count_offset = 1\n for section in menu[\"sections\"]:\n try:\n section_title = section[\"meta\"][\"title\"]\n except KeyError:\n section_title = \"\"\n\n if section_title != \"\":\n render += f\"{section_title}\\n\"\n render += \"-\" * len(section_title) + \"\\n\"\n\n for item in section[\"items\"]:\n try:\n item_key = item[\"meta\"][\"key\"]\n except KeyError:\n item_key = item_count_offset\n item_count_offset += 1\n\n item_title = item[\"meta\"][\"title\"]\n\n render += f\"[{item_key}] {item_title}\\n\"\n\n render += \"\\n\"\n\n return render", "def sitemap_xml(self):\n if self.should_noindex():\n return []\n return self.main_menu()", "def get_user_menu_item_info(self, menu_id):\n return self._user_menu_items[menu_id]", "def get_menus(self):\n \n return [\n ('File', [\n ('New task...\\tCtrl+N', 'Add a new task', self.OnAddTask, wx.ID_NEW),\n ('Edit task...\\tCtrl+E', 'Edit the selected 
task', None, wx.ID_OPEN),\n ('Remove task...\\tDel', 'Remove the selected task', None, wx.ID_CLOSE),\n (None, ),\n ('&Quit\\tCtrl+Q', 'Close down this program', self.OnExit, wx.ID_EXIT)\n ]),\n ('Help', [\n ('About %s...\\tCtrl+H' % APP_TITLE, 'Learn a little about this program', self.OnAbout, wx.ID_ABOUT),\n ]),\n ]", "def create_menu(page):\n menu = []\n # menu.append('<span class=\"header\">Topics</span>')\n for title, target in NAV.items():\n if isinstance(target, str):\n if target.startswith((\"https://\", \"http://\", \"/\")):\n menu.append(f\"<a href='target'>{title}</a>\")\n else:\n menu.append(f\"<a href='{target}.html'>{title}</a>\")\n if target == page.name:\n menu[-1] = menu[-1].replace(\"<a \", '<a class=\"current\" ')\n menu += [\n f\"<a class='sub' href='#{title.lower()}'>{title}</a>\"\n for level, title in page.headers\n if level == 2\n ]\n elif isinstance(target, dict):\n menu.append(f\"<a href='{target['']}.html'>{title}</a>\")\n if target[\"\"] == page.name:\n menu[-1] = menu[-1].replace(\"<a \", '<a class=\"current\" ')\n if any(page.name == subtarget for subtarget in target.values()):\n for subtitle, subtarget in target.items():\n if not subtitle:\n continue\n if subtarget.startswith((\"https://\", \"http://\", \"/\")):\n menu.append(f\"<a class='sub' href='{subtarget}'>{subtitle}</a>\")\n else:\n menu.append(\n f\"<a class='sub' href='{subtarget}.html'>{subtitle}</a>\"\n )\n if subtarget == page.name:\n menu[-1] = menu[-1].replace(\"class='\", \"class='current \")\n else:\n raise RuntimeError(f\"Unexpected NAV entry {type(target)}\")\n\n return \"<br />\".join(menu)", "def print_menu():\n print()\n print(\"Main Menu\")\n print(\"---------\")\n print(\"1 - Process a new data file\")\n print(\"2 - Choose units\")\n print(\"3 - Edit room filter\")\n print(\"4 - Show summary statistics\")\n print(\"5 - Show temperature by date and time\")\n print(\"6 - Show histogram of temperatures\")\n print(\"7 - Quit\")\n print()" ]
[ "0.76196367", "0.75381", "0.7521898", "0.74221736", "0.70497966", "0.6988888", "0.67294633", "0.6726015", "0.66863775", "0.66313416", "0.6611032", "0.66060996", "0.658206", "0.6521646", "0.6520005", "0.6460026", "0.63582134", "0.6333034", "0.6328714", "0.63178813", "0.63146925", "0.6292107", "0.6292107", "0.6289226", "0.6279352", "0.6263461", "0.6258552", "0.6257532", "0.62537545", "0.6223802", "0.62088126", "0.61727214", "0.61603683", "0.61454695", "0.6142156", "0.611732", "0.6100689", "0.60995495", "0.6083346", "0.60781115", "0.6052308", "0.6033255", "0.60306984", "0.60306984", "0.6014317", "0.60139424", "0.6008118", "0.60036486", "0.59917694", "0.5970283", "0.5951926", "0.5951893", "0.5946059", "0.5940746", "0.5921674", "0.5915114", "0.59085", "0.5906465", "0.5900799", "0.5895882", "0.58823997", "0.5868291", "0.5867654", "0.5861795", "0.5854991", "0.5851322", "0.58501375", "0.584635", "0.5842097", "0.58263725", "0.58241963", "0.58227247", "0.58060414", "0.57898587", "0.57883316", "0.5780544", "0.57642996", "0.57598186", "0.57574105", "0.5754217", "0.57442844", "0.5742442", "0.5741069", "0.5724017", "0.57137686", "0.5710341", "0.57062656", "0.57036203", "0.57012737", "0.5691947", "0.56900626", "0.5688925", "0.56797504", "0.56744844", "0.5673847", "0.56620073", "0.56422657", "0.5642122", "0.5641728", "0.5640702", "0.56387144" ]
0.0
-1
/menu should return information about accessible menu elements for authenticated user
def test_authenticated_user(self, fake_app, authenticated_user, jwt): result = fake_app.get(self.url, headers={'JWT': jwt}) assert result.json == { 'menu': [{ 'elements': [{ 'is_active': False, 'name': 'dashboard_home', 'text': 'Wallets', 'url': '#/dashboard' }], 'name': 'Dashboard' }] }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def menu():\n user_id = session[\"user_id\"]\n if not user_id:\n session.clear()\n redirect(\"/\")\n database = db.db_connect()\n user = g.user\n return render_template(\"menu.html\", username=user[\"username\"])", "def get_menus():\n\n pass", "def menu(self):\n menu = list()\n \n \n menu.extend([\n {\n 'title': 'Bootstrap Demo',\n 'href': self.request.route_url('bootstrap_demo'),\n 'icon': \"fa fa-twitter-square\"\n },\n {\n 'title': 'Jade Demo',\n 'href': self.request.route_url('jade_demo'),\n 'icon': \"fa fa-indent\"\n },\n ])\n if self.user:\n menu.extend([\n {\n 'title': 'Entities',\n 'icon': \"fa fa-bar-chart\",\n 'dropdown': [\n {\n 'title': 'All entities',\n 'href': self.request.route_url(\n 'entities',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n ),\n 'icon': \"fa fa-bar-chart\"},\n {\n 'title': 'CPTs',\n 'href': self.request.route_url(\n 'cpts',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'columnsearch'\n }\n ),\n }\n ]\n }\n ]),\n if self.user.has_admin:\n menu.append(\n {\n 'title': \"User Management\",\n 'icon': \"fa fa-users\",\n 'dropdown': [\n {\n 'title': 'User Overview',\n 'href': self.request.route_url(\n 'users',\n ext='html',\n _query={\n 'renderer': 'datatable',\n 'options': 'serverside-columnsearch'\n }\n ),\n 'icon': 'fa fa-users',\n },\n {\n 'title': 'Add User',\n 'href': self.request.route_url('user_create'),\n 'icon': 'fa fa-user-plus',\n }\n ]\n }\n )\n\n return menu", "def menu():\n logout_user()\n return render_template('menu.html')", "def get_menu_items():\n\n pass", "def get_all_menu():", "def get_menu(menu_name):\n\n pass", "def present_menu (self, menu, groupName = 'main'):\n \n if not hasattr (cherrypy.request, 'nav'):\n cherrypy.request.nav = {}\n\n if not groupName in cherrypy.request.nav:\n cherrypy.request.nav [groupName] = []\n \n for item in menu.items:\n cherrypy.request.nav [groupName].append (item)", "def createMenu():\n mType = -1\n if auth.is_logged_in() and auth.has_membership('administrador',auth.user.id):\n return menuAdmin\n elif auth.is_logged_in():\n return menuUser\n else:\n return menuPublic", "def menu(request):\n user = request.user\n # fase = Fase.objects.filter(id_Proyecto__id_proyecto=1).order_by('id_Fase')\n\n\n if( user.usuario.esta_aprobado):\n if user.has_perm('gestion.es_administrador'):\n return render(request,'Menu/MenuAdministrador.html')\n else:\n return render(request, 'Menu/Menu.html')\n else:\n registrarAuditoria(request.user ,'Inicio Menu en espera de aprobacion')\n return render(request, 'Menu/MenuEnEspera.html')", "def publicMenu(restaurant_id):\n\n restaurant = session.query(Restaurant).filter_by(id = restaurant_id).first()\n menuItems = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)\n creator = getUserInfo(restaurant.user_id)\n\n return render_template( 'publicMenu.html',\n menuItems = menuItems,\n restaurant = restaurant,\n creator= creator )", "def showMenu(restaurant_id):\n\n if 'access_token' not in flask_session:\n return logInRedirect()\n restaurant = session.query(Restaurant).filter_by(id = restaurant_id).first()\n user_id = getUserId(flask_session['email'],flask_session['google_plus_id'])\n if not restaurant.user_id == user_id:\n return redirect(url_for(\"publicMenu\",restaurant_id = restaurant_id))\n\n menuItems = session.query(MenuItem).filter_by(restaurant_id = restaurant_id)\n creator = getUserInfo(restaurant.user_id)\n\n return render_template( 'showMenu.html',\n restaurant = restaurant,\n menuItems = 
menuItems,\n creator = creator )", "def ask_for_full_menu():\n res = requests.get(url='http://127.0.0.1:5000/full_menu')\n return res.text", "def get_menu ( self, object ):\n return self.menu", "def get_menu() -> str:\n date = datetime.date.today()\n urls = generate_urls(date)\n menu_json = fetch_menu(urls)\n menu = extract_menu(menu_json, date)\n\n return menu", "def menu():\n return render_template('menu.html')", "def test_get_menu_when_not_set(self):\n\n\t\tres = self.login_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\t\n\t\tresponse = self.client().get('/api/v2/menu',\n\t\t\theaders={\"x-access-token\": access_token})\n\t\tself.assertEqual(response.status_code, 404)", "def test_menu():\n response = app.test_client().get('/v1/resources/menu/all')\n\n assert response.status_code == 200\n assert response.content_type == 'application/json'", "def get_menu_items(self):\n url = self.build_url(\"menus/\")\n res = get(url)\n if res.ok:\n return [x[\"item_name\"] for x in res.json()]\n return None", "def top_menu_items(request):\n return {\n 'top_menu_items': settings.TOP_MENU_ITEMS\n }", "def create_menu():", "def restaurantMenuPage(restaurant_id):\n restaurant = db_methods.searchResByID(restaurant_id)\n items = db_methods.getMenuItems(restaurant_id)\n creator = db_methods.getUserByResId(restaurant_id)\n if 'username' not in login_session or creator != login_session['user_id']:\n return render_template('publicmenu.html', items = items, \n restaurant = restaurant, creator = creator)\n else:\n user_id = login_session['user_id']\n return render_template('menu.html', items = items, restaurant = restaurant, \n creator = creator, user_id = user_id)", "def menus(self):\r\n return []", "def menu_auth(cls, **attr):\n\n auth = current.auth\n logged_in = auth.is_logged_in()\n settings = current.deployment_settings\n\n if not logged_in:\n request = current.request\n login_next = URL(args=request.args, vars=request.vars)\n if request.controller == \"default\" and \\\n request.function == \"user\" and \\\n \"_next\" in request.get_vars:\n login_next = request.get_vars[\"_next\"]\n\n self_registration = settings.get_security_registration_visible()\n if self_registration == \"index\":\n register = MM(\"Register\", c=\"default\", f=\"index\", m=\"register\",\n vars=dict(_next=login_next),\n check=self_registration)\n else:\n register = MM(\"Register\", m=\"register\",\n vars=dict(_next=login_next),\n check=self_registration)\n\n if settings.get_auth_password_changes() and \\\n settings.get_auth_password_retrieval():\n lost_pw = MM(\"Lost Password\", m=\"retrieve_password\")\n else:\n lost_pw = None\n\n menu_auth = MM(\"Login\", c=\"default\", f=\"openid_connect\", m=\"login\",\n _id=\"auth_menu_login\",\n vars=dict(_next=login_next), **attr)(\n MM(\"Login\", m=\"login\",\n vars=dict(_next=login_next)),\n register,\n lost_pw,\n )\n else:\n # Logged-in\n\n if settings.get_auth_password_changes():\n change_pw = MM(\"Change Password\", m=\"change_password\")\n else:\n change_pw = None\n\n menu_auth = MM(auth.user.email, c=\"default\", f=\"user\",\n translate=False, link=False, _id=\"auth_menu_email\",\n **attr)(\n MM(\"Logout\", m=\"logout\", _id=\"auth_menu_logout\"),\n MM(\"User Profile\", m=\"profile\"),\n MM(\"Personal Data\", c=\"default\", f=\"person\", m=\"update\"),\n MM(\"Contact Details\", c=\"pr\", f=\"person\",\n args=\"contact\",\n vars={\"person.pe_id\" : auth.user.pe_id}),\n #MM(\"Subscriptions\", c=\"pr\", f=\"person\",\n # args=\"pe_subscription\",\n # 
vars={\"person.pe_id\" : auth.user.pe_id}),\n change_pw,\n SEP(),\n MM({\"name\": current.T(\"Rapid Data Entry\"),\n \"id\": \"rapid_toggle\",\n \"value\": current.session.s3.rapid_data_entry is True},\n f=\"rapid\"),\n )\n\n return menu_auth", "def menu(self):\n return self._menu", "def create_menus( self ):", "def restaurant_menu(restaurant_id):\n restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()\n menu_list = session.query(MenuItem).filter_by(restaurant_id=restaurant_id).order_by(\"likes-dislikes desc\")\n user_info = helper.get_user_if_exists(login_session)\n return render_template('restaurantmenu.html',\n restaurant=restaurant,\n menu_list=menu_list,\n user_info=user_info)", "def get_menu_items(self) -> typing.List[typing.Tuple[str, typing.List[typing.Tuple[str, typing.Callable[[], None]]]]]: #this method is to be queried by the root frame when it is creating the menu bar at the top of the screen and needs options to put in it\n return []", "def navigation_menu(context):\n try:\n request = context['request']\n request_path = request.path\n except KeyError:\n request_path = ''\n return {\n 'shop_nav_enabled': settings.SHOP_NAV_ENABLED,\n 'request_path': request_path,\n }", "def main_menu(self):\n return self.sitemap", "def get_user_menu_item_info(self, menu_id):\n return self._user_menu_items[menu_id]", "def get_menus(id, api_key=API_KEY):\n path = \"/restaurant/\" + str(id) + \"/menuitems\"\n\n return request(API_HOST, path, api_key, url_params=None)['result']['data']", "def addMenu():\n mb.addAction(actionAccessories)\n actionAccessories.setVisible(True)", "def showMenu():\n print( \"1. Create New User\" )\n print( \"2. Authorize\" )\n print( \"3. Send SMS\" )\n print( \"4. Send Email\" )\n print( \"5. Get Recently Sent Message\" )\n print( \"6. 
Exit\" )", "def _MenuAboutToShow(self, menuName):\n menu = self._menus[menuName]\n context = self._contextProvider.GetMenuContext()\n for action in menu.actions():\n if action.isSeparator():\n continue\n actionData = action.data()\n if actionData and isinstance(actionData, MenuAction):\n actionData.Update(action, context)", "def test_get_menu_when_is_set(self):\n\n\t\tres = self.login_admin_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\tresponse = self.client().post(\n\t\t\t'/api/v2/meals', \n\t\t\t\theaders={\"x-access-token\": access_token},\n\t\t\t\tdata = json.dumps(\n\t\t\t\tself.meal_data) , content_type = 'application/json')\n\t\tself.assertEqual(response.status_code, 201)\n\n\t\tresponse = self.client().post('/api/v2/menu/1',\n\t\t\theaders={\"x-access-token\": access_token},\n\t\t\tdata = json.dumps(\n\t\t\t\tself.meal_data), content_type = 'application/json')\n\t\tself.assertEqual(response.status_code, 200)\n\n\t\tresults = self.client().get('/api/v2/menu',\n\t\t\theaders={\"x-access-token\": access_token})\n\t\tself.assertEqual(results.status_code, 200)\n\t\tresp = json.loads(results.data)\n\t\tretn = {'1': {'category': 'Snacks', 'meal_name': 'Burger', 'price': 400.0}}\n\t\tself.assertIn(retn, resp)", "def get_menu_items(request):\n\n cat_id = request.POST.get('cat_id', None)\n menu_items = MenuItem.objects.filter(category_id=cat_id)\n context = {\n 'menu_items': menu_items\n }\n menu_items_view = render_to_string('HotelMgmt/menu_items.html', context)\n\n return JsonResponse({'menu_items_view': menu_items_view})", "def loadMenu(self):\r\n show_empty_root_items = pos.config['menu', 'show_empty_root_items']\r\n show_disabled_items = pos.config['menu', 'show_disabled_items']\r\n self.mainToolbook.AssignImageList(pos.menu.il)\r\n \r\n for root in pos.menu.main.items:\r\n if not root.enabled and not show_disabled_items:\r\n continue\r\n enabled_children = [i for i in root.children if i.enabled]\r\n if show_disabled_items:\r\n children = root.children\r\n else:\r\n children = enabled_children\r\n # Hide empty menu root items\r\n if len(children) == 0 and not show_empty_root_items:\r\n continue\r\n page = self.getToolbookPage(children)\r\n self.mainToolbook.AddPage(imageId=root.image, page=page, select=False, text=root.label)\r\n page.Enable(root.enabled)# and len(enabled_children) != 0)\r", "def GetMenu(self):\n return self._menu", "def __admin_menu(self):\n log.debug(\"Displaying __admin_menu\")\n self.menu = TelegramMenu(\"config/comunda_admin_menu.bpmn\", self, \"MenuStart\")\n self.menu.admin_menu(\"MenuStart\", \"menu_admin_main_txt\")\n return", "def accessoriesMenu():\n pref = QtGui.QAction(mw)\n pref.setText(\"Command panel\")\n pref.setObjectName(\"CommandPanel\")\n pref.triggered.connect(onPreferences)\n try:\n import AccessoriesMenu\n AccessoriesMenu.addItem(\"CommandPanel\")\n except ImportError:\n a = mw.findChild(QtGui.QAction, \"AccessoriesMenu\")\n if a:\n a.menu().addAction(pref)\n else:\n mb = mw.menuBar()\n action = QtGui.QAction(mw)\n action.setObjectName(\"AccessoriesMenu\")\n action.setIconText(\"Accessories\")\n menu = QtGui.QMenu()\n action.setMenu(menu)\n menu.addAction(pref)\n\n def addMenu():\n \"\"\"Add accessories menu to the menu bar.\"\"\"\n toolsMenu = mb.findChild(QtGui.QMenu, \"&Tools\")\n if toolsMenu:\n toolsMenu.addAction(action)\n\n addMenu()\n mw.workbenchActivated.connect(addMenu)", "def restaurantMenu(restaurant_id):\n try:\n restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()\n 
menuItemsQuery = session.query(MenuItem).filter_by(restaurant_id=restaurant_id).order_by(MenuItem.course.asc()).all()\n\n menuItems = dict()\n for m in menuItemsQuery:\n l = menuItems.get(m.course, list())\n l.append(m)\n menuItems[m.course] = l\n\n return render_template('restaurantMenu.html', restaurant=restaurant, menuItems=menuItems)\n\n except exc.NoResultFound:\n return redirect(url_for('mainPage'))", "def get_app_menu(self): # real signature unknown; restored from __doc__\n pass", "def menu(request):\n cart = cartData(request)\n cart_items = cart['cart_items']\n # order = cart['order']\n # items = cart['items']\n # Get all our object\n products = BobaProduct.objects.all()\n # Dictionary to hold our products\n context = {\"products\": products, \"cart_items\": cart_items}\n return render(request, 'store/menu.html', context)", "def show_user_left_navi(context):\n li_html = u''\n user = context[\"request\"].user\n cur_path = context[\"request\"].path\n user_menu = gen_menu_tree(user)\n active_css = u'class=\"active\" '\n\n def _get_m():\n for m in user_menu:\n if m.has_key(\"path\"):\n if cur_path.startswith(m[\"path\"]):\n return m\n else:\n for c in m.get(\"children\",[]):\n if c.has_key(\"path\"):\n if c[\"path\"].startswith(cur_path):\n return m\n\n m = _get_m()\n if m:\n if m.has_key(\"path\"):\n li_html += u'<li class=\"nav-header\" %s><a href=\"%s\" >%s</a></li>' % (active_css,m['path'],m[\"name\"])\n\n else:\n _sub_html = u'<li class=\"nav-header\">%s</li>' % m['name']\n for c in m.get(\"children\",[]):\n if c.has_key(\"path\"):\n if c[\"path\"].startswith(cur_path):\n if cur_path == \"/\":\n if c['path'] == cur_path:\n _sub_html += u'<li %s><a href=\"%s\" >%s</a></li>' % (active_css,c['path'],c['name'])\n else:\n _sub_html += u'<li ><a href=\"%s\" >%s</a></li>' % (c['path'],c['name'])\n else:\n _sub_html += u'<li %s><a href=\"%s\" >%s</a></li>' % (active_css,c['path'],c['name'])\n else:\n _sub_html += u'<li><a href=\"%s\" >%s</a></li>' % (c['path'],c['name'])\n li_html += _sub_html\n\n return li_html", "def get_menu ( self, object, row ):\n return self.menu", "def getMenu(self, name):\n if self.__object is not None:\n return self.__object.getMenu(name)\n else:\n return None", "def show_page_list_accessible():\r\n\tpage_list = Page.objects.filter(in_nav=1).order_by('order')\r\n\treturn {'page_list': page_list}", "def __aboutToShowMenu(self):\n self.hgFetchAct.setEnabled(self.vcs.canPull())", "async def top_menu(self) -> None:\n return await self.relay(\"top_menu\")()", "def mainMenu(userid, args):\r\n if popuplib.isqueued(\"sourcerpg_rpgmenu\", userid):\r\n return\r\n popuplib.send('sourcerpg_rpgmenu', userid)", "def __help_menu(self):\n log.debug(\"Displaying __help_menu\")\n # Create a keyboard with the user help menu\n keyboard = [[telegram.KeyboardButton(self.loc.get(\"menu_guide\"))],\n [telegram.KeyboardButton(self.loc.get(\"menu_contact_shopkeeper\"))],\n [telegram.KeyboardButton(self.loc.get(\"menu_all_cancel\"))]]\n # Send the previously created keyboard to the user (ensuring it can be clicked only 1 time)\n self.bot.send_message(self.chat.id,\n self.loc.get(\"conversation_open_help_menu\"),\n reply_markup=telegram.ReplyKeyboardMarkup(keyboard, one_time_keyboard=True))\n # Wait for a reply from the user\n selection = self.__wait_for_specific_message([\n self.loc.get(\"menu_guide\"),\n self.loc.get(\"menu_contact_shopkeeper\")\n ], cancellable=True)\n # If the user has selected the Guide option...\n if selection == self.loc.get(\"menu_guide\"):\n # Send them the bot 
guide\n self.bot.send_message(self.chat.id, self.loc.get(\"help_msg\"))\n # If the user has selected the Order Status option...\n elif selection == self.loc.get(\"menu_contact_shopkeeper\"):\n # Find the list of available shopkeepers\n shopkeepers = self.session.query(db.Admin).filter_by(display_on_help=True).join(db.User).all()\n # Create the string\n shopkeepers_string = \"\\n\".join([admin.user.mention() for admin in shopkeepers])\n # Send the message to the user\n self.bot.send_message(self.chat.id, self.loc.get(\"contact_shopkeeper\", shopkeepers=shopkeepers_string))\n # If the user has selected the Cancel option the function will return immediately", "def getMenuOption():\n return menu_option", "def return_admin_list(request):\n del request\n return return_user_list(Administrador)", "def test_clear_menu_when_not_set(self):\n\n\t\tres = self.login_admin_user()\n\t\taccess_token = json.loads(res.data.decode())['access_token']\n\n\t\tresults = self.client().delete('/api/v2/menu',\n\t\t\theaders={\"x-access-token\": access_token})\n\t\tresponse = json.loads(results.data)\n\t\tself.assertEqual(results.status_code, 404)\n\t\tself.assertEqual(response[\"message\"], \"Menu not set yet!\")", "def navbar_user():\n return (\"machines\", render_to_string(\"deployments/navbar.html\"))", "def main_menu(self):\n menu_string = \"Main menu\\n\"\n menu_string += \"\\t1. Modify a list\\n\"\n menu_string += \"\\t2. Grade submenu\\n\"\n menu_string += \"\\t3. Search for something\\n\"\n menu_string += \"\\t4. Get a statistic\\n\"\n menu_string += \"\\t5. Undo/Redo\\n\"\n menu_string += \"\\t0. Exit\\n\"\n stop = False\n\n while not stop:\n command_list = \\\n {'0': self.__no_command,\n '1': self.__modify_submenu,\n '2': self.__grade_submenu,\n '3': self.__search_submenu,\n '4': self.__statistics_submenu,\n '5': self.__undo_submenu\n }\n command = self.__ui_read_command(menu_string)\n\n if command in command_list.keys():\n if command == '0':\n return\n else:\n command_list[command]()\n\n else:\n print(\"Invalid command!\")", "def test_normal(self, fake_app):\n result = fake_app.get(self.url)\n assert result.json == {\n 'menu': [{\n 'elements': [{\n 'is_active': False,\n 'name': 'not_logged_home',\n 'text': 'Home',\n 'url': '#/'\n }],\n 'name':\n 'Menu'\n }]\n }", "def catalog_menu(driver, open_login_page):\n return CatalogMenu(driver)", "def on_menu_connect():\n\n ServerSockets.get_menuitems_by_category(True)", "def test_no_permission(client):\n user = user_with_permissions()\n\n url = reverse(\"admin:index\")\n client.force_login(user)\n\n response = client.get(url)\n assert parse_sidemenu(response) == {\"Global\": [\"/en/admin/\"]}", "def getMenuItems(self, context, request):\n\n results = []\n url = context.absolute_url()\n can_tag = not sdct.IStructuredDocument.providedBy(context)\n can_untag = not can_tag\n \n if can_tag:\n results.append(\n { 'title' : \"Mark as a structured document\",\n 'description' : 'Mark the content as a structured document',\n 'action' : \"%s/@@sd.tagging\" % url,\n 'selected' : False,\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_tag',\n 'separator': None,\n 'class': ''\n },\n 'submenu' : None,\n }\n )\n else:\n layout = context.getLayout()\n results.append(\n { 'title' : \"Remove structured document options\",\n 'description' : 'Restore the content normal behavior',\n 'action' : \"%s/@@sd.untagging\" % url,\n 'selected' : False,\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_untag',\n 'separator': None,\n 'class': ''\n },\n 'submenu' : None,\n }\n )\n \n results.append(\n { 'title' : 
\"Document on one page\",\n 'description' : 'Change the display of the document',\n 'action' : (\"%s/@@sd.options?layout=@@sd.document.onepage\"\n % url),\n 'selected' : layout == '@@sd.document.onepage',\n 'icon' : u\"\",\n 'extra' : {'id': 'sd_document_onepage',\n 'separator': 'actionSeparator',\n 'class': ''\n },\n 'submenu': None,\n }\n )\n\n return results", "def render(self):\n menu = etree.Element('openbox_pipe_menu')\n \n walk(self.menuItems, menu)\n \n print etree.tostring(menu)", "def navigate_mainMenu():\r\n msg, flag = \"\", False\r\n try: \r\n 'Click on the main menu item in OMM home page'\r\n \r\n flag = ui_controls.button(get_obj_identifier('mnu_btn'))\r\n if flag:\r\n print \"Main menu icon in home page is clicked\"\r\n\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return flag, msg", "def event_menus(id):\n\n check_admin()\n event_id = Event.query.filter_by(id=id).all()\n menu_path = event_id[0].menus\n if menu_path != 'menus/':\n im = Image.open((menu_path))\n im.show()\n\n return render_template('admin/events/menus.html', action=\"View\",\n id =id, title=\"Menu\")", "def index():\n view_dict = get_opentree_services_method_urls(request)\n view_dict['maintenance_info'] = get_maintenance_info(request)\n if auth.is_logged_in():\n # user is logged in, filter to their own collections by default?\n pass\n else:\n # anonymous visitor, show unfiltered list?\n pass\n\n return view_dict", "def print_menu():\r\n print(\"==============================================\")\r\n print(\"What do you want to do now? \")\r\n print(\"==============================================\")\r\n print(\"Available options:\")\r\n i = 1\r\n for a in available_actions:\r\n if current_state in a[\"valid_states\"]:\r\n # Only hint about the action if the current state allows it\r\n print(\" %i) %s\" % (i, a[\"description\"]))\r\n i += 1\r\n print()", "def do_menu(parser, token):\n bits = token.split_contents()\n return RenderMenuNode()", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values. Python is cool.\n # Please feel free to change the menu and add options.\n print(\"\\n *** MENU ***\") \n menu = {\"n\": (\"Navigate\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"o\": (\"Obstacle count\", self.obstacle_count),\n \"s\": (\"Shy\", self.shy),\n \"f\": (\"Follow\", self.follow),\n \"c\": (\"Calibrate\", self.calibrate),\n \"q\": (\"Quit\", self.quit)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = str.lower(input(\"Your selection: \"))\n # activate the item selected\n menu.get(ans, [None, self.quit])[1]()", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values. 
Python is cool.\n # Please feel free to change the menu and add options.\n print(\"\\n *** MENU ***\") \n menu = {\"n\": (\"Navigate\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"o\": (\"Obstacle count\", self.obstacle_count),\n \"s\": (\"Shy\", self.shy),\n \"f\": (\"Follow\", self.follow),\n \"c\": (\"Calibrate\", self.calibrate),\n \"q\": (\"Quit\", self.quit)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = str.lower(input(\"Your selection: \"))\n # activate the item selected\n menu.get(ans, [None, self.quit])[1]()", "def initMenu(self, menu):\n menu.clear()\n \n self.subMenus = []\n \n adminMenu = QMenu(self.tr(\"Administration\"), menu)\n adminMenu.setTearOffEnabled(True)\n adminMenu.addAction(self.gitShowConfigAct)\n adminMenu.addAction(self.gitRepoConfigAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitReflogBrowserAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitCreateIgnoreAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitCreateArchiveAct)\n adminMenu.addSeparator()\n adminMenu.addAction(self.gitStatisticsAct)\n adminMenu.addAction(self.gitVerifyAct)\n adminMenu.addAction(self.gitHouseKeepingAct)\n self.subMenus.append(adminMenu)\n \n bundleMenu = QMenu(self.tr(\"Bundle Management\"), menu)\n bundleMenu.setTearOffEnabled(True)\n bundleMenu.addAction(self.gitBundleAct)\n bundleMenu.addSeparator()\n bundleMenu.addAction(self.gitBundleVerifyAct)\n bundleMenu.addAction(self.gitBundleListHeadsAct)\n bundleMenu.addSeparator()\n bundleMenu.addAction(self.gitBundleApplyFetchAct)\n bundleMenu.addAction(self.gitBundleApplyPullAct)\n self.subMenus.append(bundleMenu)\n \n patchMenu = QMenu(self.tr(\"Patch Management\"), menu)\n patchMenu.setTearOffEnabled(True)\n patchMenu.addAction(self.gitCheckPatchesAct)\n patchMenu.addAction(self.gitApplyPatchesAct)\n patchMenu.addSeparator()\n patchMenu.addAction(self.gitShowPatcheStatisticsAct)\n self.subMenus.append(patchMenu)\n \n bisectMenu = QMenu(self.tr(\"Bisect\"), menu)\n bisectMenu.setTearOffEnabled(True)\n bisectMenu.addAction(self.gitBisectStartAct)\n bisectMenu.addAction(self.gitBisectStartExtendedAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectGoodAct)\n bisectMenu.addAction(self.gitBisectBadAct)\n bisectMenu.addAction(self.gitBisectSkipAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectResetAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectLogBrowserAct)\n bisectMenu.addSeparator()\n bisectMenu.addAction(self.gitBisectCreateReplayAct)\n bisectMenu.addAction(self.gitBisectEditReplayAct)\n bisectMenu.addAction(self.gitBisectReplayAct)\n self.subMenus.append(bisectMenu)\n \n tagsMenu = QMenu(self.tr(\"Tags\"), menu)\n tagsMenu.setIcon(UI.PixmapCache.getIcon(\"vcsTag.png\"))\n tagsMenu.setTearOffEnabled(True)\n tagsMenu.addAction(self.vcsTagAct)\n tagsMenu.addAction(self.gitTagListAct)\n tagsMenu.addAction(self.gitDescribeTagAct)\n self.subMenus.append(tagsMenu)\n \n branchesMenu = QMenu(self.tr(\"Branches\"), menu)\n branchesMenu.setIcon(UI.PixmapCache.getIcon(\"vcsBranch.png\"))\n branchesMenu.setTearOffEnabled(True)\n branchesMenu.addAction(self.gitBranchAct)\n branchesMenu.addSeparator()\n branchesMenu.addAction(self.gitBranchListAct)\n branchesMenu.addAction(self.gitMergedBranchListAct)\n branchesMenu.addAction(self.gitNotMergedBranchListAct)\n branchesMenu.addAction(self.gitShowBranchAct)\n branchesMenu.addSeparator()\n 
branchesMenu.addAction(self.gitDeleteRemoteBranchAct)\n self.subMenus.append(branchesMenu)\n \n changesMenu = QMenu(self.tr(\"Manage Changes\"), menu)\n changesMenu.setTearOffEnabled(True)\n changesMenu.addAction(self.gitUnstageAct)\n changesMenu.addAction(self.vcsRevertAct)\n changesMenu.addAction(self.vcsMergeAct)\n changesMenu.addAction(self.gitCommitMergeAct)\n changesMenu.addAction(self.gitCancelMergeAct)\n \n remotesMenu = QMenu(self.tr(\"Remote Repositories\"), menu)\n remotesMenu.setTearOffEnabled(True)\n remotesMenu.addAction(self.gitRemotesShowAct)\n remotesMenu.addAction(self.gitRemoteShowAct)\n remotesMenu.addSeparator()\n remotesMenu.addAction(self.gitRemoteAddAct)\n remotesMenu.addAction(self.gitRemoteRenameAct)\n remotesMenu.addAction(self.gitRemoteChangeUrlAct)\n remotesMenu.addAction(self.gitRemoteCredentialsAct)\n remotesMenu.addAction(self.gitRemoteRemoveAct)\n remotesMenu.addAction(self.gitRemotePruneAct)\n \n cherrypickMenu = QMenu(self.tr(\"Cherry-pick\"), menu)\n cherrypickMenu.setIcon(UI.PixmapCache.getIcon(\"vcsGraft.png\"))\n cherrypickMenu.setTearOffEnabled(True)\n cherrypickMenu.addAction(self.gitCherryPickAct)\n cherrypickMenu.addAction(self.gitCherryPickContinueAct)\n cherrypickMenu.addAction(self.gitCherryPickQuitAct)\n cherrypickMenu.addAction(self.gitCherryPickAbortAct)\n \n stashMenu = QMenu(self.tr(\"Stash\"), menu)\n stashMenu.setTearOffEnabled(True)\n stashMenu.addAction(self.gitStashAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashBrowserAct)\n stashMenu.addAction(self.gitStashShowAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashApplyAct)\n stashMenu.addAction(self.gitStashPopAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashBranchAct)\n stashMenu.addSeparator()\n stashMenu.addAction(self.gitStashDropAct)\n stashMenu.addAction(self.gitStashClearAct)\n \n submodulesMenu = QMenu(self.tr(\"Submodules\"), menu)\n submodulesMenu.setTearOffEnabled(True)\n submodulesMenu.addAction(self.gitSubmoduleAddAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesInitAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateInitAct)\n submodulesMenu.addAction(self.gitSubmodulesDeinitAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesUpdateAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateRemoteAct)\n submodulesMenu.addAction(self.gitSubmodulesUpdateOptionsAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesSyncAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesListAct)\n submodulesMenu.addSeparator()\n submodulesMenu.addAction(self.gitSubmodulesStatusAct)\n submodulesMenu.addAction(self.gitSubmodulesSummaryAct)\n \n act = menu.addAction(\n UI.PixmapCache.getIcon(\n os.path.join(\"VcsPlugins\", \"vcsGit\", \"icons\", \"git.png\")),\n self.vcs.vcsName(), self._vcsInfoDisplay)\n font = act.font()\n font.setBold(True)\n act.setFont(font)\n menu.addSeparator()\n \n menu.addAction(self.gitFetchAct)\n menu.addAction(self.gitPullAct)\n menu.addSeparator()\n menu.addAction(self.vcsCommitAct)\n menu.addAction(self.gitPushAct)\n menu.addSeparator()\n menu.addMenu(changesMenu)\n menu.addMenu(stashMenu)\n menu.addSeparator()\n menu.addMenu(cherrypickMenu)\n menu.addSeparator()\n menu.addMenu(bundleMenu)\n menu.addMenu(patchMenu)\n menu.addSeparator()\n menu.addMenu(remotesMenu)\n menu.addMenu(submodulesMenu)\n menu.addSeparator()\n menu.addMenu(tagsMenu)\n menu.addMenu(branchesMenu)\n menu.addSeparator()\n 
menu.addAction(self.gitLogBrowserAct)\n menu.addSeparator()\n menu.addAction(self.vcsStatusAct)\n menu.addSeparator()\n menu.addAction(self.vcsDiffAct)\n menu.addAction(self.gitExtDiffAct)\n menu.addSeparator()\n menu.addAction(self.vcsSwitchAct)\n menu.addSeparator()\n menu.addMenu(bisectMenu)\n menu.addSeparator()\n menu.addAction(self.vcsCleanupAct)\n menu.addSeparator()\n menu.addAction(self.vcsCommandAct)\n menu.addSeparator()\n menu.addMenu(adminMenu)\n menu.addSeparator()\n menu.addAction(self.gitEditUserConfigAct)\n menu.addAction(self.gitConfigAct)\n menu.addSeparator()\n menu.addAction(self.vcsNewAct)\n menu.addAction(self.vcsExportAct)", "def show_menu():\n if not GD.gui.menu.item('Tools'):\n create_menu()", "def sellMenu(userid, args):\r\n buildSellMenu(userid)", "def test_get_menu(self):\n url = \"/get_menu\"\n data = {\n \"restaurant\": 1\n }\n response = app.test_client().post(url,\n json=data,\n content_type='application/json')\n assert response.status_code == 200, logging.error(\n \"Getting Menus Failed!\")\n logging.info(\"GET Menu Tested!\")", "def get_menu_for_display(self):\n \n game = self.game\n if not game.menu:\n return\n\n menu = game.menu\n if not menu.is_visible:\n return None\n \n return menu", "def listen_for_menu():\n\n pubsub=pgpubsub.connect(**app.pubsub_conn_det)\n channel = ServerSockets._set_up_change_notifier(\n pubsub.conn, \"menuitems\", set(ServerSockets.DbActions))\n pubsub.listen(channel)\n while True:\n for event in pubsub.events(yield_timeouts=True):\n if event is None:\n pass\n else:\n with app.app_context():\n ServerSockets.get_menuitems_by_category()", "def get_one_menu_option():", "def manage_permission_only(self, **kw):\n return dict(page='managers stuff')", "def manage_permission_only(self, **kw):\n return dict(page='managers stuff')", "def manage_permission_only(self, **kw):\n return dict(page='managers stuff')", "def manage_permission_only(self, **kw):\n return dict(page='managers stuff')", "def manage_permission_only(self, **kw):\n return dict(page='managers stuff')", "def show_user_left_navi_bootstrap3(context):\n li_html = u''\n user = context[\"request\"].user\n cur_path = context[\"request\"].path\n user_menu = gen_menu_tree(user)\n active_css = u'class=\"active\" '\n\n def _get_m():\n for m in user_menu:\n if m.has_key(\"path\"):\n if cur_path.startswith(m[\"path\"]):\n return m\n else:\n for c in m.get(\"children\", []):\n if c.has_key(\"path\"):\n if c[\"path\"].startswith(cur_path):\n return m\n\n m = _get_m()\n if m:\n if m.has_key(\"path\"):\n li_html += u'<li %s><a href=\"%s\">%s</a></li>' % (active_css, m['path'], m[\"name\"])\n\n else:\n _sub_html = u'<li class=\"disabled\"><a href=\"#\">%s</a></li>' % m['name']\n for c in m.get(\"children\", []):\n if c.has_key(\"path\"):\n if c[\"path\"].startswith(cur_path):\n if cur_path == \"/\":\n if c['path'] == cur_path:\n _sub_html += u'<li %s><a href=\"%s\" >%s</a></li>' % (active_css, c['path'], c['name'])\n else:\n _sub_html += u'<li ><a href=\"%s\" >%s</a></li>' % (c['path'], c['name'])\n else:\n _sub_html += u'<li %s><a href=\"%s\" >%s</a></li>' % (active_css, c['path'], c['name'])\n else:\n _sub_html += u'<li><a href=\"%s\" >%s</a></li>' % (c['path'], c['name'])\n li_html += _sub_html\n\n return li_html", "def show_privileges(self):\n\t\tprint(\"An administrator has following privileges: \")\n\t\tfor item in self.privileges:\n\t\t\tprint(item)", "def accessoriesMenu():\n pref = QtGui.QAction(mw)\n pref.setText(\"TabBar\")\n pref.setObjectName(\"TabBar\")\n 
pref.triggered.connect(onPreferences)\n try:\n import AccessoriesMenu\n AccessoriesMenu.addItem(\"TabBar\")\n except ImportError:\n a = mw.findChild(QtGui.QAction, \"AccessoriesMenu\")\n if a:\n a.menu().addAction(pref)\n else:\n mb = mw.menuBar()\n actionAccessories = QtGui.QAction(mw)\n actionAccessories.setObjectName(\"AccessoriesMenu\")\n actionAccessories.setIconText(\"Accessories\")\n menu = QtGui.QMenu()\n actionAccessories.setMenu(menu)\n menu.addAction(pref)\n\n def addMenu():\n \"\"\"Add accessories menu to the menu bar.\"\"\"\n mb.addAction(actionAccessories)\n actionAccessories.setVisible(True)\n\n addMenu()\n mw.workbenchActivated.connect(addMenu)", "def getMenuItem(self, event):\n return self.GetMenuBar().FindItemById(event.GetId())", "def stock(request):\n if not request.user.is_staff:\n return NO_PERMISSION\n return {}", "def show_help():\n if \"username\" in session.keys() and is_admin(session[\"username\"]):\n buttons = [\n NavButton(url_for(\"logout\"), \"Log Out\"),\n NavButton(url_for(\"show_main\"), \"Main\"),\n NavButton(url_for(\"show_admin\"), \"Administration\"),\n ]\n elif \"username\" in session.keys():\n buttons = [\n NavButton(url_for(\"logout\"), \"Log Out\"),\n NavButton(url_for(\"show_main\"), \"Main\"),\n ]\n else:\n buttons = [NavButton(url_for(\"index\"), \"Home\")]\n return render_template(\"help.html\", buttons=buttons)", "def main_menu(self) -> str:\n print(\" ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n print(\" MENU PRINCIPALE \")\n print(\n \"\\n\"\n \" ● 1 - Créer un tournoi ●\\n\"\n \" ● 2 - Ajouter des joueurs à un tournoi ●\\n\"\n \" ● 3 - Débuté ou continué un tournoi ●\\n\"\n \" ● 4 - Ajouter un nouveau joueur ●\\n\"\n \" ● 5 - Modifier classement d'un joueur ●\\n\"\n \" ● 6 - Menu Secondaire ●\\n\"\n \" ● 7 - Quitter l'application ● \"\n\n )\n print(\" ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n response = input(\"Choississez un chiffre pour naviguer dans le menu : \")\n return response", "def menu():\n print('\\n----------------------------- MENU ------------------------------')\n print('0 - EXIT PROGRAM | 10 - Create user')\n print('1 - Read temperature | 11 - Check users info')\n print('2 - Read air humidity | 12 - Update user infos')\n print('3 - Read soil humidity | 13 - Remove user')\n print('4 - Visualize the last record | 14 - Read both (temp. and umid.) 
')\n print('5 - Visualize all record | 15 - Delete record from collection by id')\n print('6 - Delete last record | 16 - *')\n print('7 - Delete all record | 17 - *')\n print('8 - Visualize insertions by user | 18 - *')\n print('C - CLEAR SCREEN | 19 - *')\n print('-----------------------------------------------------------------\\n')\n # * to be implemented", "def menu(self, venue_id, date):\n query = \"&date=\" + date\n response = self._request(V2_ENDPOINTS['MENUS'] + venue_id + query)\n return response", "def menu(self):\n ## This is a DICTIONARY, it's a list with custom index values\n # You may change the menu if you'd like to add an experimental method\n menu = {\"n\": (\"Navigate forward\", self.nav),\n \"d\": (\"Dance\", self.dance),\n \"c\": (\"Calibrate\", self.calibrate),\n \"t\": (\"test restore\", self.calibrate),\n \"s\": (\"Check status\", self.status),\n \"q\": (\"Quit\", quit_now)\n }\n # loop and print the menu...\n for key in sorted(menu.keys()):\n print(key + \":\" + menu[key][0])\n # store the user's answer\n ans = raw_input(\"Your selection: \")\n # activate the item selected\n menu.get(ans, [None, error])[1]()", "def test_json_menu_data_when_logged_in(self):\n self.login()\n test_project = Project.objects.create(title='TestTitle', short_name='TestShortName', status='open')\n test_note = Note.objects.create(title='TestNote', note='Testing', report=False, project=test_project)\n\n\t# Goto the json note menu\n resp = self.client.get(reverse('note-menu', kwargs={'project_id': test_project.id}))\n\n\t# Check there was a 200 response code\n self.assertEqual(resp.status_code, 200)\n\n\t# Check that the note is not returned\n self.assertContains(resp, test_note.title)\n self.assertContains(resp, \"\\\"name\\\": \\\"\" + test_note.title + \"\\\"\")\n self.assertContains(resp, \"\\\"id\\\": \" + str(test_note.id))", "def _createDisplayMenu(ned, menu):\n pass", "def restaurantMenuJson(restaurant_id):\n try:\n menuItems = session.query(MenuItem).filter_by(restaurant_id=restaurant_id).all()\n return jsonify(MenuItems=[menuItem.serialize for menuItem in menuItems])\n except exc.NoResultFound:\n return redirect(url_for('mainPage'))", "def getMenuNames(self):\n if self.__object is not None:\n return list(self.__menus.keys())\n else:\n return []", "def __populateMenu(self, name, menu):\n if name not in [\"Tools\", \"PluginTools\"]:\n return\n \n editor = e5App().getObject(\"ViewManager\").activeWindow()\n \n if name == \"Tools\":\n if not menu.isEmpty():\n menu.addSeparator()\n act = menu.addMenu(self.__menu)\n act.setEnabled(editor is not None)\n elif name == \"PluginTools\" and self.__mainActions:\n self.__mainActions[-1].setEnabled(editor is not None)", "def get_items(self):\n options = \"\"\n for item in self.menu:\n options += f\"{item.name}/\"\n return options", "def extract_menu(menu_json: dict, date: datetime.date) -> str:\n\n inner_menu = menu_json[-1]\n acf = inner_menu.get(\"acf\")\n\n date_string = f\"*Menu for {date}*\"\n story = prettify(acf.get(\"story\")).strip()\n menu_items = parse_menu_items(acf.get(\"menu_items\"))\n\n return \"\\n\".join([date_string] + [story] + menu_items)", "def report_menu(context, request, report, obj):\n if isinstance(obj, Addon):\n has_privs = False\n if request.user.is_authenticated and (\n acl.action_allowed(request, amo.permissions.STATS_VIEW) or\n obj.has_author(request.user)\n ):\n has_privs = True\n tpl = loader.get_template('stats/addon_report_menu.html')\n ctx = {\n 'addon': obj,\n 'has_privs': has_privs,\n }\n return 
jinja2.Markup(tpl.render(ctx))", "def get(self):\r\n self.checkSession(self.request.headers.get('Cookie'))\r\n try:\r\n self.insertMenu()\r\n self.getSentMSG(self.Session['userid'])\r\n except:\r\n self.response.out.write(\"Some error occured\")" ]
[ "0.71579736", "0.7013057", "0.69366163", "0.6936209", "0.6905796", "0.6846141", "0.6812014", "0.6768968", "0.67572683", "0.67403543", "0.67125106", "0.65609455", "0.6558285", "0.65495783", "0.6461598", "0.64251655", "0.6414871", "0.640194", "0.6364668", "0.63646626", "0.6344749", "0.6334116", "0.6319808", "0.6246206", "0.6166369", "0.6147041", "0.61087704", "0.60946673", "0.6090457", "0.6075057", "0.6049944", "0.60089356", "0.5995659", "0.5983873", "0.5934108", "0.592014", "0.59031945", "0.5898168", "0.58959", "0.5895614", "0.5880469", "0.5873575", "0.5849266", "0.58173466", "0.5796277", "0.579295", "0.57865524", "0.57742137", "0.5754252", "0.57480115", "0.5744551", "0.57404816", "0.5717962", "0.57167023", "0.5714511", "0.571404", "0.5712855", "0.5697777", "0.56966156", "0.56964296", "0.5678643", "0.5668773", "0.5668564", "0.5638856", "0.56320125", "0.56225383", "0.56083775", "0.56039345", "0.56006837", "0.56006837", "0.55704844", "0.55688155", "0.55674666", "0.5564004", "0.5562122", "0.55521", "0.5540024", "0.55383307", "0.55383307", "0.55383307", "0.55383307", "0.55383307", "0.5527636", "0.552729", "0.5519547", "0.5516568", "0.55156344", "0.5512551", "0.551029", "0.55018216", "0.5493846", "0.5487598", "0.5486342", "0.54825914", "0.54811996", "0.54804325", "0.5472147", "0.54655796", "0.5462042", "0.5456101", "0.5451539" ]
0.0
-1
Sample pytest test function with the pytest fixture as an argument.
def test_example(decorated_example): import visual_coding_2p_analysis
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pytest(context):\n exec_cmd = \"pytest\"\n run_cmd(context, exec_cmd)", "def pytest_generate_tests(metafunc):\n if \"retrospective\" in metafunc.fixturenames:\n metafunc.parametrize(\"retrospective\", [False, True])\n if \"test_type\" in metafunc.fixturenames:\n metafunc.parametrize(\"test_type\", [FILES_TEST, STATE_TEST])\n if \"raise_error\" in metafunc.fixturenames:\n metafunc.parametrize(\"raise_error\", [False, True])", "def test_bar_fixture(testdir):\n\n # create a temporary pytest test module\n testdir.makefile('hadoken.lp', \"\"\"\n% TEST: data-generation, two-in-one\np(1..3).\n\n% TEST qrule: rule-q, two-in-one\nq(X): p(X).\n \"\"\")\n\n # # run pytest with the following cmd args\n result = testdir.runpytest(\n '-p asptest',\n '--uid-tests-dir=examples',\n '-v'\n )\n\n # # fnmatch_lines does an assertion internally\n print(result.stdout.lines)\n result.stdout.fnmatch_lines([\n '*::test_block* PASSED*',\n ])\n\n # # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "def pytest_generate_tests(metafunc):\n from datastructures.tests._test_trees_data import \\\n ids, \\\n inputs, \\\n expected_list, \\\n expected_items_list, \\\n expected_tree, \\\n expected_items_tree, \\\n expected_len, \\\n expected_valid_BST, \\\n shuffled_inputs, \\\n is_equal\n\n if 'get_test_as_list_data' in metafunc.fixturenames:\n metafunc.parametrize('get_test_as_list_data',\n list(zip(inputs, expected_list)),\n ids=ids)\n\n if 'get_test_items_as_list_data' in metafunc.fixturenames:\n metafunc.parametrize('get_test_items_as_list_data',\n list(zip(inputs, expected_items_list)),\n ids=ids)\n\n if 'get_test_as_tree_data' in metafunc.fixturenames:\n metafunc.parametrize('get_test_as_tree_data',\n list(zip(inputs, expected_tree)),\n ids=ids)\n\n if 'get_test_items_as_tree_data' in metafunc.fixturenames:\n metafunc.parametrize('get_test_items_as_tree_data',\n list(zip(inputs, expected_items_tree)),\n ids=ids)\n\n if 'get_test_len_data' in metafunc.fixturenames:\n metafunc.parametrize('get_test_len_data',\n list(zip(inputs, expected_len)),\n ids=ids)\n\n if 'get_test_valid_BST_glassbox' in metafunc.fixturenames:\n metafunc.parametrize('get_test_valid_BST_glassbox',\n list(zip(inputs, expected_valid_BST)),\n ids=ids)\n\n if 'get_test_eq' in metafunc.fixturenames:\n metafunc.parametrize('get_test_eq',\n list(zip(inputs, shuffled_inputs, is_equal)),\n ids=ids)", "def fixture_example_data():\n import_example_data()", "def pytest_generate_tests(metafunc):\n if \"size1\" in metafunc.fixturenames and \"size2\" in metafunc.fixturenames:\n metafunc.parametrize(\n [\"size1\", \"size2\"], itertools.product([1, 4], [2, 8]))\n if \"lines\" in metafunc.fixturenames:\n metafunc.parametrize(\"lines\", [[], [\"line1\"], [\"line1\", \"line2\"]])", "def pytest_generate_tests(metafunc):\n\t\n\tif not metafunc.cls:\n\t\treturn\n\t\n\tinst = metafunc.cls()\n\t\n\tif 'valid' in metafunc.fixturenames:\n\t\tmetafunc.parametrize('valid', inst.valid)\n\t\n\tif 'invalid' in metafunc.fixturenames:\n\t\tmetafunc.parametrize('invalid', inst.invalid)", "def test_fixture_simple_patch(testdir):\n\n # create a temporary pytest test module\n testdir.makepyfile(\n \"\"\"\n import requests\n\n def test_simple(requests_mock):\n with requests_mock.patch('/api/test') as patch:\n patch.returns = requests_mock.good('hello')\n response = requests.get('https://test.api/api/test')\n assert response.text == 'hello'\n assert patch.was_called_once()\n \"\"\"\n )\n\n # run pytest with the following cmd args\n result = 
testdir.runpytest(\"-v\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"*::test_simple PASSED*\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "def pytest_generate_tests(metafunc):\n if \"expected_failure\" in metafunc.fixturenames:\n modpath = os.path.dirname(metafunc.module.__file__)\n pattern = os.path.join(modpath, \"RST???\", \"*.py\")\n metafunc.parametrize(\n \"expected_failure\",\n [os.path.relpath(p, modpath) for p in sorted(glob.glob(pattern))],\n )", "def pytest_generate_tests(metafunc):\n for param in ['env', 'browser', 'logging_level', 'env_file', 'name', 'jenkins_url', 'slack', 'output', 'email_retries',\n 'email_search_errors']:\n option_value = getattr(metafunc.config.option, param)\n if param in metafunc.fixturenames:\n metafunc.parametrize(param, [option_value], scope='session')", "def test_something():", "def pytest_generate_tests(metafunc):\n if \"maptype\" in metafunc.fixturenames:\n metafunc.parametrize(\"maptype\", ALL_ATTMAPS)", "def spec_tests():\n pass", "def pytest_generate_tests(metafunc):\n def get_schema_name(schema_path):\n \"\"\"Helper function to return the informative part of a schema path\"\"\"\n print(schema_path)\n path = os.path.normpath(schema_path)\n return os.path.sep.join(path.split(os.path.sep)[-3:])\n\n def create_schema_example_id(argval):\n \"\"\"Helper function to create test ID for schema example validation\"\"\"\n if argval[0] == '/':\n # ID for the first argument is just the schema name\n return get_schema_name(argval)\n else:\n # This will cause pytest to create labels of the form:\n # SCHEMA_NAME-example\n # If there are multiple examples within a single schema, the\n # examples will be numbered automatically to distinguish them\n return \"example\"\n\n if metafunc.function is test_validate_schema:\n metafunc.parametrize(\n 'schema_path',\n generate_schema_list(),\n # just use the schema name as a test ID instead of full path\n ids=get_schema_name)\n elif metafunc.function is test_schema_example:\n metafunc.parametrize(\n 'filename,example',\n generate_example_schemas(),\n ids=create_schema_example_id)", "def test_simple(testdir, pytest_params):\n testdir.makefile(\n \".feature\",\n simple=\"\"\"\n Feature: Simple feature\n Scenario: Simple scenario\n Given I have a bar\n \"\"\",\n )\n testdir.makepyfile(\n \"\"\"\n from pytest_bdd import scenario, given, then\n\n @scenario(\"simple.feature\", \"Simple scenario\")\n def test_simple():\n pass\n\n @given(\"I have a bar\")\n def bar():\n return \"bar\"\n\n @then(\"pass\")\n def bar():\n pass\n \"\"\"\n )\n result = testdir.runpytest_subprocess(*pytest_params)\n result.assert_outcomes(passed=1)", "def test_pytest():\n assert True", "def test_pytest():\n assert True", "def test(ctx):\n pass", "def pytest_generate_tests(metafunc):\n if (\"solver\" in metafunc.fixturenames\n and \"coefficients\" in metafunc.fixturenames):\n _parametrize_solver_coefficients(metafunc)", "def test_generate(monkeypatch, capsys):\n monkeypatch.setattr(sys, \"argv\", [\"\", \"generate\", os.path.join(PATH, \"generate.feature\")])\n main()\n out, err = capsys.readouterr()\n assert out == textwrap.dedent(\n '''\n # coding=utf-8\n \"\"\"Code generation feature tests.\"\"\"\n\n from pytest_bdd import (\n given,\n scenario,\n then,\n when,\n )\n\n\n @scenario('scripts/generate.feature', 'Given and when using the same fixture should not evaluate it twice')\n def test_given_and_when_using_the_same_fixture_should_not_evaluate_it_twice():\n 
\"\"\"Given and when using the same fixture should not evaluate it twice.\"\"\"\n\n\n @given('1 have a fixture (appends 1 to a list) in reuse syntax')\n def have_a_fixture_appends_1_to_a_list_in_reuse_syntax():\n \"\"\"1 have a fixture (appends 1 to a list) in reuse syntax.\"\"\"\n raise NotImplementedError\n\n\n @given('I have an empty list')\n def i_have_an_empty_list():\n \"\"\"I have an empty list.\"\"\"\n raise NotImplementedError\n\n\n @when('I use this fixture')\n def i_use_this_fixture():\n \"\"\"I use this fixture.\"\"\"\n raise NotImplementedError\n\n\n @then('my list should be [1]')\n def my_list_should_be_1():\n \"\"\"my list should be [1].\"\"\"\n raise NotImplementedError\n\n '''[\n 1:\n ].replace(\n u\"'\", u\"'\"\n )\n )", "def test_func():\n pass", "def test_by_variable():\n pass", "def pytest_generate_tests(metafunc):\n parent_conftest.pytest_generate_tests(metafunc, __file__)", "def test_request_items_runner_fixture(testdir):\n\n # create a temporary pytest test module\n testdir.makepyfile(\n \"\"\"\n def test_exists(request_items_runner):\n assert request_items_runner\n \"\"\"\n )\n\n # run pytest with the following cmd args\n result = testdir.runpytest(\"-v\")\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\"*::test_exists PASSED*\"])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "def pytest_generate_tests(metafunc):\n\n # test is setup or teardown - parametrize to all scenarios\n if metafunc.function.__name__ in [\"test_setup\", \"test_teardown\"]:\n metafunc.parametrize(\n \"scenario\", Scenario.scenarios.values())\n\n # parameterize test for each scenario it is included in\n else:\n metafunc.parametrize(\n \"scenario\", metafunc.cls._scenarios)", "def pytest_generate_tests(metafunc):\n if \"worker_type\" in metafunc.fixturenames:\n test_params = [[\"thread\", 1, 1], [\"thread\", 2, 2]]\n # if the OS is not Windows / OS X and python version > 2.7 then also do the multiprocess workers testing.\n if platform.system() not in [\"Windows\", \"Darwin\"] and sys.version_info >= (\n 2,\n 7,\n ):\n test_params.extend([[\"process\", 1, 1], [\"process\", 2, 2]])\n\n metafunc.parametrize(\n \"worker_type, workers_count, worker_sessions_count\", test_params\n )", "def test_1():", "def pytest_generate_tests_abstract(metafunc):\n if 'content' in metafunc.fixturenames:\n content = getattr(metafunc.function, '_content', None)\n if isinstance(content, list):\n metafunc.parametrize('content', [content])\n else:\n metafunc.parametrize('content', [[]])", "def test(func):\n register_tests(func, [func.__name__])", "def setup_fixtures(func):\n func = pytest.mark.usefixtures('smtp', 'mock_access_request', 'dummy_access_request')(func)\n func = pytest.mark.parametrize('mock_access_request',\n [{\n 'during_registration': True,\n 'during_registration_required': True,\n 'personal_data': PERSONAL_DATA\n }],\n indirect=True)(func)\n return func", "def setup_fixtures(func):\n func = pytest.mark.usefixtures('smtp', 'mock_access_request', 'dummy_access_request')(func)\n func = pytest.mark.parametrize('mock_access_request',\n [{\n 'during_registration': True,\n 'during_registration_required': True,\n 'personal_data': PERSONAL_DATA\n }],\n indirect=True)(func)\n return func", "def test(session) -> None:\n session.install(\".[test]\")\n session.run(\"pytest\", \"-n\", \"auto\", *session.posargs)", "def test_single_test_case():\n pass", "def template_for_test_functions():\r\n\r\n expected = \"\"\r\n actual = \"\"\r\n 
print_test_results(func, expected, actual)", "def pytest_generate_tests(self, metafunc):\n\n # function for pretty test name\n def id_func(x):\n return \"-\".join([f\"{k}={v}\" for k, v in x.items()])\n\n # get arguments for the test function\n funcarglist = metafunc.cls.params.get(metafunc.function.__name__, None)\n if funcarglist is None:\n return\n else:\n # equivalent of pytest.mark.parametrize applied on the metafunction\n metafunc.parametrize(\"fields\", funcarglist, ids=id_func)", "def test_T1():", "def test_T1():", "def test_examples():\n argv = [\"py.test\", \"-examples\"]\n assert get_sargs(argv) is None", "def test_dummy():", "def test_main():\n # Setup\n # Exercise\n # Verify", "def fixture_runner():\n return CliRunner()", "def pytest_generate_tests(metafunc):\n if not \"bpftrace_condition\" in metafunc.fixturenames:\n raise RuntimeError(\"Invalid test case.\")\n spec_file = metafunc.config.option.spec_file\n spec_dir = os.path.dirname(os.path.realpath(spec_file.name))\n spec = json.load(spec_file)\n conditions = []\n # Generate a list of conditions to evaluate\n for test_case in spec['cases']:\n bpftrace_vars = collect_test_results(test_case, spec_dir)\n for condition in test_case['conditions']:\n conditions.append((test_case['name'], condition, bpftrace_vars))\n\n # Parameterize the conditions so that the test function gets run for each condition\n # We also set the ids of the functions to be \"name: condition\" for better reporting\n metafunc.parametrize(\"bpftrace_condition\", conditions, ids=map(\n lambda c: f\"{c[0]}: {c[1]}\", conditions))", "def pytest_pyfunc_call(pyfuncitem):\n if 'use_mongo' in pyfuncitem.keywords:\n settings.STORAGE = {\n 'class': 'bot.storage.MongoStorage',\n 'host': 'localhost',\n 'port': 27017,\n 'db': 'test',\n }\n if 'async_test' in pyfuncitem.keywords:\n funcargs = pyfuncitem.funcargs\n loop = funcargs['loop']\n testargs = {arg: funcargs[arg]\n for arg in pyfuncitem._fixtureinfo.argnames}\n loop.run_until_complete(pyfuncitem.obj(**testargs))\n return True", "def test_fixture_json_api(testdir):\n\n # create a temporary pytest test module\n testdir.makepyfile(\n \"\"\"\n import requests\n import pytest\n\n def test_json(requests_mock):\n test_dict = {'a': 'b'}\n with requests_mock.patch('/api/test') as patch:\n patch.returns = requests_mock.good(test_dict).as_json()\n response = requests.get('https://test.api/api/test')\n assert response.json() == test_dict\n assert 'Content-Type' in response.headers\n assert response.headers['Content-Type'] == 'application/json'\n \"\"\"\n )\n\n result = testdir.runpytest(\"-v\")\n result.stdout.fnmatch_lines([\"*::test_json PASSED*\"])\n assert result.ret == 0", "def pytest_runtest_setup(item):\n if hasattr(item, 'fixturenames') and LOOP_KEY not in item.fixturenames:\n item.fixturenames.append(LOOP_KEY)", "def fixture_microbial_sample_name():\n return \"microbial_name_test\"", "def test_3():", "def test_2():", "def pytest_generate_tests(metafunc):\n testcases_file = metafunc.config._nrfu['testcase_dir'].joinpath(\n 'testcases-cabling.json')\n\n metafunc.parametrize('testcase',\n json.load(testcases_file.open()),\n ids=nrfu.name_test)", "def pytest_pyfunc_call(pyfuncitem):\n if _is_coroutine(pyfuncitem.function):\n loop = pyfuncitem.funcargs[LOOP_KEY]\n funcargs = pyfuncitem.funcargs\n testargs = {}\n for arg in pyfuncitem._fixtureinfo.argnames:\n testargs[arg] = funcargs[arg]\n loop.run_until_complete(\n loop.create_task(\n pyfuncitem.obj(**testargs)\n )\n )\n return True", "def tests():", "def 
test_dummy_test():\n pass", "def test_T01():", "def pytest_funcarg__generate_golden_data(request):\r\n return request.config.option.generate_golden_data", "def setUpFixture(self):\n pass", "def test_basic_execution(self):", "def run_suite(func):\n print 'testing ',func.__name__\n # create a TestSuite object\n suite = poc_simpletest.TestSuite()\n \n # test func on various inputs\n hand = tuple([])\n suite.run_test(func(hand, 6, 1), 5,\"Test #1:\")", "def test():", "def test():", "def fixtures():", "def pytest_pyfunc_call(pyfuncitem): # type:ignore\n outcome = yield\n try:\n outcome.get_result()\n except InvalidArgument as exc:\n pytest.fail(exc.args[0])", "def test(ctx, filter=\"*\", verbose=False):\n test_python(ctx, filter, verbose)", "def pytest_runtest_setup(item):\n marker = item.get_closest_marker(name=\"hoverfly\")\n if not marker:\n return\n\n ensure_simulation_dir(item.config)\n\n stateful = marker.kwargs.pop(\"stateful\", False)\n record = marker.kwargs.pop(\"record\", False)\n\n if set(marker.kwargs) - {\"name\"}:\n raise RuntimeError(f\"Unknown argments passed to @hoverfly: {marker.kwargs}\")\n\n if record:\n item.fixturenames.append(\"_stateful_simulation_recorder\" if stateful else \"_simulation_recorder\")\n else:\n item.fixturenames.append(\"_simulation_replayer\")", "def fixture_file(*args):\n with open(os.path.join(FIXTURE_DATA, *args)) as file:\n yield file.read()", "def test_T2():", "def test_T2():", "def run(context, path=\"\"):\n common.success(f\"Tests {path} running \")\n return start.run_python(\n context,\n f\"-m pytest {path}\"\n )", "def no_run(obj):\n return pytest.mark.usefixtures(\"no_run\")(obj)", "def test(args):\n try:\n import pytest # pylint: disable=unused-import\n except ImportError:\n raise KedroCliError(NO_DEPENDENCY_MESSAGE.format(\"pytest\"))\n else:\n python_call(\"pytest\", args)", "def test_5():", "def test_basic_plugin(pytester, otel_service):\n pytester.makepyfile(\n common_code\n + \"\"\"\ndef test_basic():\n time.sleep(5)\n pass\n\"\"\")\n assertTest(pytester, \"test_basic\", \"passed\", STATUS_CODE_OK, \"passed\", STATUS_CODE_OK)", "def test():\n current_directory = os.getcwd()\n os.chdir(PACKAGE_DIR)\n pytest.main()\n os.chdir(current_directory)", "def fixture_microbial_sample_id():\n return \"microbial_sample_test\"", "def test_passed():\n pass", "def pytest_can_run_together(item1, item2):", "def test(ctx):\n click.echo('testing')\n ctx.test()\n click.echo('done')", "def generate_tests(self, fixture):\n if fixture.startswith(\"splunk_searchtime_fields\"):\n yield from self.dedup_tests(\n self.fieldtest_generator.generate_tests(fixture),\n fixture\n )\n elif fixture.startswith(\"splunk_searchtime_cim\"):\n yield from self.dedup_tests(\n self.cim_test_generator.generate_tests(fixture),\n fixture\n )", "def runTest(self):\r\n self.setUp()", "def test_T3():", "def test_T3():", "def test_function(eso_from_data:callable):\n print('Testing of eso_from_data with hardcoded data')\n # for context, relation, concepts_source in SEQUENCE:\n eso_from_data(0, 'site', 'contains', 'animal')", "def create(cls,\n new_fixture_host, # type: Union[Type, ModuleType]\n test_func, # type: Callable\n param_union_name, # type: str\n argnames, # type: Sequence[str]\n i, # type: int\n argvalue, # type: Any\n id, # type: Union[str, Callable]\n hook=None, # type: Callable\n debug=False # type: bool\n ):\n # type: (...) 
-> SingleParamAlternative\n nb_params = len(argnames)\n param_names_str = '_'.join(argnames).replace(' ', '')\n\n # Create a unique fixture name\n p_fix_name = \"%s_%s_P%s\" % (test_func.__name__, param_names_str, i)\n p_fix_name = check_name_available(new_fixture_host, p_fix_name, if_name_exists=CHANGE, caller=parametrize)\n\n if debug:\n print(\" - Creating new fixture %r to handle parameter %s\" % (p_fix_name, i))\n\n # Now we'll create the fixture that will return the unique parameter value\n # since this parameter is unique, we do not parametrize the fixture (_create_param_fixture \"auto_simplify\" flag)\n # for this reason the possible pytest.param ids and marks have to be set somewhere else: we move them\n # to the alternative.\n\n # unwrap possible pytest.param on the argvalue to move them on the SingleParamAlternative\n has_pytestparam_wrapper = is_marked_parameter_value(argvalue)\n if has_pytestparam_wrapper:\n p_id = get_marked_parameter_id(argvalue)\n p_marks = get_marked_parameter_marks(argvalue)\n argvalue = get_marked_parameter_values(argvalue, nbargs=nb_params)\n if nb_params == 1:\n argvalue = argvalue[0]\n\n # Create the fixture. IMPORTANT auto_simplify=True : we create a NON-parametrized fixture.\n _create_param_fixture(new_fixture_host, argname=p_fix_name, argvalues=(argvalue,),\n hook=hook, auto_simplify=True, debug=debug)\n\n # Create the alternative\n argvals = (argvalue,) if nb_params == 1 else argvalue\n p_fix_alt = SingleParamAlternative(union_name=param_union_name, alternative_name=p_fix_name,\n argnames=argnames, param_index=i, argval=argvals, id=id,\n decorated=test_func)\n\n # Finally copy the custom id/marks on the ParamAlternative if any\n if has_pytestparam_wrapper:\n p_fix_alt = ParameterSet(values=(p_fix_alt,), id=p_id, marks=p_marks) # noqa\n\n return p_fix_alt", "def make_uri_fixture(name):\n # noinspection PyShadowingNames\n def func(conn):\n return conn.createURI('ex://' + name)\n func.__name__ = name\n return pytest.fixture(func, name=name)", "def local_test():\n pass", "def pytest_generate_tests(metafunc):\n if 'browser' in metafunc.fixturenames:\n if os.environ.get('E2E', 'no').lower() != 'yes':\n pytest.skip(\n \"End-to-end tests skipped because E2E environment variable \"\n \"was not set to 'yes'.\")\n\n # Parameterize test based on list of browsers.\n browsers = os.environ.get('E2E_WEBDRIVER_BROWSERS', 'Chrome').split()\n metafunc.parametrize('browser', browsers, indirect=True)", "def test(opt=\"qsx\"):\n if opt:\n opt = \"-\" + opt\n local(\"py.test %s tests/*.py\" % opt)", "def pytest_fixture_setup(fixturedef):\n if isasyncgenfunction(fixturedef.func):\n func = fixturedef.func\n\n strip_request = False\n if 'request' not in fixturedef.argnames:\n fixturedef.argnames += ('request',)\n strip_request = True\n\n def wrapper(*args, **kwargs):\n request = kwargs['request']\n\n if strip_request:\n del kwargs['request']\n\n if 'loop' not in request.fixturenames:\n raise Exception(\n \"Asynchronous fixtures must depend on the 'loop' fixture or \"\n \"be used in tests depending from it.\"\n )\n\n loop = request.getfixturevalue('loop')\n # for async generators, we need to advance the generator once,\n # then advance it again in a finalizer\n gen = func(*args, **kwargs)\n\n def finalizer():\n try:\n return loop.run_until_complete(gen.__anext__())\n except StopAsyncIteration: # NOQA\n pass\n\n request.addfinalizer(finalizer)\n return loop.run_until_complete(gen.__anext__())\n\n fixturedef.func = wrapper\n\n elif 
asyncio.iscoroutinefunction(fixturedef.func):\n func = fixturedef.func\n\n strip_request = False\n if 'request' not in fixturedef.argnames:\n fixturedef.argnames += ('request',)\n strip_request = True\n\n def wrapper(*args, **kwargs):\n request = kwargs['request']\n if 'loop' not in request.fixturenames:\n raise Exception(\n \"Asynchronous fixtures must depend on the 'loop' fixture or \"\n \"be used in tests depending from it.\"\n )\n\n loop = request.getfixturevalue('loop')\n\n if strip_request:\n del kwargs['request']\n\n return loop.run_until_complete(func(*args, **kwargs))\n\n fixturedef.func = wrapper\n\n else:\n return", "def test(self,*args,**kwargs):\n print(\"[TEMPLATE ENGINE] 'testing' function not implemented\")\n pass", "def test_data_in_param(self):", "def test_expt(doctest):", "def test_function(arg_1):\n return arg_1 * 2", "def test_4():", "def check_from_function(self):\n context = TestContext(session_context=ducktape_mock.session_context(),\n cls=DummyTest, function=DummyTest.test_function_description)\n assert context.description == \"function description\"", "def test_config_options_fixture(testdir):\n\n # create a temporary pytest test module\n testdir.makepyfile(\"\"\"\n def test_sth(pytestconfig):\n assert pytestconfig.option.leaks == \":\"\n \"\"\")\n\n # run pytest with the following cmd args in a subprocess\n # for some reason an in-process run reports leaks\n result = testdir.runpytest_subprocess(\n '-R', ':',\n '-v'\n )\n\n # fnmatch_lines does an assertion internally\n result.stdout.fnmatch_lines([\n '*::test_sth PASSED',\n ])\n\n # make sure that that we get a '0' exit code for the testsuite\n assert result.ret == 0", "def pytest_runtest_setup(item):\n if not item.originalname == \"test_setup\":\n scenario = scenario_re.match(item.name).groups()[0]\n if _scenario_setup_failed[scenario]:\n pytest.skip(f\"Setup for {scenario} failed, skipping...\")", "def test_twentythree_no_args():\n sys.argv = ['test']\n with pytest.raises(SystemExit):\n TwentyThree()", "def inner_test(param: int):\n self.assertEqual(param, 42)", "def runTest(self):\n self.setUp()\n self.test_ExtendSpine1()", "def test():\n pass", "def random_test(n_nightly_runs: int = 10, seed: Optional[int] = None):\n def convert_test_func(test_func: Callable):\n seeds = _get_seeds(n_nightly_runs=n_nightly_runs, seed=seed)\n\n def fixate_seed_and_yield_test_run(*args, seed, **kwargs):\n old_state = random.getstate()\n random.seed(seed)\n try:\n yield test_func(*args, seed=seed, **kwargs)\n except Exception:\n _print_seed(seed=seed, decorator_name='random_test')\n raise\n finally:\n random.setstate(old_state)\n # We need to use pytest.mark.parametrize rather than running the test in a for loop. If we\n # do the latter, pytest won't re-create the fixtures for each run.\n return pytest.mark.parametrize('seed', seeds)(_convert_function_to_function_or_coroutine(\n caller_func=fixate_seed_and_yield_test_run, callee_func=test_func))\n\n return convert_test_func", "def inner_test():\n pass" ]
[ "0.70208406", "0.6849156", "0.6716318", "0.6693665", "0.65544575", "0.6513483", "0.64935595", "0.645806", "0.64294404", "0.6421675", "0.6402381", "0.6400902", "0.63715297", "0.63665515", "0.63603586", "0.6353915", "0.6353915", "0.6350933", "0.6348224", "0.6341979", "0.63354605", "0.63190484", "0.6310217", "0.63094956", "0.63081974", "0.625219", "0.6233066", "0.6202118", "0.61991024", "0.6170579", "0.6170579", "0.6165284", "0.6144752", "0.6130364", "0.61121905", "0.6104784", "0.6104784", "0.6080483", "0.60592294", "0.6042204", "0.6020798", "0.600444", "0.5975065", "0.59396183", "0.5913713", "0.5905751", "0.589962", "0.5886928", "0.588621", "0.58709395", "0.5856339", "0.5853457", "0.58408594", "0.5828038", "0.5827938", "0.5826721", "0.5821691", "0.5804401", "0.5804401", "0.58020514", "0.57950205", "0.578283", "0.5776532", "0.57707", "0.5766282", "0.5766282", "0.57659143", "0.5751779", "0.57489204", "0.57479787", "0.57274944", "0.5720822", "0.5719528", "0.5711704", "0.5708343", "0.57025105", "0.5702134", "0.56756395", "0.5672235", "0.5672235", "0.5639554", "0.5637734", "0.5633362", "0.56314677", "0.56203467", "0.5604116", "0.560174", "0.5599888", "0.5594237", "0.55889684", "0.5581175", "0.55765116", "0.5575718", "0.55663556", "0.55567336", "0.55548155", "0.5553068", "0.55497247", "0.5544616", "0.5538989", "0.5535399" ]
0.0
-1
Given an int ploidy, return all possible genotypes, completely determined by ploidy.
def all_genotype(ploidy): return ["".join(comb) for comb in cwr("ACGT-", ploidy)]
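The document above calls a helper named cwr that is not defined in the record; a minimal runnable sketch in Python, assuming cwr is itertools.combinations_with_replacement (the usual abbreviation — this binding is an assumption, not stated in the record):

from itertools import combinations_with_replacement as cwr  # assumed: the cwr used in the record

def all_genotype(ploidy):
    # A genotype is an unordered multiset of `ploidy` alleles drawn from A, C, G, T and the gap symbol '-';
    # combinations_with_replacement over "ACGT-" therefore enumerates each possible genotype exactly once.
    return ["".join(comb) for comb in cwr("ACGT-", ploidy)]

# Example: a diploid (ploidy=2) space over 5 allele symbols has C(5+2-1, 2) = 15 genotypes.
print(all_genotype(2))       # ['AA', 'AC', 'AG', 'AT', 'A-', 'CC', ..., '--']
print(len(all_genotype(2)))  # 15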
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def genotype(args) :\n from genotyper import genotype_samples\n genotype_samples(args)", "def collapse_genotypes(pL,gL):\n if len(gL) < 2:\n return gL\n else:\n uniqueL = [] # list of unique genotypes relative to ploidy\n for g in gL:\n s = ''\n for i in xrange(len(pL)):\n s += ''.join(sorted(g[0:pL[i]]))\n g = g[pL[i]:]\n if s not in uniqueL:\n uniqueL.append(s)\n return uniqueL", "def get_all_possible_genotypes(self):\n # Get all genotypes.\n return mutations_to_genotypes(self.mutations, wildtype=self.wildtype)", "def genotypes(self):\n return self.data.genotypes.values", "def generate_genotype(self):\n genes = []\n for i in range(self.n_genes):\n genes.append(self.Gene(n_bases=self.n_bases))\n self.genes = genes", "def _build_genotypes(self):\n x = np.zeros(self.n)\n \n # Frequencies derived from HWE.\n num_hetero = 2 * self.maf * (1 - self.maf) * self.n\n num_homo_minor = self.maf ** 2 * self.n\n \n x[:num_hetero] = 1\n x[num_hetero:num_hetero+num_homo_minor] = 2\n np.random.shuffle(x)\n \n # Add noise for dosage values if needed.\n if self.dosage_var:\n x[x == 0] += np.abs(\n np.random.normal(0, self.dosage_var, len(x[x == 0]))\n )\n x[x == 1] += np.random.normal(0, self.dosage_var, len(x[x == 1]))\n x[x == 2] -= np.abs(\n np.random.normal(0, self.dosage_var, len(x[x == 2]))\n )\n\n # Mask some values if the call rate is not 1.\n if self.call_rate < 1:\n missing_rate = 1 - self.call_rate\n missing_number = missing_rate * self.n\n missing_idx = np.arange(0, self.n)\n np.random.shuffle(missing_idx)\n missing_idx = missing_idx[:missing_number]\n x[missing_idx] = np.nan\n \n return x", "def __generate_genotype(self):\n if len(self.genotype) < self.__individual_genotype_length:\n gene = ''\n \n while len(self.genotype) < self.__individual_genotype_length:\n gene = str(random.randint(0,1))\n \n self.genotype = self.genotype + gene", "def phenotypes(self):\n return self.data.phenotypes.values", "def calculate_genotype_probabilities(self):\n for name, member in self.members.items():\n member.genotype_probabilities = self.genotype_probabilities_of(name)", "def get_my_mutations(quality_cutoff, coverage_cutoff):\n\n # my_mutations = {}\n # with open('/home/perry/Projects/loh/working/murim.exome.aa_chg.vars') as f:\n # for line in f:\n # my_mutations[line.strip()] = True\n # return my_mutations\n\n bed_file = 'data/nimblegen/2.1M_Human_Exome_Annotation/2.1M_Human_Exome.bed'\n bed_chr2st2end, bed_chr2posLs = bed_tools.load_bed(bed_file, \n 'NimbleGen Tiled Regions')\n # NimbleGen Tiled Regions\n # Target Regions\n\n use_data_dir = '/home/perry/Projects/loh/data/all_non_ref_hg18/'\n all_somatic = {}\n all_inherited = {}\n cancer_qualities = mutations.get_consensus_qualities(use_data_dir + 'yusanT.ann')\n normal_qualities = mutations.get_consensus_qualities(use_data_dir + 'yusanN.ann')\n for exome in global_settings.exome_types:\n data_file = use_data_dir + exome\n inherited, somatic, murim = mutations.get_mutations(data_file, normal_qualities,\n cancer_qualities, quality_cutoff,\n False, coverage_cutoff)\n # only use the bed_tools NimbleGen\n # restriction for hg18 data\n for s in somatic['yusan']: \n chr, pos = s.split(':')\n if bed_tools.find_location_in_bed(chr, int(pos), \n bed_chr2posLs,\n bed_chr2st2end):\n all_somatic[s] = True\n for i in inherited['yusan']: \n chr, pos = s.split(':')\n if bed_tools.find_location_in_bed(chr, int(pos), \n bed_chr2posLs,\n bed_chr2st2end):\n all_inherited[i] = True\n return (set(all_somatic.keys()) & set(get_murim_covered(quality_cutoff)), 
set(all_inherited.keys()) & set(get_murim_covered(quality_cutoff)))", "def _process_genotypes(self, limit):\n if self.testMode:\n g = self.testgraph\n else:\n g = self.graph\n model = Model(g)\n line_counter = 0\n\n raw = '/'.join((self.rawdir, 'genotype'))\n logger.info(\"building labels for genotypes\")\n geno = Genotype(g)\n fly_tax = 'NCBITaxon:7227'\n with open(raw, 'r') as f:\n f.readline() # read the header row; skip\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n for line in filereader:\n line_counter += 1\n\n (genotype_num, uniquename, description, name) = line\n\n # if self.testMode is True:\n # if int(object_key) not in self.test_keys.get('genotype'):\n # continue\n\n # add the internal genotype to pub mapping\n genotype_id = 'MONARCH:FBgeno'+str(genotype_num)\n self.idhash['genotype'][genotype_num] = genotype_id\n\n if description == '':\n description = None\n\n if not self.testMode \\\n and limit is not None and line_counter > limit:\n pass\n else:\n if self.testMode and \\\n int(genotype_num) not in \\\n self.test_keys['genotype']:\n continue\n\n model.addIndividualToGraph(\n genotype_id, uniquename,\n Genotype.genoparts['intrinsic_genotype'],\n description)\n # we know all genotypes are in flies\n # FIXME we assume here they are in melanogaster,\n # but that isn't necessarily true!!!\n # TODO should the taxon be == genomic background?\n geno.addTaxon(fly_tax, genotype_id)\n genotype_iid = self._makeInternalIdentifier(\n 'genotype', genotype_num)\n model.addComment(\n genotype_id, genotype_iid)\n if name.strip() != '':\n model.addSynonym(genotype_id, name)\n\n return", "def get_gene_biotypes(db_path, table=Annotation):\n session = start_session(db_path)\n query = session.query(table.GeneBiotype).distinct()\n return {x[0] for x in query.all()}", "def simulate_genotype_calls(\n n_variant: int, n_sample: int, p: Tuple[float, float, float], seed: int = 0\n) -> DataArray:\n rs = np.random.RandomState(seed)\n # Draw genotype codes with provided distribution\n gt = np.stack(\n [\n rs.choice([0, 1, 2], size=n_sample, replace=True, p=p)\n for i in range(n_variant)\n ]\n )\n # Expand 3rd dimension with calls matching genotypes\n gt = np.stack([np.where(gt == 0, 0, 1), np.where(gt == 2, 1, 0)], axis=-1)\n return xr.DataArray(gt, dims=(\"variants\", \"samples\", \"ploidy\"))", "def get_missing_genotypes(self):\n return utils.get_missing_genotypes(\n self.genotypes,\n mutations=self.mutations\n )", "def possible_motifs_by_length(length, base_set=\"ACGU\"):\n args = [base_set for i in xrange(length)]\n for permutation in itertools.product(*args):\n yield \"\".join(permutation)", "def phenotypes(self):\n\t\treturn Phenotype.PhenotypesByPatient(self.id, self.host)", "def genes():\n return [\"b2935\", \"b0723\", \"b0451\"]", "def simple_genotype_matrix(n, p):\n genotypes = np.zeros(shape=(n, p))\n for item in range(0, p):\n genotypes[:, item] = np.random.binomial(1, np.random.uniform(0.1, 0.5, 1), n)\n\n return genotypes", "def reproduce(population:list):\n new_gen = []\n probs = []\n for p in population:\n probs.append(p[3])\n while len(new_gen) != len(probs):\n parents = selection(probs)\n son,eval_son,daughter,eval_daughter = xo(population[parents[0]][0],population[parents[0]][1], population[parents[1]][0],population[parents[1]][1],2)\n new_gen.append([son,eval_son])\n new_gen.append([daughter,eval_daughter])\n # mutation\n # lets say 5% of the population gets mutated\n how_many_to_mutate = int(NUM_OF_CHROMOZOMS * (1/100))\n t = [i for i in range(NUM_OF_CHROMOZOMS)]\n # 
choose percent of the population randomly, uniformly\n indices_to_mutate = choice(t, how_many_to_mutate, replace=False)\n for i in range(len(indices_to_mutate)):\n mutate(new_gen[indices_to_mutate[i]])\n\n evaluateAll(new_gen)\n return new_gen", "def variations():", "def download_genotype_data():\n print(\"downloading genotype data\")\n download_from_url(PSAM_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.psam\", desc=\"downloading psam\")\n download_from_url(PVAR_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pvar.zst\",\n desc=\"downloading pvar\")\n download_from_url(PGEN_PATH, dst=f\"{GENOTYPE_DATA_PATH}/{MERGED_GENOTYPE_FILE}.pgen.zst\",\n desc=\"downloading pgen\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pvar\")\n decompress_genotype_file(f\"{MERGED_GENOTYPE_FILE}.pgen\")", "def _get_genotypes(self, samples, records, switch):\n\n variant = np.zeros(len(samples))\n for idx, sample in enumerate(samples):\n try:\n gt = records.genotype(sample)['GT']\n except IndexError:\n print(\"something went wrong with:\")\n print('sample:', sample, 'variant:', records, '-- set value to missing')\n gt = '.'\n if gt == '.':\n gt = 0\n else:\n gt = re.split('\\||/', gt)\n gt = list(map(int, gt))\n variant[idx] = np.sum(gt)\n if switch:\n variant = np.abs(variant - 2)\n return variant", "def calc_genotype(self, arch_param):\n\n def _parse(weights, genos):\n gene = []\n n = 2\n start = 0\n for i in range(self.steps):\n end = start + n\n W = weights[start:end].copy()\n G = genos[start:end].copy()\n edges = sorted(range(i + 2),\n key=lambda x: -max(W[x][k] for k in range(len(W[x])) if G[x][k] != 'none'))[:2]\n for j in edges:\n k_best = None\n for k in range(len(W[j])):\n if G[j][k] != 'none':\n if k_best is None or W[j][k] > W[j][k_best]:\n k_best = k\n gene.append([G[j][k_best], i + 2, j])\n start = end\n n += 1\n return gene\n\n normal_param = np.array(self.darts_cfg.super_network.normal.genotype)\n reduce_param = np.array(self.darts_cfg.super_network.reduce.genotype)\n geno_normal = _parse(arch_param[0], normal_param[:, 0])\n geno_reduce = _parse(arch_param[1], reduce_param[:, 0])\n return [geno_normal, geno_reduce]", "def _genotype_updated(self):\n if self.data.get(\"GT\", None) is None:\n self.gt_alleles = None\n self.called = None\n self.ploidy = None\n else:\n self.gt_alleles = []\n for allele in ALLELE_DELIM.split(str(self.data[\"GT\"])):\n if allele == \".\":\n self.gt_alleles.append(None)\n else:\n self.gt_alleles.append(int(allele))\n self.called = all([al is not None for al in self.gt_alleles])\n self.ploidy = len(self.gt_alleles)", "def genotype(self):\n\t\tgenotype = \"\"\n\t\tfields = vars(self)\n\t\tfor name, field in fields.items():\n\t\t\tif isinstance(field, Pattern):\n\t\t\t\tgenotype += field.genotype()\n\t\t\telse:\n\t\t\t\tgenotype += str(field)\n\t\t\tgenotype += \"\\0\"\n\n\t\treturn genotype", "def make_oligos(protein_seq_files, wt_dna_fasta, amino_acid_range, primer_file, restriction_enzyme):\n wt_sequence, wt_protein_dict = parse_wt_sequences(wt_dna_fasta)\n protein_variants_objs = parse_protein_sequences(protein_seq_files)\n\n for variant in protein_variants_objs:\n dna_variant = convert_to_dna(variant, wt_protein_dict)\n\n oligo_seq_obj, dna_fragment_seq_obj, protein_fragment = add_flanking_nucleotides(\n dna_variant, variant, wt_sequence, primer_file, amino_acid_range, restriction_enzyme\n )\n checked_oligo = run_checks(oligo_seq_obj, dna_fragment_seq_obj, protein_fragment, restriction_enzyme)\n sys.stdout.write(\">%s\\n\" % 
dna_variant.name)\n sys.stdout.write(\"%s\\n\" % checked_oligo)", "def GoAnnot(prots, gos, onlyProts=False):\r\n with resources.open_text(\"autoprot.data\",\"Homo_sapiens.gene_info\") as d:\r\n geneInfo = pd.read_csv(d, sep='\\t')\r\n with resources.open_text(\"autoprot.data\",\"gene2go_alt\") as d:\r\n gene2go = pd.read_csv(d, sep='\\t')\r\n prots = pd.DataFrame(pd.Series([str(i).upper().split(';')[0] for i in prots]), columns=[\"Gene names\"])\r\n prots = prots.merge(geneInfo[[\"Symbol\", \"GeneID\"]], left_on=\"Gene names\", right_on=\"Symbol\", how='inner')\r\n \r\n prots = prots.merge(gene2go[[\"GeneID\", \"GO_ID\", \"GO_term\"]], on=\"GeneID\", how='inner')\r\n if onlyProts == True:\r\n for idx, go in enumerate(gos):\r\n if idx == 0:\r\n redProts = prots[\"Symbol\"][prots[\"GO_term\"].str.contains(go)]\r\n else:\r\n redProts = redProts.append(prots[\"Symbol\"][prots[\"GO_term\"].str.contains(go)])\r\n return redProts.drop_duplicates()\r\n else: \r\n for idx, go in enumerate(gos):\r\n if idx == 0:\r\n redProts = prots[prots[\"GO_term\"]==go]\r\n else:\r\n redProts = redProts.append(prots[prots[\"GO_term\"]==go])\r\n return redProts.drop_duplicates()", "def perm_vs_hyp():\n\n return [\"P\",\"P\",\"P\",\"P\",\"P\"]", "def n(self):\n return len(self.genotypes)", "def _get_prochirality(self):\n for atom in self.invarioms:\n atom.get_prochirality()\n atom.invariom.get_prochirality()", "def known_organisms():\n return [\"rat\"]", "def _get_samples_in_genotypes(self):\n with open_file(self.import_job.path_genotypes[0], \"rt\") as tsv:\n header = tsv.readline()[:-1].split(\"\\t\")\n first = tsv.readline()[:-1].replace('\"\"\"', '\"').split(\"\\t\")\n values = dict(zip(header, first))\n return list(json.loads(values[\"genotype\"]).keys())", "def create_guess_code(self, pegs):\n\t\tselected_pegs = random.sample(pegs, 4)\n\t\t\n\t\treturn selected_pegs", "def pymol_pocket(**opts):\n\n\n boolean_args = [\"constrain_radii\", \"subdivide\", \"protein_only\"]\n for arg in boolean_args:\n if not isinstance(opts.get(arg), bool):\n if opts.get(arg) in [\"True\", \"true\", \"t\", \"1\"]:\n opts[arg] = True\n elif opts.get(arg) in [\"False\", \"false\", \"f\", \"0\"]:\n opts[arg] = False\n else:\n logger.warning(\"Boolean argument {0} ({1}) not parsed correctly and reverting to default\".format(arg, opts[arg]))\n opts = configuration.clean_opts(opts)\n\n if opts.get(\"protein\") is None:\n if opts.get(\"prot_file\") is None:\n logger.error(\"No protein input: prot_file and protein inputs are empty\")\n raise ValueError\n else:\n logger.debug(\"Protein file already specified on disk; skipping protein processing.\")\n else:\n if opts.get(\"protein_only\"):\n opts[\"protein\"] = \"({0}) and (poly)\".format(opts.get(\"protein\"))\n\n if opts.get(\"ligand\") is not None:\n opts[\"protein\"] = \"({0}) and not ({1})\".format(opts.get(\"protein\"), opts.get(\"ligand\"))\n\n logger.debug(\"Final protein selection: {0}\".format(opts.get(\"protein\")))\n prot_atoms = cmd.count_atoms(opts.get(\"protein\"))\n if prot_atoms == 0:\n logger.error(\"No atoms included in protein selection--ending calculation\")\n return\n elif prot_atoms < 100:\n logger.warning(\"Only {0} atoms included in protein selection\".format(prot_atoms))\n\n cmd.save(opts.get(\"prot_file\"), opts.get(\"protein\"))\n logger.debug(\"Protein '{0}' saved to {1}\".format(opts.get(\"protein\"), opts.get(\"prot_file\")))\n\n if (opts.get(\"mode\") == \"specific\") and (opts.get(\"ligand\") is not None):\n cmd.save(opts.get(\"lig_file\"), 
opts.get(\"ligand\"))\n logger.debug(\"Ligand selection: {0}\".format(opts.get(\"ligand\")))\n\n if opts.get(\"coordinates\") is not None:\n opts[\"residue\"] = None\n else:\n if opts.get(\"residue\") is not None:\n opts[\"coordinates\"] = cmd.get_coords(\"{0} and sidechain\".format(opts.get(\"residue\")), 1)\n\n pockets, output_opts = identify.pocket_wrapper(**opts)\n\n display_pockets(pockets, **output_opts)\n return pockets, output_opts", "def novel_alleles(genes, test):\n\n for gene in genes:\n trunc = genes[gene][\"IsContigTruncation\"]\n\n if genes[gene][\"BlastResults\"] != None:\n \n match = genes[gene][\"CorrectMarkerMatch\"]\n subjaln = genes[gene][\"BlastResults\"][\"SubjAln\"]\n if not (trunc or match) and len(subjaln) > 0:\n yield gene, subjaln\n\n else:\n if genes[gene][\"ForwardPrimerBlastResult\"] != None:\n if genes[gene][\"ReversePrimerBlastResult\"] != None:\n if (not trunc) and len(genes[gene][\"Amplicon\"]) > 0:\n yield gene, genes[gene][\"Amplicon\"]", "def crossover(p1, p2):\n genotype = []\n \n #Your code here\n \n return {'genotype': genotype, 'fitness': None}", "def general_gantest(proba, nbr_qubits):\n for m in [4096, 2048]:\n for l in [1, 2, 3]:\n print(\"Easy mode results for m={} and l={}:\".format(m, l))\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=0, easy=True)\n print(\"\\n\")\n print(\"Distribution learning results for m={} and l={}:\".format(m, l))\n for d in [256, 512]:\n print(\"For \", d, \": \")\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=d, easy=False)\n print(\"Singleton learning results for m={} and l={}:\".format(m, l))\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=0, easy=False)", "def _process_phenotype(self, limit):\n\n if self.testMode:\n g = self.testgraph\n else:\n g = self.graph\n model = Model(g)\n raw = '/'.join((self.rawdir, 'phenotype'))\n logger.info(\"processing phenotype\")\n\n line_counter = 0\n\n with open(raw, 'r') as f:\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n f.readline() # read the header row; skip\n for line in filereader:\n (phenotype_id, uniquename, observable_id, attr_id, value,\n cvalue_id, assay_id) = line\n\n # 8505\tunspecified\n # 20142\tmesothoracic leg disc | somatic clone 87719 60468 60468 60468\n # 8507\tsex comb | ectopic 88877 60468 60468 60468\n # 8508\ttarsal segment\t83664 60468 60468 60468\n # 18404\toocyte | oogenesis stage S9\t86769 60468 60468 60468\n # for now make these as phenotypic classes\n # will need to xref at some point\n phenotype_key = phenotype_id\n phenotype_id = None\n phenotype_internal_id = self._makeInternalIdentifier(\n 'phenotype', phenotype_key)\n phenotype_label = None\n self.label_hash[phenotype_internal_id] = uniquename\n cvterm_id = None\n if observable_id != '' \\\n and int(observable_id) == 60468:\n # undefined - typically these are already phenotypes\n if cvalue_id in self.idhash['cvterm']:\n cvterm_id = self.idhash['cvterm'][cvalue_id]\n phenotype_id = self.idhash['cvterm'][cvalue_id]\n elif observable_id in self.idhash['cvterm']:\n # observations to anatomical classes\n cvterm_id = self.idhash['cvterm'][observable_id]\n phenotype_id = \\\n self.idhash['cvterm'][observable_id] + 'PHENOTYPE'\n if cvterm_id is not None and cvterm_id in self.label_hash:\n phenotype_label = self.label_hash[cvterm_id]\n phenotype_label += ' phenotype'\n self.label_hash[phenotype_id] = phenotype_label\n else:\n logger.info('cvtermid=%s not in label_hash', cvterm_id)\n\n else:\n 
logger.info(\n \"No observable id or label for %s: %s\",\n phenotype_key, uniquename)\n\n # TODO store this composite phenotype in some way\n # as a proper class definition?\n self.idhash['phenotype'][phenotype_key] = phenotype_id\n\n # assay_id is currently only \"undefined\" key=60468\n\n if not self.testMode and\\\n limit is not None and line_counter > limit:\n pass\n else:\n if phenotype_id is not None:\n # assume that these fit into the phenotypic uberpheno\n # elsewhere\n model.addClassToGraph(phenotype_id, phenotype_label)\n line_counter += 1\n\n return", "def verbrogentwogs(prot, twogs, prot_in_twogs):\n vt_lijst = []\n for x in prot_in_twogs:\n for y in prot_in_twogs:\n if indexfind(\"%s %s\" % (x, y), twogs):\n vt_lijst.append(\"%s %s\" % (x, y))\n vt_lijst.append(\"%s %s\" % (x, prot))\n vt_lijst.append(\"%s %s\" % (y, prot))\n return vt_lijst", "def get_potion_tests():\n potion_tests = []\n for pm in stones_and_potions.possible_potion_maps(\n precomputed_maps.get_perm_index_conversion()[1]):\n potion_tests.append(\n ([(pm.apply_inverse(l), l) for l in\n stones_and_potions.possible_latent_potions()],\n functools.partial(\n unity_python_conversion.to_potion_unity_properties,\n # It shouldn't matter what graph we use for testing this part.\n graph=graphs.create_graph_from_constraint(\n graphs.no_bottleneck_constraints()[0])),\n unity_python_conversion._potions_from_potion_unity_properties,\n lambda x: x, _make_tuple))\n return potion_tests", "def run_phaseg(locus_file, gam_file, vg_file, canu_alignments, true_haps):\n\trecombrate=1.26\n\tmax_coverage = 15\n\tall_heterozygous = False\n\tdistrust_genotypes = True\n\twith ExitStack() as stack:\n\t\tnode_seq_list, edge_connections = vg_graph_reader(vg_file)\n\t\tall_reads, alleles_per_pos, locus_branch_mapping = vg_reader(locus_file, gam_file, canu_alignments)\n\t\tall_positions = sorted(all_reads.get_positions())\n\t\tall_components = find_components(all_positions, all_reads)\n\t\tblocks = defaultdict(list)\n\t\tprint(\"all_components\")\n\t\tfor position, block_id in all_components.items():\n\t\t\tblocks[block_id].append(locus_branch_mapping[position][0][0][0])\n\t\tfor k,v in blocks.items():\n\t\t\tprint(k,v)\n\t\tprint(\"all_components\")\n\t\t\n\n\t\t#print(all_reads)\n\t\tselected_indices = readselection(all_reads, max_coverage)\n\t\tselected_reads = all_reads.subset(selected_indices)\n\n\t\t#selected_reads = slice_reads(all_reads, max_coverage)\n\t\t#print('positions from all reads')\n\t\t#print(len(all_reads.get_positions()))\n\t\tprint(\"reads after read-selection\")\n\t\tprint(len(selected_reads))\n\t\tprint(\"positions covered by atleast one read after read selection\")\n\t\tprint(len(selected_reads.get_positions()))\n\n\t\taccessible_positions = sorted(selected_reads.get_positions())\n\t\t\n\t\tprint(\"readset after read_selection\")\n\t\t#for read in selected_reads:\n\t\t\t#print(read.name)\n\t\tpedigree = Pedigree(NumericSampleIds())\n\t\t# compute the number of alleles at each position.\n\t\talleles_per_accessible_pos =[]\n\t\tgenotype_likelihoods = []\n\t\tfor pos in accessible_positions:\n\t\t\tif pos in alleles_per_pos:\n\t\t\t\tn_alleles = alleles_per_pos[pos] \n\t\t\t\tpossible_genotypes = n_alleles + ncr(n_alleles, 2)\n\t\t\t\tgenotype_likelihoods.append(None if all_heterozygous else PhredGenotypeLikelihoods([0]* possible_genotypes))\n\t\t# random input of genotypes, since distrust_genotypes is always ON.\n\t\tpedigree.add_individual('individual0', [0]* len(accessible_positions), 
genotype_likelihoods)\n\t\trecombination_costs = uniform_recombination_map(recombrate, accessible_positions)\n\t\t# Finally, run phasing algorithm\n\t\t#print(selected_reads)\n\t\tdp_table = PedigreeDPTable(selected_reads, recombination_costs, pedigree, distrust_genotypes, accessible_positions)\n\t\tsuperreads_list, transmission_vector = dp_table.get_super_reads()\n\n\t\tcost = dp_table.get_optimal_cost()\n\t\tprint(superreads_list[0])\n\t\t#print(cost)\n\t\tread_partitions = dp_table.get_optimal_partitioning()\n\t\t#print(read_partitions)\n\t\t\n\t\t## To generate the connected components and corresponding haplotypes.\n\t\tprint(\"in components\")\n\t\tf = open('whole_genome' + '.predicted_read_partionting.pred', 'w')\n\t\toverall_components = find_components(accessible_positions, selected_reads)\n\t\t\n\t\tread_partitions_dict ={}\n\t\tfor read, haplotype in zip(selected_reads, read_partitions):\n\t\t\tphaseset = overall_components[read[0].position] + 1\n\t\t\tprint(read.name, phaseset, haplotype, file=f)\n\t\t\tread_partitions_dict[read.name] = haplotype\n\t\t#phaset is blockid\n\n\t\tn_phased_blocks = len(set(overall_components.values()))\n\t\tall_phased_blocks = len(set(all_components.values()))\n\t\tprint('No. of phased blocks: %d', n_phased_blocks)\n\t\tlargest_component = find_largest_component(overall_components)\n\t\tprint('No. of blocks from all the reads: %d', all_phased_blocks)\n\t\tlargest_component_all_reads = find_largest_component(all_components)\n\t\tif len(largest_component) > 0:\n\t\t\tprint('Largest component contains %d variants',len(largest_component))\n\t\tif len(largest_component_all_reads) > 0:\n\t\t\tprint('Largest component contains %d variants',len(largest_component_all_reads))\n\t\t\n\t\t\n\t\t### To generate contig sequences\n\t\tsample = 0\n\t\tsuperreads, components = dict(), dict()\n\t\tsuperreads[sample] = superreads_list[0]\n\t\tcomponents[sample] = overall_components\n\t\t#generate_hap_contigs_based_on_canu(superreads_list[0], components[sample], node_seq_list, locus_branch_mapping, edge_connections, canu_alignments, vg_file)\n\t\t#generate_hap_contigs(superreads_list[0], overall_components, node_seq_list, locus_branch_mapping, edge_connections)\n\t\t\n\t\tnodes_in_bubbles =[]\n\t\twith stream.open(str(locus_file), \"rb\") as istream:\n\t\t\tfor data in istream:\n\t\t\t\tl = vg_pb2.SnarlTraversal()\n\t\t\t\tl.ParseFromString(data)\n\t\t\t\tfor i in range(0,len(l.visits)):\n\t\t\t\t\tnodes_in_bubbles.append(l.visits[i].node_id)\n\t\t\t\t#nodes_in_bubbles.append(l.snarl.end.node_id)\n\t\t\t\t#nodes_in_bubbles.append(l.snarl.start.node_id)\n\t\tedge_connections_tmp = defaultdict(list)\n\t\twith stream.open(str(vg_file), \"rb\") as istream:\n\t\t\tfor data in istream:\n\t\t\t\tl = vg_pb2.Graph()\n\t\t\t\tl.ParseFromString(data)\n\t\t\t\tfor j in range(len(l.edge)):\n\t\t\t\t\tfrom_edge = getattr(l.edge[j], \"from\")\n\t\t\t\t\t#if from_edge not in nodes_in_bubbles and l.edge[j].to not in nodes_in_bubbles:\n\t\t\t\t\tedge_connections_tmp[str(from_edge)].append(str(l.edge[j].to))\n\t\t\t\t\tedge_connections_tmp[str(l.edge[j].to)].append(str(from_edge))\n\n\n\t\t#generate_hap_contigs_based_on_canu(superreads, components, node_seq_list, locus_branch_mapping, edge_connections, canu_alignments, vg_file)\n\t\t#generate_hap_contigs_avgRL(superreads, components, node_seq_list, locus_branch_mapping, edge_connections, edge_connections_tmp, gam_file, read_partitions_dict, nodes_in_bubbles)\n\t\t\n\t\t# evaluation partition all the reads based on one 
iteration\n\t\t#print('partition all the reads based on haplotypes from one iteration')\n\t\t# Check here if you wanna do all reads or selected reads only\n\t\t#haplotag(superreads_list[0], selected_reads, overall_components, 1)\n\t\t\n\t\t#compute_read_partitioning_accuracy(\"true_partioning\")\n\n\n\n\t\t##generate_hap_contigs(superreads, components, node_seq_list, locus_branch_mapping, edge_connections)\n\t\t\n\t\t##For phasing accuracy, read true haps and generate corresponding superreads\n\t\t#all_reads_true, alleles_per_pos_true, locus_branch_mapping_true = vg_reader(locus_file, true_haps)\n\t\t# Finally, run phasing algorithm for true haplotypes\n\t\t#dp_table_true = PedigreeDPTable(all_reads_true, recombination_costs, pedigree, distrust_genotypes, accessible_positions)\n\t\t#superreads_list_true, transmission_vector_true = dp_table_true.get_super_reads()\n\t\t# to compute the phasing accuracy\n\t\t#true_haps = ReadSet()\n\t\t#for read in all_reads_true:\n\t\t\t#tmp_read = Read(read.name, 0, 0, 0)\n\t\t\t#for variant in read:\n\t\t\t\t#if variant.position in accessible_positions:\n\t\t\t\t\t#tmp_read.add_variant(variant.position, variant.allele, [10])\n\t\t\t#true_haps.add(tmp_read)\n\t\t#compare(superreads_list[0], true_haps, overall_components)\n\t\t## To perform iterative whatshap phasing\n\t\t#remaining_reads =[]\n\t\t#for read in all_reads:\n\t\t\t#remaining_reads.append(read.name)\n\t\t#prev_superreads = superreads_list[0]\n\t\t#for read in selected_reads:\n\t\t\t#remaining_reads.remove(read.name)\n\t\t#while len(remaining_reads)>0:\n\t\t\t#print('iteration')\n\t\t\t#iterative_reaset = ReadSet()\n\t\t\t#for read in all_reads:\n\t\t\t\t#if read.name in remaining_reads:\n\t\t\t\t\t#iterative_reaset.add(read)\n\n\t\t\t\t\n\t\t\t#selected_indices = readselection(iterative_reaset, max_coverage)\n\t\t\t#selected_reads = iterative_reaset.subset(selected_indices)\n\t\t\t#for read in prev_superreads:\n\t\t\t\t#selected_reads.add(read)\n\t\t\t\t#remaining_reads.append(read.name)\n\t\t\t#accessible_positions = sorted(selected_reads.get_positions())\n\t\t\t#selected_reads.sort()\n\t\t\t#pedigree = Pedigree(NumericSampleIds())\n\t\t\t## compute the number of alleles at each position.\n\t\t\t#alleles_per_accessible_pos =[]\n\t\t\t#genotype_likelihoods = []\n\t\t\t#for pos in accessible_positions:\n\t\t\t\t#if pos in alleles_per_pos:\n\t\t\t\t\t#n_alleles = alleles_per_pos[pos] \n\t\t\t\t\t#possible_genotypes = n_alleles + ncr(n_alleles, 2)\n\t\t\t\t\t#genotype_likelihoods.append(None if all_heterozygous else PhredGenotypeLikelihoods([0]* possible_genotypes))\n\t\t\t## random input of genotypes, since distrust_genotypes is always ON.\n\t\t\t#pedigree.add_individual('individual0', [0]* len(accessible_positions), genotype_likelihoods)\n\t\t\t#recombination_costs = uniform_recombination_map(recombrate, accessible_positions)\n\t\t\t## Finally, run phasing algorithm\n\t\t\t##print(selected_reads)\n\t\t\t#dp_table = PedigreeDPTable(selected_reads, recombination_costs, pedigree, distrust_genotypes, accessible_positions)\n\t\t\t#superreads_list, transmission_vector = dp_table.get_super_reads()\n\t\t\t#for read in selected_reads:\n\t\t\t\t#remaining_reads.remove(read.name)\n\t\t\t#prev_superreads = superreads_list[0]\n\t\t\t\n\t\t#print('I am final')\n\t\t#accessible_positions = sorted(all_reads.get_positions())\n\t\t#overall_components = find_components(accessible_positions, all_reads)\n\t\t#haplotag(superreads_list[0], all_reads, overall_components, \"all_iter\")\n\t\t#compare(superreads_list[0], 
superreads_list_true[0], overall_components)\n\t\t#print(superreads_list[0])\n\t\t\n\t\t#iterative whatshap for sparse matrices where we fix the phasing for variants at each iteration that reach max coverage.", "def getmulticombos(peeps):\n\n\tret = []\n\n\tfor p in peeps:\n\t\tu,s = getcombos(p)\n\n\t\tbestu = getbesttriplet(u)\n\t\tbests = getbesttriplet(s)\n\n\t\tret.append((bestu, bests))\n\n\treturn ret", "def __generate_random_gene_sequence(self):\n genes = []\n for j in range(self.chromosome_size):\n genes.append(random.choice(self.gene_pool))\n\n return genes", "def generateDichotomies(phenotype, phenotype_frame, data_samples, reverse_sort = False):\n phenotype_map = phenotype_frame[phenotype]\n phenotype_is_categorical = True\n for element in set(phenotype_map.values):\n try:\n float_element = float(element)\n if not float_element.is_integer():\n phenotype_is_categorical = False\n except ValueError:\n pass\n if phenotype_is_categorical:\n phenotype_categories = list(set(phenotype_map.values))\n phenotype_categories.sort(reverse = reverse_sort)\n for index in range(len(phenotype_categories)):\n if index == 1 and len(phenotype_categories) == 2:\n break\n signature_name = \"%s=%s_SIGNATURE\" % (phenotype, phenotype_categories[index])\n positive_samples = list(set(phenotype_map[phenotype_map == phenotype_categories[index]].index) & set(data_samples))\n negative_samples = list(set(phenotype_map[phenotype_map != phenotype_categories[index]].index) & set(data_samples))\n yield(signature_name, positive_samples, negative_samples)\n #### add method for dealing with either median or mean splits of dichotomies\n #### what if we want to split by median or mean for integer counts?", "def define_geotype(x):\n if x['population_km2'] > 2000:\n return 'urban'\n elif x['population_km2'] > 1500:\n return 'suburban 1'\n elif x['population_km2'] > 1000:\n return 'suburban 2'\n elif x['population_km2'] > 500:\n return 'rural 1'\n elif x['population_km2'] > 100:\n return 'rural 2'\n elif x['population_km2'] > 50:\n return 'rural 3'\n elif x['population_km2'] > 10:\n return 'rural 4'\n else:\n return 'rural 5'", "def test_enumerating_protomers(self):\n\n mol = Molecule.from_smiles(\"Oc2ccc(c1ccncc1)cc2\")\n\n # there should be three protomers for this molecule so restrict the output\n protomers = mol.enumerate_protomers(max_states=2)\n\n assert mol not in protomers\n assert len(protomers) == 2\n\n # now make sure we can generate them all\n protomers = mol.enumerate_protomers(max_states=10)\n\n assert mol not in protomers\n assert len(protomers) == 3\n\n # make sure each protomer is unique\n unique_protomers = set(protomers)\n assert len(protomers) == len(unique_protomers)", "def test_enumerating_protomers(self):\n\n mol = Molecule.from_smiles(\"Oc2ccc(c1ccncc1)cc2\")\n\n # there should be three protomers for this molecule so restrict the output\n protomers = mol.enumerate_protomers(max_states=2)\n\n assert mol not in protomers\n assert len(protomers) == 2\n\n # now make sure we can generate them all\n protomers = mol.enumerate_protomers(max_states=10)\n\n assert mol not in protomers\n assert len(protomers) == 3\n\n # make sure each protomer is unique\n unique_protomers = set(protomers)\n assert len(protomers) == len(unique_protomers)", "def gen_games(nlarge=range(5)):\n\n # Form the pool of small number cards\n sm_options = ''\n for i in range(10):\n sm_options += str(i) + str(i)\n # Form the pool of large number cards\n lg_options = 'abcd'\n\n for num_large in nlarge:\n num_small = 6 - num_large\n for 
sm_sel in permutations(sm_options, num_small):\n for lg_sel in permutations(lg_options, num_large):\n s = list(sm_sel) + list(lg_sel)\n s.sort()\n yield(''.join(s))", "def Allcombos():\n\n global allcombos\n\n allcombos = []\n\n results = product(\"ABCDEF\", repeat=4)\n\n allcombos = resulttolist(results)\n\n return AIguessing(allcombos)", "def generate_random_gene_sequence(gene_pool):\n genes = []\n for j in range(DEFAULT_CHROMOSOME_SIZE):\n genes.append(random.choice(gene_pool))\n\n return genes", "def genotype(self, normal_primitives, reduce_primitives):\n\n def _parse_proxyless(weights, primitives):\n # Find the best op in this weight.\n k_best = np.argmax(weights, axis=1)\n return [primitives[k] for k in k_best]\n\n _parse = _parse_proxyless\n gene_normal = _parse(F.softmax(self.alphas_normal, dim=-1).data.cpu().numpy(), normal_primitives)\n gene_reduce = _parse(F.softmax(self.alphas_reduce, dim=-1).data.cpu().numpy(), reduce_primitives)\n alphas_normal = self.alphas_normal\n alphas_reduce = self.alphas_reduce\n genotype = Genotype(\n normal=gene_normal,\n reduce=gene_reduce,\n alphas_normal=alphas_normal,\n alphas_reduce=alphas_reduce,\n )\n return genotype", "def choosePairs(self):\n \n return([roulette_wheel(self.popStructure[i,]) for i in range(self.popStructure.shape[0])])", "def genotype_coeffs(genotype, order=None):\n if order is None:\n order = len(genotype)\n length = len(genotype)\n mutations = [i + 1 for i in range(length) if genotype[i] == \"1\"]\n params = [[0]]\n for o in range(1, order + 1):\n params += [list(z) for z in it.combinations(mutations, o)]\n return params", "def generatePopulations(num_pops):\n distGenerator = DistributionGenerator()\n populations = []\n for i in range(num_pops):\n dist_type = random.sample(config.dist_types, 1)[0]\n populations.append(distGenerator.generateDistributions(dist_type, config.MaxDistributionSize))\n return populations", "def get_paren_combos():\n results = [None] * 4\n options = [('%s', '(%s)')]\n for i in range(1, 4):\n results[i] = list(itertools.product(*(i * options)))\n return results", "def pick_gynodioecious_parents(simu, config):\n rng = simu.getRNG()\n runif = rng.randUniform\n rint = rng.randInt\n try:\n sstar = config.sstar\n H = config.H\n def compound_generator(pop):\n \"\"\"\n Picks up parent(s) under gynodioecy using compound parameters.\n \"\"\"\n gen = -1\n while True:\n ngen = pop.dvars().gen\n if gen != ngen:\n # At the beginning of a generation, extract the\n # sex-specific subpopulations from a parental\n # population. The sex-specific subpopulations are used\n # throughout mating events in one generation.\n gen = ngen\n h = pop.extractSubPops(subPops=[(0, 0)])\n f = pop.extractSubPops(subPops=[(0, 1)])\n Nh = h.popSize()\n Nf = f.popSize()\n\n if runif() < sstar: # uniparental\n yield h.individual(rint(Nh))\n else: # biparental\n if runif() < H: # having a hermaphroditic seed parent\n first, second = rint(Nh), rint(Nh)\n while first == second:\n second = rint(Nh)\n yield [h.individual(first), h.individual(second)]\n else: # female seed parent\n yield [h.individual(rint(Nh)), f.individual(rint(Nf))]\n return compound_generator\n except KeyError:\n a = config.a\n sigma = config.sigma\n tau = config.tau\n def fundamental_generator(pop):\n \"\"\"\n Picks up parent(s) under gynodioecy using fundamental parameters.\n \"\"\"\n gen = -1\n while True:\n ngen = pop.dvars().gen\n if gen != ngen:\n # At the beginning of a generation, extract the\n # sex-specific subpopulations from a parental\n # population. 
The sex-specific subpopulations are used\n # throughout mating events in one generation.\n gen = ngen\n h = pop.extractSubPops(subPops=[(0, 0)])\n f = pop.extractSubPops(subPops=[(0, 1)])\n Nh = h.popSize()\n Nf = f.popSize()\n hermseed = Nh / (Nh * Nf * sigma)\n\n if runif() < hermseed: # hermaphroditic seed parent\n if runif() < a: # self-pollen\n yield h.individual(rint(Nh))\n else: # non self-pollen\n first, second = rint(Nh), rint(Nh)\n while first == second:\n second = rint(Nh)\n yield [h.individual(first), h.individual(second)]\n else: # female seed parent\n if runif() < tau:\n yield [h.individual(rint(Nh)), f.individual(rint(Nf))]\n return fundamental_generator", "def removeBiotype(df):\n\tdf = df[ df.Biotype != 'IG_C_gene']\n\tdf = df[ df.Biotype != 'IG_D_gene']\n\tdf = df[ df.Biotype != 'IG_J_gene']\n\tdf = df[ df.Biotype != 'IG_V_gene']\n\tdf = df[ df.Biotype != 'pseudogene']\n\tdf = df[ df.Biotype != 'rRNA']\n\tdf = df[ df.Biotype != 'sRNA']\n\tdf = df[ df.Biotype != 'TR_C_gene']\n\tdf = df[ df.Biotype != 'TR_D_gene']\n\tdf = df[ df.Biotype != 'TR_J_gene']\n\tdf = df[ df.Biotype != 'TR_V_gene']\n\tdf = df[ df.Biotype != 'macro_lncRNA']\n\tdf = df[ df.Biotype != 'bidirectional_promoter_lncRNA']\n\tdf = df[ df.Biotype != '3prime_overlapping_ncRNA']\n\tdf = df[ df.Biotype != 'non_coding']\n\tdf = df[ df.Biotype != 'pseudogene']\n\tdf = df[ df.Biotype != 'TR_J_pseudogene']\n\tdf = df[ df.Biotype != 'IG_C_pseudogene']\n\tdf = df[ df.Biotype != 'IG_J_pseudogene']\n\tdf = df[ df.Biotype != 'IG_pseudogene']\n\tdf = df[ df.Biotype != 'TR_V_pseudogene']\n\tdf = df[ df.Biotype != 'polymorphic_pseudogene']\n\tdf = df[ df.Biotype != 'IG_V_pseudogene']\n\tdf = df[ df.Biotype != 'TEC']\n\tdf = df[ df.Biotype != 'Predictif']\n\tdf = df[ df.Biotype != 'ribozyme']\n\tdf = df[ df.Biotype != 'scRNA']\n\tdf = df[ df.Biotype != 'scaRNA']\n\tdf = df[ df.Biotype != 'snRNA']\n\tdf = df[ df.Biotype != 'snoRNA']\n\tdf = df[ df.Biotype != 'vaultRNA']\n\tdf = df[ df.Biotype != 'translated_processed_pseudogene']\n\treturn df", "def fromgenotype(self):\n\t\tpass", "def genes_GT():\n df1=pd.read_csv(config['geneInfo'], sep=\" \")\n df1=df1[df1.chr == '22']\n df2=pd.read_csv(config['counts'], sep=\" \")\n genes=df1.merge(df2.gene_id, on=\"gene_id\")\n return list(set(genes['gene_id']))", "def eval_genomes(population, conf):\n for (_, g) in population:\n eval_genome(g, conf)", "def get_variant_genotypes(self, variant):\n if not self.has_index:\n raise NotImplementedError(\"Not implemented when IMPUTE2 file is \"\n \"not indexed (see genipe)\")\n\n # Find the variant in the index\n try:\n impute2_chrom = CHROM_STR_TO_INT[variant.chrom.name]\n except KeyError:\n raise ValueError(\n \"Invalid chromosome ('{}') for IMPUTE2.\".format(variant.chrom)\n )\n\n variant_info = self._impute2_index[\n (self._impute2_index.chrom == impute2_chrom) &\n (self._impute2_index.pos == variant.pos)\n ]\n\n if variant_info.shape[0] == 0:\n logging.variant_not_found(variant)\n return []\n\n elif variant_info.shape[0] == 1:\n return self._get_biallelic_variant(variant, variant_info)\n\n else:\n return self._get_multialleic_variant(variant, variant_info)", "def test_iter_genotypes(self):\n with self.reader_f() as f:\n for g in f.iter_genotypes():\n variant_name = VARIANT_NAME_FIX.get(\n (truth.variant_to_key[g.variant], g.coded),\n truth.variant_to_key[g.variant],\n )\n\n expected = truth.genotypes[variant_name]\n self.assertEqual(expected, g)", "def get_all_pairs(idioms_set, power_expressions, main_words):\r\n print('getting all 
pairs...')\r\n all_pairs = [' '.join(pair) for pair in list(itertools.product(power_expressions, main_words)) if ' '.join(pair) not in idioms_set]\r\n list_to_file(all_pairs, 'all_pairs.txt')\r\n print('file all_pairs.txt created')\r\n return all_pairs", "def phenotype(indiv):\n pheno = [[id, problem['weights'][id], problem['values'][id]] for id in range(len(indiv)) if indiv[id] == 1]\n return pheno", "def phpcredits():\n raise NotImplementedError()", "def gen(length):\n return itertools.product(LABELS,repeat=length)", "def genes():\n data=pd.read_csv(config['stan'], sep=\" \")\n return list(set(data['Gene_id']))", "def generate_random_individual():\n genotype = []\n ### Your code here\n return {'genotype': genotype, 'fitness': None }", "def _extract_genos(df):\n\n def _extract_genotype(geno_field):\n \"\"\"Extract the genotype from a format field.\"\"\"\n # Assume the genotype is the first format field and raise if it's not\n geno = geno_field.split(':')[0]\n if not GENO_REGEX.search(geno):\n raise ValueError('\"{}\" does not look like a genotype'.format(geno))\n return geno\n\n # Genotype columns range from the 10th to the last one\n df.iloc[:, 9:] = df.iloc[:, 9:].applymap(_extract_genotype)", "def setupGenotypeDirectories(self):\n self.allGenosDir = \"%s/%s_all-genotypes\" % (os.getcwd(),\n self.project2Id)\n if not os.path.exists(self.allGenosDir):\n os.mkdir(self.allGenosDir)\n for genoId,iGeno in self.dGenos.items():\n dirGeno = \"%s/%s\" % (self.allGenosDir, genoId)\n iGeno.dir = dirGeno\n if not os.path.exists(dirGeno):\n os.mkdir(dirGeno)\n if self.verbose > 0:\n msg = \"genotype directories: %s\" % self.allGenosDir\n print(msg); sys.stdout.flush()", "def haplotype(self, ploidy):\n \n i = 0 # counter for ploidy\n final = [] #to store seq\n got_bubble = True\n \n while i < ploidy:\n \n frag = []\n longest_path = []\n\n sorted_node = self.topological_sort()\n # find greatest weight path\n path = self.longest_path(sorted_node)\n # last node in the graph\n curr_max = max(path, key=path.get)\n next_node = ''\n str_temp = ''\n\n # while it is not the source node\n while len(self.prefix[curr_max]) != 0:\n # prev max node\n prev = path[curr_max][1]\n\n # if current node longer than prev and next node\n if len(curr_max) >= len(prev) and len(curr_max) >= len(next_node):\n str_temp = curr_max \n\n # if current node shorter than prev and next node \n elif len(curr_max) < len(prev) and len(curr_max) < len(next_node):\n start = self.suffix[prev][curr_max][2]\n end = self.prefix[next_node][curr_max][1]\n str_temp = curr_max[start:end]\n\n # if current node shorter than prev but longer than the next node \n elif len(curr_max) < len(prev) and len(curr_max) >= len(next_node):\n start = self.suffix[prev][curr_max][2]\n str_temp = curr_max[start:]\n\n # if current node longer than prev but shorter than the next node \n else:\n end = self.prefix[next_node][curr_max][1]\n str_temp = curr_max[:end]\n\n frag.insert(0, str_temp)\n longest_path.insert(0, curr_max)\n next_node = curr_max\n curr_max = prev\n\n\n # if this is the first node\n if len(self.prefix[curr_max]) == 0:\n\n # if current longer than next node\n if len(curr_max) > len(next_node):\n frag.insert(0, curr_max)\n next_node = curr_max\n\n else: \n end = self.prefix[next_node][curr_max][1]\n str_temp = curr_max[:end]\n frag.insert(0, str_temp)\n next_node = curr_max\n \n longest_path.insert(0, curr_max) \n\n\n # combine all string\n seq = ''.join(frag)\n final.append(seq)\n \n got_bubble = self.reduce_graph(longest_path, sorted_node)\n if 
got_bubble is False:\n break\n i += 1\n \n \n return final", "def main():\n\n args = get_args()\n codons = {\n 'A': 4, 'C': 2, 'D': 2, 'E': 2, 'F': 2, 'G': 4, 'H': 2, 'I': 3,\n 'K': 2, 'L': 6, 'M': 1, 'N': 2, 'P': 4, 'Q': 2, 'R': 6, 'S': 6,\n 'T': 4, 'V': 4, 'W': 1, 'Y': 2, '*': 3,\n }\n print(product(map(codons.get, args.protein + '*')) % args.modulo)", "def _get_possible_sense_combinations(self, taggable, tagged):\n\t\tprint(\"\\tget possible combinations...\")\n\t\t# first create a list of the already tagged senses and store for each of those one list inside that contains the one single correct sense\n\t\ttagged_sense_keys = [[(token, token.wn_sense_key)] for token in tagged]\n\t\ttaggable_possible_sense_keys = []\n\n\t\t# for each token that has to be tagged now find all possible senses and collect them\n\t\tfor token in taggable:\n\t\t\ttoken_sense_pairs = []\n\t\t\t# for each possible sense of the token add one to the list of that sense\n\t\t\tpossible_senses = self._get_possible_wn_senses_for_token(token)\n\t\t\tfor single_possible_sense in possible_senses:\n\t\t\t\ttoken_sense_pairs.append((token, single_possible_sense))\n\t\t\ttaggable_possible_sense_keys.append(token_sense_pairs)\n\n\t\tcomplete_list_of_tokens = taggable_possible_sense_keys + tagged_sense_keys\n\n\t\tprint(\"\\t\\t...building combinations\")\n\t\t# return a dot product of the lists of possible senses of all tokens\n\t\treturn list_product(*complete_list_of_tokens)", "def pentakis(self):\n return self.nlegomena(5)", "def genes(self) -> Tuple[Gene, ...]:\n return tuple(self.influence_graph.genes)", "def read_legos(filename):\n \n all_legos = []\n for line in open(filename):\n line = line.strip(\"\\n\")\n lego_info = line.split(\",\")\n lego_type = lego_info[0].strip()\n lego_count = int(lego_info[1])\n for i in range(lego_count):\n all_legos.append(lego_type)\n return all_legos", "def test_parameterize_protein(\n self,\n toolkit_registry,\n force_field,\n ):\n\n mol_path = get_data_file_path(\"proteins/T4-protein.mol2\")\n molecule = Molecule.from_file(mol_path, allow_undefined_stereo=False)\n topology = Topology.from_molecules(molecule)\n\n labels = force_field.label_molecules(topology)[0]\n\n assert len(labels[\"Bonds\"]) == 2654\n assert len(labels[\"Angles\"]) == 4789\n assert len(labels[\"ProperTorsions\"]) == 6973\n assert len(labels[\"ImproperTorsions\"]) == 528\n\n force_field.create_openmm_system(\n topology,\n charge_from_molecules=[molecule],\n toolkit_registry=toolkit_registry,\n )", "def nextGen(self):\n\n p = []\n while len(p) < len(self.p):\n #select mates and produce offspring\n p1, p2 = self.select()\n offspring = self.mate(p1, p2)\n\n #put the offspring in the next generation (with mutation)\n for child in offspring:\n child=self.mutate(child)\n p.append(child)\n \n\n # the world belongs to the new generation\n return p", "def possible_subpeptides(self):\n ret = [\"\"]\n protein_len = len(self.protein)\n for l in range(1, protein_len):\n for i in range(protein_len):\n if i + l <= protein_len:\n ret += [self.protein[i : i+l]]\n else:\n ret += [self.protein[i:] + self.protein[:(i+l)%protein_len]]\n ret += [self.protein]\n return ret", "def main():\n parser = argparse.ArgumentParser(description='MergeGVCFs and genotype them using the GATK')\n parser.add_argument('-g', '--gatk', dest='gatk', help=\"Location of the GATK\", required=True)\n parser.add_argument('-x', '--xmx', dest='xmx', help=\"Memory to use with JAVA\", required=True)\n parser.add_argument('-c', '--cores', dest='cores', help=\"Number of 
cores to use\")\n parser.add_argument('-o', '--output', dest='output', \n help='Final output from the haplotype caller')\n parser.add_argument('-r', '--reference', dest='reference', \n help='Reference FASTA file')\n parser.add_argument('-b','--bed', dest='bed_file',\n help=\"Bed file for limiting the GATK\")\n parser.add_argument('-p', '--ploidy', dest='ploidy', \n help=\"Sample ploidy\", default=2)\n parser.add_argument('-d', '--out_directory', dest='directory', help='Output director')\n parser.add_argument('bams', nargs=\"*\", help='gVCF variant call files output from the GATK')\n args = parser.parse_args()\n args.cores = int(args.cores)\n args.xmx = args.xmx.strip('\"')\n print args.bams\n genovcfs = haplotype_caller(gatk=args.gatk, xmx=args.xmx, cores=args.cores,\n bams=args.bams, reference=args.reference,\n out_directory=args.directory, ploidy=args.ploidy, bed_file=args.bed_file)\n outputs = merge_gvcfs(gatk=args.gatk, xmx=args.xmx, cores=args.cores,\n gvcfs=genovcfs, reference=args.reference)\n genotype_gvcfs(gatk=args.gatk, xmx=args.xmx, cores=args.cores,\n inputs=outputs, output=args.output, reference=args.reference,bed_file=args.bed_file)\n #haplotype_single(gatk=args.gatk, xmx=args.xmx, cores=args.cores,\n # inputs=args.gvcfs, reference=args.reference)", "def genPopulation(self):\r\n self.population_list = []\r\n for i in xrange(0, self.pop_size):\r\n individual = bitarray(self.indv_size)\r\n # Loop for randomizing the 'individual' string.\r\n for j in xrange(0, self.board_size):\r\n vert_pos = random.randint(0, self.board_size-1)\r\n vert_pos_bitnum = toBitArray(vert_pos, self.pos_bits_size)\r\n # print \"\\t\\t\", j, vert_pos_bitnum, vert_pos\r\n for k in range(0, self.pos_bits_size):\r\n individual[j * self.pos_bits_size + k] = vert_pos_bitnum[k]\r\n self.population_list.append(individual)\r\n # print \"\\t\", i, individual\r", "def get_init_genotype_by_count(simu, nalleles):\n return (nalleles, simu.InitGenotype(prop=[1 / nalleles for _ in range(nalleles)]))", "def palues(self):\n return self[self.peys()]", "def palues(self):\n return self[self.peys()]", "def alleles(gt1, gt2):\n\n\t# Count genotypes 1 and 2\n\tcount = {}\n\tfor g in gt1 :\n\t\tcount[g] = count.get(g, 0) + 1\n\n\tfor g in gt2 :\n\t\tcount[g] = count.get(g, 0) + 1\n\n\t# Find mayor allele (we call it 'ref')\n\tmaxCount = 0\n\tref = ''\n\tfor g in count:\n\t\tif count[g] > maxCount:\n\t\t\tmaxCount = count[g]\n\t\t\tref = g\n\n\t# Find minor allele (we call these one 'alt')\n\talt = ''\n\tfor g in count:\n\t\tif g != ref and g != '0': alt = g\n\n\t# Create a genotype string (VCF style)\n\tgtstr = \"\"\n\tfor i in range(len(gt1)):\n\t\tgtstr += \"\\t\" + gtVcf(ref, alt, gt1[i]) + \"/\" + gtVcf(ref, alt, gt2[i])\n\n\treturn ref, alt, count[alt], gtstr", "def gen_input_permutation():\n return [(arch, src, dst) for arch in architecture.ARCH_ACCEPTED for src in PRODUCT_TYPE for dst in PRODUCT_TYPE]", "def mutations(self, mu):\n # make a copy of the data, and make it an integer\n new_alleles = np.copy(self.geno)\n\n # for an array of the same shape as newAlleles, draw mutations at each\n # position with probability mu.\n vals = np.random.binomial(1, mu, self.size * self.nloci * 2)\n mutate = np.reshape(vals, [ self.size, self.nloci, 2])\n mutate = (mutate == 1)\n # swap zeroes and ones.\n new_alleles[mutate] = 1 - new_alleles[mutate] \n\n # Apply to geno_probs\n new_geno_probs = calculate_geno_probs(new_alleles, mu=mu)\n\n output = genotypeArray(\n geno = new_alleles,\n geno_probs = new_geno_probs,\n names = 
self.names,\n mothers= self.mothers,\n fathers = self.fathers\n )\n\n return output", "def genotype(self, arch):\n backbone_r, neck_r = arch\n return (backbone_r.genotype, neck_r.genotype if neck_r is not None else None)", "def treatment_plugs(data_plugs):\r\n\r\n no_plugs = []\r\n little = []\r\n means = []\r\n great = []\r\n large_enough = []\r\n super_large = []\r\n\r\n\r\n for i in data_plugs:\r\n print(i[0])\r\n if i[1] in ('None', None)\\\r\n or i[0] in ('None', None):\r\n pass\r\n elif i[0] == 'non':\r\n no_plugs.append(int(i[1]))\r\n elif i[0] == 'petit':\r\n little.append(int(i[1]))\r\n elif i[0] == 'moyen':\r\n means.append(int(i[1]))\r\n elif i[0] == 'grand':\r\n great.append(int(i[1]))\r\n elif i[0] == 'assez grand':\r\n large_enough.append(int(i[1]))\r\n elif i[0] == 'tres grand':\r\n super_large.append(int(i[1]))\r\n\r\n\r\n data = len(no_plugs) + len(little) +\\\r\n len(means) + len(great)+\\\r\n len(large_enough)+\\\r\n len(super_large)\r\n\r\n print(data)\r\n\r\n data_no = moyenne(no_plugs)\r\n data_little = moyenne(little)\r\n data_means = moyenne(means)\r\n data_great = moyenne(great)\r\n data_large_enough = moyenne(large_enough)\r\n data_super_large = moyenne(super_large)\r\n\r\n return data_no[0],\\\r\n data_little[0],\\\r\n data_means[0],\\\r\n data_great[0],\\\r\n data_large_enough[0],\\\r\n data_super_large[0],\\\r\n data_no[1],\\\r\n data_little[1],\\\r\n data_means[1],\\\r\n data_great[1],\\\r\n data_large_enough[1],\\\r\n data_super_large[1],\\\r\n data", "def haplotype_caller(gatk, xmx, reference, bams, cores, out_directory, ploidy, bed_file=None):\n gvcfs = []\n bam_pairs = get_bam_pairs(bams)\n commands = []\n try:\n os.mkdir(out_directory)\n except OSError:\n pass\n for sample, bams in bam_pairs.items():\n output = os.path.join(out_directory, os.path.basename(sample + '.g.vcf'))\n command = HAPLOTYPE_CALLER.format(xmx, gatk, reference, output, ploidy)\n command = command + ' -I ' + ' -I '.join(bams) \n command = command + ' -bamout ' + output + \".bam\"\n if bed_file is not None:\n command = command + \" -L \" + bed_file\n commands.append(command)\n print command\n gvcfs.append(output)\n queue_jobs(commands, \"haplotypeCaller\", cores)\n return gvcfs", "def expected_inheritance(variant_obj):\n manual_models = set()\n for gene in variant_obj.get('genes', []):\n manual_models.update(gene.get('manual_inheritance', []))\n return list(manual_models)", "def simulate_generations(gene_pool, environment, gen=DEFAULT_GENERATIONS):\n seq_to_fitness = multiprocessing.Manager().dict()\n chromosomes = []\n fittest_chromosome = []\n\n for i in range(DEFAULT_POPULATION_SIZE):\n chromosomes.append(generate_random_gene_sequence(gene_pool))\n\n for i in range(gen):\n chromosomes, fittest_chromosome = simulate_generation(chromosomes,\n gene_pool,\n environment,\n seq_to_fitness)\n\n if i < gen - 1:\n chromosomes = delete_duplicates(chromosomes, gene_pool)\n\n return fittest_chromosome", "def get_formatted_genotypes(self, allele_ids, sample_id):\n\n allele_query = (\n self.session.query(\n genotype.Genotype.id,\n genotype.GenotypeSampleData.type,\n genotype.GenotypeSampleData.multiallelic,\n allele.Allele.change_from,\n allele.Allele.change_to,\n )\n .join(\n genotype.GenotypeSampleData,\n and_(\n genotype.Genotype.id == genotype.GenotypeSampleData.genotype_id,\n genotype.GenotypeSampleData.secondallele.is_(False),\n genotype.GenotypeSampleData.sample_id == sample_id,\n ),\n )\n .join(allele.Allele, allele.Allele.id == genotype.Genotype.allele_id)\n .filter(\n or_(\n 
genotype.Genotype.allele_id.in_(allele_ids),\n genotype.Genotype.secondallele_id.in_(allele_ids),\n )\n )\n )\n allele_query = allele_query.subquery()\n\n secondallele_query = (\n self.session.query(\n genotype.Genotype.id,\n genotype.GenotypeSampleData.type.label(\"second_type\"),\n genotype.GenotypeSampleData.multiallelic.label(\"second_multiallelic\"),\n allele.Allele.change_from.label(\"second_change_from\"),\n allele.Allele.change_to.label(\"second_change_to\"),\n )\n .join(\n genotype.GenotypeSampleData,\n and_(\n genotype.Genotype.id == genotype.GenotypeSampleData.genotype_id,\n genotype.GenotypeSampleData.secondallele.is_(True),\n genotype.GenotypeSampleData.sample_id == sample_id,\n ),\n )\n .join(allele.Allele, allele.Allele.id == genotype.Genotype.secondallele_id)\n .filter(\n or_(\n genotype.Genotype.allele_id.in_(allele_ids),\n genotype.Genotype.secondallele_id.in_(allele_ids),\n )\n )\n )\n secondallele_query = secondallele_query.subquery()\n\n genotype_query = self.session.query(\n allele_query.c.id,\n allele_query.c.type,\n allele_query.c.multiallelic,\n allele_query.c.change_from,\n allele_query.c.change_to,\n secondallele_query.c.second_type,\n secondallele_query.c.second_multiallelic,\n secondallele_query.c.second_change_from,\n secondallele_query.c.second_change_to,\n ).outerjoin(secondallele_query, allele_query.c.id == secondallele_query.c.id)\n\n genotype_candidates = genotype_query.all()\n\n genotype_id_formatted = dict()\n for g in genotype_candidates:\n gt1 = gt2 = None\n\n if g.type == \"No coverage\":\n gt1 = gt2 = \".\"\n genotype_id_formatted[g.id] = \"/\".join([gt1, gt2])\n continue\n\n if g.type == \"Homozygous\":\n gt1 = gt2 = g.change_to or \"-\"\n genotype_id_formatted[g.id] = \"/\".join([gt1, gt2])\n continue\n\n if g.second_type == \"Homozygous\":\n gt1 = gt2 = g.second_change_to or \"-\"\n genotype_id_formatted[g.id] = \"/\".join([gt1, gt2])\n continue\n\n # Many of these cases concern when the sample is not the proband,\n # and we're lacking some data.\n # If proband has secondallele, it must be heterozygous on both,\n # anything else doesn't make sense.\n # Note: multiallelic is True if there was a '.' 
in the genotype in vcf\n\n # If not multiallelic we can take the type at face value\n if not g.multiallelic:\n if g.type == \"Heterozygous\":\n gt1 = g.change_from or \"-\"\n gt2 = g.change_to or \"-\"\n elif g.type == \"Reference\":\n gt1 = gt2 = g.change_from or \"-\"\n\n elif g.second_multiallelic is not None and not g.second_multiallelic:\n if g.second_type == \"Heterozygous\":\n gt1 = g.second_change_from or \"-\"\n gt2 = g.second_change_to or \"-\"\n elif g.second_type == \"Reference\":\n gt1 = gt2 = g.second_change_from or \"-\"\n\n # If one or two are multiallelic, things gets a bit murkier\n else:\n if not g.second_type:\n if g.type == \"Heterozygous\":\n # Multiallelic, but no secondallele -> no data for one allele\n gt1 = g.change_to or \"-\"\n gt2 = \"?\"\n elif g.type == \"Reference\":\n # We cannot know whether we have one or no reference, so both are unknown.\n # This should very rarely happen\n gt1 = gt2 = \"?\"\n else:\n # Most of these are non-proband cases\n if g.second_type == \"Heterozygous\":\n # Check whether we have the other allele stored in db\n if g.type == \"Heterozygous\":\n gt1 = g.change_to or \"-\"\n gt2 = g.second_change_to or \"-\"\n elif g.type == \"Reference\":\n gt1 = g.second_change_to or \"-\"\n gt2 = \"?\"\n elif g.second_type == \"Reference\":\n if g.type == \"Heterozygous\":\n gt1 = g.change_to or \"-\"\n gt2 = \"?\"\n elif g.type == \"Reference\":\n gt1 = \"?\"\n gt2 = \"?\"\n\n assert gt1 is not None and gt2 is not None\n genotype_id_formatted[g.id] = \"/\".join([gt1, gt2])\n\n return genotype_id_formatted", "def shuffle_opacities(mutated_genome):\n mutated_genome", "def create_population(conf, verbose=False):\n if verbose:\n print(\"number of connections: \", int((N_HIDDEN + 3) * (N_HIDDEN + 1) * (1 - DROPOUT)))\n population = {}\n for i in range(POPULATION):\n g = create_genome(conf, i)\n population[i] = g\n\n return list(population.items())", "def get_all_permutations(self):\n params = []\n for branch in self.branches:\n if branch.branches == []:\n params.append(branch.param)\n else:\n params.extend(branch.get_all_permutations())\n return params", "def _samples(self):\n finite_types = \\\n [QuiverMutationType(t) for t in [['A', 1], ['A', 5], ['B', 2], ['B', 5],\n ['C', 3], ['C', 5], ['D', 2], ['D', 5],\n [\"E\", 6], [\"E\", 7], [\"E\", 8], [\"F\", 4],\n [\"G\", 2]]]\n affine_types = \\\n [QuiverMutationType(t) for t in [['A', [1,1], 1], ['A', [4,5], 1], ['D', 4, 1], ['BB', 5, 1]]]\n elliptic_types = \\\n [QuiverMutationType(t) for t in [['E', 6, [1,1]], ['E', 7, [1,1]]]]\n mutation_finite_types = \\\n [QuiverMutationType(t) for t in [['R2',(1,5)], ['R2',(3,5)]]]\n mutation_infinite_types = \\\n [QuiverMutationType(t) for t in [['E',10], ['BE',5], ['GR',(3,10)], ['T',(3,3,4)]]]\n\n return finite_types + affine_types + elliptic_types + mutation_finite_types + mutation_infinite_types", "def pomegranate(args):\n p = OptionParser(pomegranate.__doc__)\n opts, args, iopts = p.set_image_options(args, figsize=\"9x7\")\n\n if len(args) != 5:\n sys.exit(not p.print_help())\n\n seqidsfile, klayout, datafile, bedfile, slayout = args\n\n fig = plt.figure(1, (iopts.w, iopts.h))\n root = fig.add_axes([0, 0, 1, 1])\n\n Karyotype(fig, root, seqidsfile, klayout)\n Synteny(fig, root, datafile, bedfile, slayout)\n\n # legend showing the orientation of the genes\n draw_gene_legend(root, 0.42, 0.52, 0.48)\n\n labels = ((0.04, 0.96, \"A\"), (0.04, 0.52, \"B\"))\n panel_labels(root, labels)\n\n root.set_xlim(0, 1)\n root.set_ylim(0, 1)\n root.set_axis_off()\n\n pf = 
\"pomegranate-karyotype\"\n image_name = pf + \".\" + iopts.format\n savefig(image_name, dpi=iopts.dpi, iopts=iopts)", "def constituents(self, pnp=False):\n a = []\n for word in self.words:\n if pnp and word.pnp is not None:\n if len(a) == 0 or a[-1] != word.pnp:\n a.append(word.pnp)\n elif word.chunk is not None:\n if len(a) == 0 or a[-1] != word.chunk:\n a.append(word.chunk)\n else:\n a.append(word)\n return a", "def count_genotypes(genotypeList,StateGenPosData, x, y):\r\n allMos = 0\r\n nonEggs = 0\r\n Adults = 0\r\n for i in range(len(genotypeList)):\r\n gt = genotypeList[i]\r\n b = sum(1 for item in StateGenPosData if not 'new' in item[0] and not 'gestating' in item[0] and gt in item[1] and item[2]==(x,y))\r\n c = sum(1 for item in StateGenPosData if 'adult' in item[0] and 'XX' in item[1] and not 'gestating' in item[0] and gt in item[1] and item[2]==(x,y))\r\n d = sum(1 for item in StateGenPosData if 'adult' in item[0] and gt in item[1] and item[2]==(x,y))\r\n## for item in StateGenPosData:\r\n## print(item[0],item[1],item[2])\r\n## if 'adult' in item[0] and gt in item[1] and item[2]==(x,y):\r\n## d+=1\r\n## print('yay')\r\n## if not 'new' in item[0] and not 'egg' in item[0] and not 'gestating' in item[0] and gt in item[1] and item[2]==(x,y):\r\n## c+=1\r\n## if not 'new' in item[0] and not 'gestating' in item[0] and gt in item[1] and item[2]==(x,y):\r\n## b+=1\r\n allMos = allMos + b\r\n nonEggs = nonEggs + c\r\n Adults = Adults + d\r\n return allMos, nonEggs, Adults" ]
[ "0.69029266", "0.6711919", "0.6711495", "0.6549798", "0.5954817", "0.59225327", "0.57624036", "0.5698024", "0.56348896", "0.5612911", "0.5511534", "0.54864615", "0.5481483", "0.5451472", "0.54416704", "0.542101", "0.5358761", "0.5353916", "0.53418523", "0.53012073", "0.5292634", "0.5275678", "0.52519774", "0.52376693", "0.5203634", "0.51542187", "0.515295", "0.5145658", "0.5110281", "0.50952756", "0.5092456", "0.5063327", "0.50159895", "0.50127345", "0.49934515", "0.49875566", "0.49601284", "0.49574363", "0.49572614", "0.49531636", "0.49434388", "0.49345636", "0.49312532", "0.4922358", "0.4920404", "0.49155757", "0.49155757", "0.49128738", "0.49121872", "0.49114993", "0.48996502", "0.48849776", "0.48826548", "0.4866113", "0.48587817", "0.48564255", "0.4850722", "0.48436958", "0.4842173", "0.48405546", "0.48339176", "0.4829589", "0.4826741", "0.48220137", "0.48142007", "0.4811388", "0.480003", "0.4798733", "0.47923222", "0.47699806", "0.4767124", "0.47662526", "0.47615045", "0.47551808", "0.4746246", "0.47435465", "0.47386098", "0.47376326", "0.4735041", "0.4729109", "0.47208008", "0.47206444", "0.4712845", "0.4712845", "0.4710982", "0.47089478", "0.47045162", "0.4700516", "0.46985018", "0.4698384", "0.46945053", "0.46936652", "0.46881837", "0.46830344", "0.46821678", "0.46807355", "0.46773893", "0.46656105", "0.46632624", "0.46528617" ]
0.78792745
0
str genotype; str base; return P(base in genotype)
def prob_t_N(genotype, base):
    cnter = Counter(genotype)
    return cnter.get(base, 0) * 1/len(genotype)
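A minimal, self-contained sketch of how this function behaves (assuming Counter is imported from the standard-library collections module; the genotype strings below are made up for illustration):

from collections import Counter

def prob_t_N(genotype, base):
    # Fraction of the genotype string made up of `base`, i.e. P(base | genotype).
    return Counter(genotype).get(base, 0) / len(genotype)

# For a diploid genotype "AG" each present allele gets probability 0.5,
# and any base absent from the genotype gets probability 0.0.
assert prob_t_N("AG", "A") == 0.5
assert prob_t_N("AG", "C") == 0.0
assert prob_t_N("AA", "A") == 1.0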
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_label(genotype_type):\n if genotype_type == \"Hom\":\n return 0\n elif genotype_type == \"Het\":\n return 1\n elif genotype_type == \"Hom_alt\":\n return 2", "def all_genotype(ploidy):\n return [\"\".join(comb) for comb in cwr(\"ACGT-\", ploidy)]", "def genotype(args) :\n from genotyper import genotype_samples\n genotype_samples(args)", "def fromgenotype(self):\n\t\tpass", "def base_codes(self):\n bases = []\n\n if self.is_gas_giant:\n bases.append(\"G\")\n if self.is_naval_base:\n bases.append(\"N\")\n if self.is_scout_base:\n bases.append(\"S\")\n if self.is_research_base:\n bases.append(\"R\")\n if self.is_tas:\n bases.append(\"T\")\n if self.is_consulate:\n bases.append(\"I\")\n if self.is_pirate_base:\n bases.append(\"P\")\n\n return \" \".join(bases)", "def genotype(self):\n\t\tgenotype = \"\"\n\t\tfields = vars(self)\n\t\tfor name, field in fields.items():\n\t\t\tif isinstance(field, Pattern):\n\t\t\t\tgenotype += field.genotype()\n\t\t\telse:\n\t\t\t\tgenotype += str(field)\n\t\t\tgenotype += \"\\0\"\n\n\t\treturn genotype", "def __generate_genotype(self):\n if len(self.genotype) < self.__individual_genotype_length:\n gene = ''\n \n while len(self.genotype) < self.__individual_genotype_length:\n gene = str(random.randint(0,1))\n \n self.genotype = self.genotype + gene", "def get_basestrings(self):\n baseStrs = set()\n for x in self.xvals():\n for y in self.yvals():\n p = self.get_plaquette(x, y)\n if p is not None and p.base is not None:\n baseStrs.add(p.base)\n return list(baseStrs)", "def generate_mutation(base):\n\tif base in ['A', 'C', 'G', 'T']:\n\t\tbases = ['A', 'C', 'G', 'T']\n\t\tbases.remove(base)\n\t\treturn np.random.choice(bases)\n\telse:\n\t\traise Exception('base is not a proper DNA nucleotide (ACGT).')", "def test_check_all_default_bases_positional(self, number, base):\n converted = positional.encode(number, base)\n self.assertEqual(positional.decode(converted, base), number)", "def get_ig_name ( base_name ) :\n return base_name + '-GW'", "def define_geotype(x):\n if x['population_km2'] > 2000:\n return 'urban'\n elif x['population_km2'] > 1500:\n return 'suburban 1'\n elif x['population_km2'] > 1000:\n return 'suburban 2'\n elif x['population_km2'] > 500:\n return 'rural 1'\n elif x['population_km2'] > 100:\n return 'rural 2'\n elif x['population_km2'] > 50:\n return 'rural 3'\n elif x['population_km2'] > 10:\n return 'rural 4'\n else:\n return 'rural 5'", "def base_pair(c):\n dna_complement = {'A': 'T', 'C': 'G', 'G': 'C', 'T': 'A'}\n return dna_complement[c.upper()].lower() if c.upper() in dna_complement else 'unknown'", "def gen_pronto_from_raw(seq1, seq2, base=None, freq=None):\n data = gen_pronto_from_raw_int(seq1, seq2, base, freq)\n for value in data:\n yield \"{0:0{1}x}\".format(value, 4)", "def _extract_genotype(geno_field):\n # Assume the genotype is the first format field and raise if it's not\n geno = geno_field.split(':')[0]\n if not GENO_REGEX.search(geno):\n raise ValueError('\"{}\" does not look like a genotype'.format(geno))\n return geno", "def generatebasepairs(self, x):\n currentbases = \"\"\n for u, v in zip(x, range(len(x))):\n if u == 0:\n currentbases += '_'\n else:\n currentbases += self.sequences[v][u-1]\n\n return currentbases", "def create_plaquette(self, baseStr):\n raise NotImplementedError(\"Derived class must implement this.\")", "def get_genotype_from_call(ref_allele, alternate_allele, call):\n genotype = './.'\n if call.lower() == 'undefined' or call.lower() == 'undetermined':\n return genotype\n if call == 'Both':\n 
call = ref_allele + alternate_allele\n callset = set(call)\n if ref_allele in callset and len(callset) == 1:\n genotype = '0/0'\n elif ref_allele in callset and alternate_allele in callset:\n genotype = '0/1'\n callset.remove(ref_allele)\n elif alternate_allele in callset and len(callset) == 1:\n genotype = '1/1'\n else:\n msg = 'Call {call} does not match any of the alleles (ref:{ref_allele}, alt:{alternate_allele})'\n raise ValueError(msg.format(call=call, ref_allele=ref_allele, alternate_allele=alternate_allele))\n return genotype", "def base_repr(i, base):\n\n assert i>=0 and base>=2\n \n if i==0:\n return ['0']\n\n if base<=10:\n return _small_base(i, base)\n\n assert base<=36\n return _large_base(i, base)", "def generate(base):\n if base == '':\n yield base\n else:\n for character in JugglerPassGen.dictionary(base[0]):\n for rest in JugglerPassGen.generate(base[1:]):\n yield character + rest", "def get_primitives(base):\n\n operands = []\n operators = []\n for nparams, s in enumerate(base):\n s = s.replace('%', '%%').split()\n for s in (x.replace('_', ' ') for x in s):\n if nparams and '$' not in s:\n assert nparams in (1, 2)\n s = '%s%s$' % ('$' if nparams == 2 else '', s)\n assert nparams == s.count('$'), (nparams, s)\n s = s.replace('$', ' %s ').strip()\n\n # Normalize the spacing\n s = s.replace(' ,', ',')\n s = s.replace(' . ', '.')\n s = s.replace(' [ ', '[').replace(' ]', ']')\n s = s.replace(' ( ', '(').replace(' )', ')')\n if nparams == 1:\n s = s.replace('+ ', '+')\n s = s.replace('- ', '-')\n s = s.replace('~ ', '~')\n\n if nparams:\n operators.append((s, nparams))\n else:\n operands.append(s)\n return operators, operands", "def complement_base(base,material='DNA'):\n if base in 'Aa':\n if material == 'DNA':\n return 'T'\n elif material == 'RNA':\n return 'U'\n elif base in 'TtUu':\n return 'A'\n elif base in 'Gg':\n return 'C'\n else:\n return 'G'", "def lookup_phenotype_by_name( name, greent ):\n logger=logging.getLogger('application')\n #This performs a case-insensitive exact match, and also inverts comma-ed names\n hpo_ids = greent.hpo.search( name )\n if len(hpo_ids) == 0:\n logger.error('Could not convert phenotype name: {}.'.format(name))\n else:\n logger.debug('Found ids for phenotype name: {} {}.'.format(name,' '.join(hpo_ids)))\n return hpo_ids", "def make_presentBase(verb, conj_id):\n if conj_id == 3:\n # For verbs like cupio, statuo\n # the present base is cupi-, statu-\n if verb[0].endswith(\"io\") or verb[0].endswith(\"uo\"):\n b_present = verb[0][:-1]\n else:\n b_present = verb[1][:-3]\n elif conj_id == 4:\n b_present = verb[1][:-2] \n else: \n b_present = verb[1][:-2]\n return b_present", "def design_grna(seq):\n\n transcript = {'A': 'U', 'C': 'G', 'G': 'C', 'T': 'A'}\n grna = \"\".join(transcript[n] for n in seq)\n\n return grna", "def getAnsofBase(length, base):\n ans = 1\n for i in range(length-1):\n ans = ans * base + 1\n return ans", "def genes():\n return [\"b2935\", \"b0723\", \"b0451\"]", "def subspace2proposition(primes: dict, subspace: Union[dict, str]) -> str:\n\n if not subspace or subspace == len(primes) * \"-\":\n return \"TRUE\"\n\n if type(subspace) is str:\n subspace = pyboolnet.state_space.subspace2dict(primes, subspace)\n\n return \"&\".join([name if value == 1 else f\"!{name}\" for name, value in sorted(subspace.items())])", "def getSJMotifCode(startBases, endBases):\n\n motif = (startBases + endBases).upper()\n\n if motif == \"GTAG\":\n return 1\n elif motif == \"CTAC\":\n return 2\n elif motif == \"GCAG\":\n return 3\n elif motif == 
\"CTGC\":\n return 4\n elif motif == \"ATAC\":\n return 5\n elif motif == \"GTAT\":\n return 6\n else:\n return 0", "def translate_sequence(sequence, genetic_code = {'GUC': 'V', 'ACC': 'T', 'GUA': 'V', 'GUG': 'V', 'ACU': 'T', 'AAC': 'N', 'CCU': 'P', 'UGG': 'W', 'AGC': 'S', 'AUC': 'I', 'CAU': 'H', 'AAU': 'N', 'AGU': 'S', 'GUU': 'V', 'CAC': 'H', 'ACG': 'T', 'CCG': 'P', 'CCA': 'P', 'ACA': 'T', 'CCC': 'P', 'UGU': 'C', 'GGU': 'G', 'UCU': 'S', 'GCG': 'A', 'UGC': 'C', 'CAG': 'Q', 'GAU': 'D', 'UAU': 'Y', 'CGG': 'R', 'UCG': 'S', 'AGG': 'R', 'GGG': 'G', 'UCC': 'S', 'UCA': 'S', 'UAA': '*', 'GGA': 'G', 'UAC': 'Y', 'GAC': 'D', 'UAG': '*', 'AUA': 'I', 'GCA': 'A', 'CUU': 'L', 'GGC': 'G', 'AUG': 'M', 'CUG': 'L', 'GAG': 'E', 'CUC': 'L', 'AGA': 'R', 'CUA': 'L', 'GCC': 'A', 'AAA': 'K', 'AAG': 'K', 'CAA': 'Q', 'UUU': 'F', 'CGU': 'R', 'CGC': 'R', 'CGA': 'R', 'GCU': 'A', 'GAA': 'E', 'AUU': 'I', 'UUG': 'L', 'UUA': 'L', 'UGA': '*', 'UUC': 'F'}, start_pos = 0):\n #find first orf\n #first_orf_seq = find_first_orf(sequence)\n\n # ensure sequence is uppercase\n seq = sequence.upper()\n\n #translate the sequence\n protein = \"\"\n for i in range(0, len(seq) - (len(seq) % 3), 3):\n codon = seq[i:i + 3]\n if genetic_code[codon] == \"*\":\n break\n protein += genetic_code[codon]\n return protein", "def get_alts_in_hom_pileup(pileup_str, ref_base):\n alts = {'A':0, 'C':0, 'G':0, 'T':0}\n for base in pileup_str:\n if base != ref_base and base in alts.keys():\n alts[base] += 1\n\n return max(alts, key=alts.get), alts[max(alts, key=alts.get)]", "def test_get_prior_string_discrete(self):\n dim = Integer(\"yolo\", \"uniform\", 1, 2)\n assert dim.get_prior_string() == \"uniform(1, 3, discrete=True)\"", "def protein_variant(variant):\n _validate_str(variant)\n if variant == WILD_TYPE_VARIANT:\n return WILD_TYPE_VARIANT\n elif variant == SYNONYMOUS_VARIANT:\n return SYNONYMOUS_VARIANT\n else:\n matches = re.findall(\"\\((p\\.\\S*)\\)\", variant)\n if len(matches) == 0:\n raise ValueError(\"Invalid coding variant string.\")\n # uniqify and remove synonymous\n seen = {\"p.=\": True}\n unique_matches = list()\n for v in matches:\n if v in seen:\n continue\n else:\n seen[v] = True\n unique_matches.append(v)\n if len(unique_matches) == 0:\n return SYNONYMOUS_VARIANT\n else:\n return \", \".join(unique_matches)", "def gene_to_protein(gene: str, intrones: Union[str, Collection[str]]) -> str:\n intrones = intrones if not isinstance(intrones, str) else (intrones,)\n for introne in intrones:\n gene = gene.replace(introne, \"\")\n return dna_to_protein(gene)", "def find_gpas(s):\n \"*** YOUR CODE HERE ***\"", "def test_check_base_numbers_to_positional(self, number, base, expected):\n result = positional.encode(number, base)\n self.assertEqual(result, expected)", "def encode_chromosome(in_num):\n convert_dict = {23: \"X\", 24: \"Y\", 25: \"MT\"}\n return convert_dict[in_num] if in_num in convert_dict else str(in_num)", "def get_bases():\n\treturn ((MV.ONE,),) + MV.blades[1:]\n\t# return ((MV.ONE,),) + MV.bases[1:]", "def latin_to_genus_species(self, latin_name):\n # Default #\n genus_name = 'missing'\n species_name = 'missing'\n # Lower case the input #\n latin_name = latin_name.lower()\n # Split into words #\n words = latin_name.split()\n # Get all possible genera #\n genera = self.known_species['genus'].unique().tolist()\n # Compare against all possible genera, take first match #\n for word in words:\n if genus_name != 'missing': break\n for genus in genera:\n if genus_name != 'missing': break\n if word == genus: genus_name = genus\n # Get 
all possible species #\n all_species = self.known_species.query(\"genus==@genus_name\")\n all_species = all_species['species'].unique().tolist()\n # Compare against all possible species, take first match #\n for word in words:\n if species_name != 'missing': break\n for species in all_species:\n if species_name != 'missing': break\n if word == species: species_name = species\n # Return #\n return genus_name, species_name", "def complement_base(base):\n\n if base == 'A' or base == 'a':\n return 'T'\n elif base == 'T' or base == 't':\n return 'A'\n elif base == 'G' or base == 'g':\n return 'C'\n else:\n return 'G'", "def fact_base_to_permute_string(factbase,N):\n\n letters = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']\n alphabets = \"\"\n\n for i in range(N):\n alphabets+= letters[i]\n\n permuteString = \"\"\n for i in range(len(factbase)):\n permuteString+= alphabets[int(factbase[i])]\n alphabets = new_string(alphabets,factbase[i])\n return permuteString", "def test_fidelity_set_base(self):\n dim = Fidelity(\"epoch\", 1, 2, base=3)\n assert dim.get_prior_string() == \"fidelity(1, 2, base=3)\"", "def PPString(inp, mol, i, n, outFile):\n alchemy = re.compile('^\\w*2\\w*_\\d\\d\\d$')\n ppstr = re.sub('\\*', '', mol.string[i])\n if ppstr:\n PPStr = ppstr\n pp_root, pp_ext = os.path.split(ppstr)\n else:\n if inp.setting['pp_type'] == 'geodecker':\n element = mol.type_list[i].title()\n if 'd_shell' in inp.setting:\n if type(inp.setting['d_shell']) is not list:\n inp.setting['d_shell'] = [inp.setting['d_shell']]\n if qtk.n2ve(mol.type_list[i].title()) > 10:\n shell = '-d'\n elif 'd_shell' in inp.setting \\\n and element in inp.setting['d_shell']:\n shell = '-d'\n else:\n element = qtk.element[mol.type_list[i].title()]\n if element.group < 3 and mol.Z[i] > 1:\n if mol.Z[i] != 3:\n shell = '-sp'\n else:\n shell = '-s'\n else:\n shell = ''\n pp_xc_dict = {\n 'lda': 'pz',\n 'pbe0': 'pbe',\n 'b3lyp': 'blyp',\n }\n pp_xc = inp.setting['pp_theory'].lower()\n if pp_xc in pp_xc_dict:\n pp_xc = pp_xc_dict[pp_xc]\n PPStr = ''.join([c for c in mol.type_list[i] if not c.isdigit()])\\\n + '.' 
+ pp_xc + shell + '-hgh.UPF'\n elif inp.setting['pp_type'] == 'cpmd':\n PPStr = PPName(inp, mol, i, n)\n xc = inp.setting['pp_theory'].lower()\n if not mol.string[i]:\n if inp.setting['pp_type'] == 'geodecker':\n PPCheck(pp_xc, mol.type_list[i].title(), PPStr)\n elif inp.setting['pp_type'] == 'cpmd':\n saved_pp = PPCheck_cpmd(pp_xc, mol.type_list[i].title(), PPStr)\n new_pp1 = saved_pp + '.UPF'\n conv_pp = sp.Popen(\"%s %s\" % \\\n (qtk.setting.espresso_cpmd2upf_exe, saved_pp),\n shell=True)\n conv_pp.wait()\n new_pp1_file = os.path.split(new_pp1)[1]\n new_pp1_trg = os.path.join(qtk.setting.espresso_pp, new_pp1_file)\n if not os.path.exists(new_pp1_trg):\n shutil.copy(new_pp1, qtk.setting.espresso_pp)\n PPStr = PPStr + '.UPF'\n\n elif alchemy.match(mol.string[i]):\n cpmd_pp = alchemyPP(xc, PPStr)\n new_pp1 = cpmd_pp + '.UPF'\n if not os.path.exists(new_pp1):\n qtk.report('espresso', \"rewrite Goedecker's PP to UPF\")\n conv_pp = sp.Popen(\"%s %s\" % \\\n (qtk.setting.espresso_cpmd2upf_exe, cpmd_pp),\n shell=True)\n conv_pp.wait()\n if conv_pp.returncode != 0:\n # dirty fix for espresso alchemy conversion routine\n qtk.warning('conversion failed..., trying path end points')\n root, _ = os.path.splitext(PPStr)\n element_str = re.sub('_.*', '', root)\n element1 = re.sub('2.*', '', element_str)\n element2 = re.sub('.*2', '', element_str)\n fraction = float(re.sub('.*_', '', root))/100\n if fraction == 0.0:\n strpp = element1 + \"_q\" + str(qtk.n2ve(element1)) +\\\n \"_\" + xc + '.psp'\n elif fraction == 1.0:\n strpp = element2 + \"_q\" + str(qtk.n2ve(element2)) +\\\n \"_\" + xc + '.psp'\n else:\n qtk.exit(\"PP conversion failed for intermediate lambda\")\n strpp = os.path.join(qtk.setting.cpmd_pp, strpp)\n conv_pp = sp.Popen(\"%s %s\" % \\\n (qtk.setting.espresso_cpmd2upf_exe, strpp),\n shell=True)\n conv_pp.wait()\n os.rename(strpp + '.UPF', new_pp1)\n new_pp1_file = os.path.split(new_pp1)[1]\n new_pp1_trg = os.path.join(qtk.setting.espresso_pp, new_pp1_file)\n if not os.path.exists(new_pp1_trg):\n shutil.copy(new_pp1, qtk.setting.espresso_pp)\n PPStr = PPStr + '.UPF'\n\n return PPStr", "def phenotype(indiv):\n pheno = [[id, problem['weights'][id], problem['values'][id]] for id in range(len(indiv)) if indiv[id] == 1]\n return pheno", "def get_uniprot(tax, acc):\n\n taxid1 = list(acc['taxid'])\n taxid2 = list(acc['taxid2'])\n taxid3 = list(acc['species_taxid'])\n\n if int(tax) in taxid1:\n unip = acc[acc['taxid']==int(tax)]['uniprot'].item()\n\n elif int(tax) in taxid2:\n unip = acc[acc['taxid2']==int(tax)]['uniprot'].item()\n\n elif int(tax) in taxid3:\n unip = acc[acc['species_taxid']==int(tax)]['uniprot'].item()\n \n else:\n unip = 'NA'\n\n return unip", "def pos_to_name(reg):\n l,b = position_region(reg).galactic() \n if numpy.sign(b) == 1:\n pm = \"+\"\n else:\n pm = \"-\"\n text = \"G%4.2f%1s%4.2f\" % (l,pm,abs(b))\n return text", "def complement_base(base, material='DNA'):\n\n if base == 'A' or base == 'a':\n if material == 'DNA':\n return 'T'\n elif material == 'RNA':\n return 'U'\n elif base == 'T' or base == 't' or base == 'U' or base == 'u':\n return 'A'\n elif base == 'G' or base == 'g':\n return 'C'\n else:\n return 'G'", "def hot1_get(seqs_1hot, pos):\n\n if seqs_1hot[pos, 0] == 1:\n nt = 'A'\n elif seqs_1hot[pos, 1] == 1:\n nt = 'C'\n elif seqs_1hot[pos, 2] == 1:\n nt = 'G'\n elif seqs_1hot[pos, 3] == 1:\n nt = 'T'\n else:\n nt = 'N'\n return nt", "def getComplex(self, base, aspirated=False):\n res = ''\n if base == 'c':\n res = self.useRetroflex and 'ʈ͡ʂ' or 't͡ɕ'\n elif base 
== 'j':\n res = self.useRetroflex and 'ɖ͡ʐ' or 'd͡ʑ'\n elif base == 'ts':\n res = 't͡s'\n else:\n res = 'd͡z'\n if aspirated:\n res += 'ʰ'\n return res", "def genotype_probabilities_of(self, member_name):\n # For members already calculated (including the ones with power, who automatically have an aa genotype):\n if self.members[member_name].is_already_processed:\n return self.members[member_name].genotype_probabilities\n\n # Any Asgardian w/o power, and with a parent OR child w/ power, must be genotype Aa.\n if self.any_parent_has_power(member_name) or self.any_child_has_power(member_name):\n return np.array([0, 1, 0])\n\n # Any Asgardian with neither power, nor parents or children with power, has either genotype AA or Aa.\n # If he or she has parents, the probability will depend on genotype probabilities of parents.\n # If not, then 50/50 is assigned.\n if not self.has_parents(member_name):\n return np.array([0.5, 0.5, 0])\n\n parent_genotypes = []\n for parent in self.parents_of(member_name):\n parent_genotypes.append(self.genotype_probabilities_of(parent.name))\n \n probs = np.zeros((3,))\n for i in range(3):\n for j in range(3):\n p = parent_genotypes[0][i]*parent_genotypes[1][j]\n w = WEIGHTS[i, j]\n probs += p * w\n \n # Since we don't have the power, probability of aa is 0:\n probs[2] = 0\n \n # Renormalize:\n probs /= sum(probs)\n \n return probs", "def random_strings(sequence, GC_array):\r\n\r\n AT = 0\r\n GC = 0\r\n\r\n for nt in sequence:\r\n if nt == \"A\" or nt == \"T\":\r\n AT += 1\r\n elif nt == \"G\" or nt == \"C\":\r\n GC += 1\r\n\r\n probabilities = []\r\n\r\n #Calculate probability of G = probability of C = %GC / 2\r\n #Calculate probability of A = probability of T = (1 - %GC) / 2\r\n\r\n #For each consecutive base in provided sequence:\r\n #1. Convert total probability to logarithm using math.log(probability, base=10)\r\n #2. 
Total probability to be multiplied by probability of specifically that base\r\n\r\n for i in range(len(GC_array)):\r\n prob = (AT * math.log10((1 - GC_array[i])/2)) + (GC * math.log10(GC_array[i]/2))\r\n\r\n probabilities.append('%0.3f' % prob)\r\n\r\n print(*probabilities, sep= \" \")", "def base_binom_pro(pro,n0):\n res = stats.binom.pmf(range(n0+1), n0, 1/2.0)\n a = 0\n for i in range(n0+1):\n a = a + res[i]\n if a>=pro: \n return i", "def EnglishToPig(str):\r\n\r\n # TODO: Your code here\r\n\r\n\r\n # Change the return to return the converted string\r\n return(\"\")", "def _ptype(ptype_value, verbose=True):\n types = {\n 0:[\"PT_NULL\",\"Program header table entry unused\"],\n 1:[\"PT_LOAD\",\"Loadable program segment\"],\n 2:[\"PT_DYNAMIC\",\"Dynamic linking information\"],\n 3:[\"PT_INTERP\",\"Program interpreter\"],\n 4:[\"PT_NOTE\",\"Auxiliary information\"],\n 5:[\"PT_SHLIB\",\"Reserved\"],\n 6:[\"PT_PHDR\",\"Entry for header table itself\"],\n 7:[\"PT_TLS\",\"Thread-local storage segment\"],\n 8:[\"PT_NUM\",\"Number of defined types\"],\n 0x60000000:[\"PT_LOOS\",\"Start of OS-specific\"],\n 0x6474e550:[\"PT_GNU_EH_FRAME\",\"GCC .eh_frame_hdr segment\"],\n 0x6474e551:[\"PT_GNU_STACK\",\"Indicates stack executability\"],\n 0x6474e552:[\"PT_GNU_RELRO\",\"Read-only after relocation\"],\n 0x6ffffffa:[\"PT_SUNWBSS\",\"Sun Specific segment\"],\n 0x6ffffffb:[\"PT_SUNWSTACK\",\"Stack segment\"],\n 0x6fffffff:[\"PT_HIOS\",\"End of OS-specific\"],\n 0x70000000:[\"PT_LOPROC\",\"Start of processor-specific\"],\n 0x7fffffff:[\"PT_HIPROC\",\"End of processor-specific\"],\n }\n\n if types.get(ptype_value):\n value = types[ptype_value]\n if verbose:\n return \"%s (%s)\" % (value[0], value[1])\n else:\n return \"%s\" % value[0]\n else:\n return \"UNKNOWN\"", "def likelihood_genotype(genotype, bases_all_reads, error_rates):\n likelihood = 1\n for observed_base in bases_all_reads:\n p = 0\n for base in \"ACGT-\":\n l = prob_t_N(genotype, base) * error_rates[base][observed_base]\n p += l\n likelihood *= p\n\n return likelihood", "def _get_genotypes(self, samples, records, switch):\n\n variant = np.zeros(len(samples))\n for idx, sample in enumerate(samples):\n try:\n gt = records.genotype(sample)['GT']\n except IndexError:\n print(\"something went wrong with:\")\n print('sample:', sample, 'variant:', records, '-- set value to missing')\n gt = '.'\n if gt == '.':\n gt = 0\n else:\n gt = re.split('\\||/', gt)\n gt = list(map(int, gt))\n variant[idx] = np.sum(gt)\n if switch:\n variant = np.abs(variant - 2)\n return variant", "def binaryRep( i, gen ):\n \n length = numberOfNodes( gen )\n b = scipy.binary_repr( i, length )\n \n return b", "def test_get_prior_string_uniform(self):\n dim = Dimension(\"yolo\", \"uniform\", 1, 2)\n assert dim.get_prior_string() == \"uniform(1, 3)\"", "def gen_parameter(self, g, ng, p):\n pass", "def test_get_prior_string_normal(self):\n dim = Dimension(\"yolo\", \"norm\", 1e-10, 1)\n assert dim.get_prior_string() == \"normal(1e-10, 1)\"", "def getBP(vt):\n vt['indivo_prefix'] = 'bp_' + vt['name']\n return getVital(vt)", "def represent_polypeptide(polypeptide, verbosity_level=0):\n output_string = \"\"\n separator = \"\"\n separator_backspace = 0\n if verbosity_level == IUPAC_1:\n separator = \"\"\n amino_acid_repr_strings = [amino_acid.IUPAC_1 for amino_acid in polypeptide]\n elif verbosity_level == IUPAC_3:\n separator = \"/\"\n amino_acid_repr_strings = [amino_acid.IUPAC_3 for amino_acid in polypeptide]\n elif verbosity_level == FULL_NAME:\n separator = \", \"\n 
amino_acid_repr_strings = [amino_acid.full_name for amino_acid in polypeptide]\n else:\n raise ValueError(\"Representation verbosity level must be one of: IUPAC_1, IUPAC_3, FULL_NAME.\")\n \n return separator.join(amino_acid_repr_strings)", "def test_genbank_to_genome_taxonomy(self):\n result = self.gfu.genbank_to_genome(self.ctx, {\n 'workspace_name': self.ws_name,\n 'generate_ids_if_needed': 'true', # why is this a string\n 'taxon_id': '3702',\n 'file': {\n 'path': f\"{_DATA_PATH}/wigglesworthia/genome.gb\"\n },\n 'genome_name': str(uuid4()),\n })\n ('result', result)\n ref = result[0]['genome_ref']\n self.assertTrue(ref, 'Genome ref exists')\n info = result[0]['genome_info']\n typ = info[2]\n self.assertTrue(typ.startswith('KBaseGenomes.Genome'))\n info_details = info[-1]\n self.assertEqual(info_details['Taxonomy'], (\n \"cellular organisms;Eukaryota;Viridiplantae;\"\n \"Streptophyta;Streptophytina;Embryophyta;Tracheophyta;\"\n \"Euphyllophyta;Spermatophyta;Magnoliopsida;Mesangiospermae;\"\n \"eudicotyledons;Gunneridae;Pentapetalae;rosids;malvids;\"\n \"Brassicales;Brassicaceae;Camelineae;Arabidopsis\"\n ))\n self.assertEqual(info_details['Size'], '697724')\n self.assertEqual(info_details['Source'], 'Genbank')\n self.assertEqual(info_details['Name'], 'Wigglesworthia glossinidia endosymbiont of Glossina brevipalpis')\n self.assertEqual(info_details['GC content'], '0.22479')\n self.assertEqual(info_details['Genetic code'], '11')\n self.assertEqual(info_details['Number of Genome Level Warnings'], '1')\n self.assertEqual(info_details['Source ID'], 'BA000021')\n self.assertEqual(info_details['Number of Protein Encoding Genes'], '20')\n self.assertEqual(info_details['Domain'], 'Eukaryota')\n self.assertTrue(info_details['Assembly Object'])\n self.assertEqual(info_details['Number contigs'], '1')\n self.assertEqual(info_details['Number of CDS'], '20')\n self.assertTrue(info_details['MD5'])", "def encode_syn(prime, base, a_public):\n\n return 'SYN;%i;%i;%i' % (prime, base, a_public)", "def choose_bin_base() -> int:\n return npr.choice((2, 8, 16))", "async def get_base_mult(self, base: str, other_base: str):\n\n if base == other_base:\n return 1.0\n\n try:\n convert_pair = '{}-{}'.format(base, other_base)\n return self.base_rates[convert_pair]\n\n except KeyError:\n raise ValueError('Invalid base rate {}-{}'.format(base, other_base))", "def DictProteomeNameToSeq(X, n):\n DictProtToSeq_UP = {}\n for rec2 in SeqIO.parse(X, \"fasta\"):\n UP_seq = str(rec2.seq)\n if n == \"full\":\n UP_name = rec2.description.split(\"HUMAN \")[1].split(\" OS\")[0]\n DictProtToSeq_UP[UP_name] = str(UP_seq)\n if n == \"gene\":\n try:\n UP_name = rec2.description.split(\" GN=\")[1].split(\" \")[0]\n DictProtToSeq_UP[UP_name] = str(UP_seq)\n except BaseException:\n continue\n return DictProtToSeq_UP", "def gen_pronto_from_raw_int(seq1, seq2, base=None, freq=None):\n clock = 0.241246 # Pronto clock base: 1000000 / (32768 * 506 / 4)\n\n if freq is None:\n if base is None:\n freq = 0.040\n else:\n freq = 1.0 / (base * clock)\n\n if base is None:\n base = int(1 / (freq * clock))\n\n yield 0\n yield base\n\n def fixup(x):\n return list(raw.paired(raw.simplify(x)))\n\n simple1 = fixup(seq1)\n simple2 = fixup(seq2)\n\n yield int(len(simple1)/2) # sequence 1\n yield int(len(simple2)/2) # sequence 2\n\n for x in simple1:\n yield int(abs(x) * freq)\n\n for x in simple2:\n yield int(abs(x) * freq)", "def psea(pname): # -> str:\n ...", "def getOneRead(self, pos, probs):\n if not self.isValid(pos, pos + self.readlen):\n return 
False\n bases = []\n f = self.stream\n f.seek(pos)\n n = 0\n while True:\n b = f.read(1)\n if b == '>':\n return False\n if b not in \"ACGTNXacgtnx\":\n continue\n if random.random() < probs[n]:\n while True:\n nb = random.choice('ACGT')\n if nb != b:\n b = nb\n break\n bases.append(b)\n n += 1\n if n == self.readlen:\n break\n return bases", "def variable_base(self, k, base):\n q = []\n for b in base:\n q.append(k % b)\n k /= b\n return q", "def calculate_genotype_probabilities(self):\n for name, member in self.members.items():\n member.genotype_probabilities = self.genotype_probabilities_of(name)", "def _process_prosody(sonority):\n assert 9 not in sonority[1:-1]\n assert sonority[0] == sonority[-1] == 9\n\n # create the output values\n psequence = []\n first = True # stores whether first syllable is currently being processed\n\n for i in range(1, len(sonority) - 1):\n # get a segment with context\n a, b, c = sonority[i - 1], sonority[i], sonority[i + 1]\n\n if b == 7: # a vowel\n if first:\n psequence.append('X')\n first = False\n elif c == 9: # last\n psequence.append('Z')\n else:\n psequence.append('Y')\n elif b == 8: # a tone\n psequence.append('T')\n elif a >= b >= c or c == 8: # descending\n if c == 9: # word final position\n psequence.append('Z' if b == 7 else 'N') # vowel or consonant\n else:\n if first:\n first = False\n psequence.append('A')\n else:\n psequence.append('L')\n elif b < c or a > b <= c or a < b <= c: # ascending\n # check for syllable first\n if a == 9:\n psequence.append('A')\n elif a >= b:\n if c == 9:\n psequence.append('N')\n else:\n if psequence[-1] != 'A':\n psequence = psequence[:-1] + [psequence[-1].replace('L', 'M')] + ['B']\n else:\n psequence.append('C')\n else:\n psequence.append('C')\n elif a < b > c: # consonant peak\n if first:\n psequence.append('X')\n first = False\n else:\n psequence.append('Y')\n else:\n raise ValueError(\n \"Conversion to prosodic string failed due to a condition which was not \"\n \"defined in the convertion, for details compare the numerical string \"\n \"{0} with the profile string {1}\".format(sonority, psequence))\n return psequence", "def probability(structure,seq, react=None):\n return energy_to_proba(get_ens_energy(seq,react),get_stru_energy(structure,seq,react))", "def pig_latin(word):\n first_letter = word[0]\n rest_of_word = word[1 : ]\n #print(\"First letter is\", first_letter)\n #print(\"rest_of_word is\", rest_of_word)\n if first_letter == 'a' or first_letter == 'e' or first_letter == 'i' or first_letter == 'o' or first_letter == 'u': \n pig_latin_word = word + 'way'\n else: \n pig_latin_word = rest_of_word + first_letter + 'ay'\n return pig_latin_word", "def base_composition(reads, base):\n assert base.upper() in set(\"ACGT\")\n\n \"\"\" Reports nucelotide frequencies at each position in the\n sam sequences\n \"\"\"\n # DNA_Alphabet=[\"A\",\"C\",\"T\",\"G\",\"N\"]\n all_nucs = []\n for read in reads:\n nucs = {} # Dictionary to store nucleotide data.\n seq = read[9]\n for i in range(0, len(seq)):\n nucs[str(i + 1)] = seq[i]\n all_nucs.append(nucs)\n all_items = []\n counts = []\n for dicts in all_nucs:\n for item in dicts.items():\n all_items.append(item)\n all_items.sort(key=operator.itemgetter(0))\n groups = [map(operator.itemgetter(1), list(group))\n for key, group in itertools.groupby(\n all_items, operator.itemgetter(0))]\n for group in groups:\n counts.append(group.count(base))\n\n pos = range(1, len(seq) + 1)\n\n # Create plot.\n plt.figure(1, figsize=(8, 8))\n plt.axes([0.1, 0.1, 0.8, 0.8])\n plt.bar(pos, 
counts, facecolor='g')\n plt.xlabel(\"Position\")\n plt.ylabel(\"number of mapped reads\")\n plt.title(base)\n plt.show()", "def genotypes(self):\n return self.data.genotypes.values", "def base():", "def get_variant_type(variant):\n _validate_str(variant)\n v = variant.split(\", \")[0] # test first token of multi-mutant\n if re_protein.match(v) is not None:\n return \"protein\"\n elif re_coding.match(v) is not None:\n return \"coding\"\n elif re_noncoding.match(v) is not None:\n return \"noncoding\"\n else:\n return None", "def map_base_to_int(base):\n return __BASES_MAP__[base]", "def decode(chromosome):\n # this needs to be a mini state machine.\n # We expect a stream of number + operator pairs terminated with a number\n output = \"\"\n need_op = False\n for key in chromosome:\n gene = genes[key]\n if need_op:\n if gene in operators:\n output += gene\n need_op = False\n else:\n continue\n else:\n if gene in digits:\n output += gene\n need_op = True\n else:\n continue\n if not need_op:\n # we don't want an op hanging off the end\n output = output[:len(output)-1]\n return output", "def basestr(cls: Any) -> str:\n return baserepr(cls)", "async def get_pair_base_mult(self, base: str, pair: str):\n\n pair_base = pair.split('-')[0]\n return await self.get_base_mult(base, pair_base)", "def to_baseN(self, value, base, other_base = False):\r\n numeral = self.numeral\r\n \r\n if other_base: #If value is not in base 10\r\n conv_to_x = self.to_base_ten(value, other_base) #Use the above function to first convert to base 10.\r\n return self.to_baseN(conv_to_x, base) # Recursively convert from base 10 to the new base.\r\n\r\n else: # Since value supplied to this part is in decimal, we can work in base 10\r\n int_part = int(value) #Remove fractional part\r\n frac_part = value - int_part #Keep fractional part\r\n\r\n if value == 0:\r\n return \"0\"\r\n\r\n if int_part < 0:\r\n return '-' + self.to_baseN(abs(int_part), base, other_base) #for number < 0, work with its absolute form before adding -\r\n\r\n if not 2 <= base <= len(numeral):\r\n raise ValueError(f'Base must be between 2 and {len(numeral)}')\r\n \r\n int_result = \"-\" if int_part < 0 else \"\" #add - to negatiive numbers\r\n frac_result = \"\"\r\n\r\n while int_part != 0:\r\n int_result += numeral[int_part % base]\r\n int_part //= base\r\n\r\n while frac_part != 0:\r\n frac_result += numeral[int(frac_part * base)]\r\n frac_part = (frac_part * base) - int(frac_part * base)\r\n result = (int_result[::-1] + \".\" + frac_result[::1]) if frac_result else int_result[::-1]\r\n \r\n if result.startswith('.'):\r\n return \"0\" + result\r\n else:\r\n return result", "def mutate(chromosome, p):\n code = {'0':'1', '1':'0'}\n return ''.join(code[num] if random() < p else num for num in chromosome)", "def decode_proba(self, P):\n c = {}\n for i in range(CONFIG.max_input_len):\n if max(P[i]) < CONFIG.allowed_threshold:\n c[i] = max(P[i])\n P = P.argmax(axis=-1)\n return ''.join(self.indices_char[x] for x in P if x), c", "def limpiar_type(palabra):\n if palabra == 'Boat' or palabra == 'Boatomg':\n palabra = 'Boating'\n return palabra\n \n return palabra", "def faceFromLinkSub(prop):\n return prop[0], int(prop[1][0][4:]) - 1 # string \"faceX\"", "def discriminator(self) -> str:", "def Geometric(name, p):\n return rv(name, GeometricDistribution, p)", "def mut(base,det=False):\n\n bases = ('A','T','C','G')\n base = base.upper()\n if base not in bases:\n raise ValueError(\"base passed to mut(): \" + str(base) + \" not one of (A,T,C,G)\")\n\n if det:\n if base == 
'A':\n return 'T'\n elif base == 'T':\n return 'A'\n elif base == 'G':\n return 'C'\n elif base == 'C':\n return 'G'\n\n else:\n mut = base\n while mut == base:\n mut = bases[int(random.uniform(0,4))]\n return mut", "def bp_from_digit(digit):\r\n # You must use the following dictionary:\r\n bp_map = {0: 'a', 1: 'c', 2: 'g', 3: 't'}\r\n\r\n # YOUR CODE HERE\r", "def clean_genus(genus: str) -> str:\n if genus in {\"Galasimus\", \"Gelasimes\", \"Gelasius\", \"Gelasmus\", \"Gelsimus\", \"Gelassimus\", \"Gelasima\"}:\n return \"Gelasimus\"\n elif genus in {\"Uka\", \"Uça\"}:\n return \"Uca\"\n elif genus in {\"Goneplax\"}:\n return \"Gonoplax\"\n elif genus in {\"Ocypoda\"}:\n return \"Ocypode\"\n elif genus in {\"Ciecie\"}:\n return \"\"\n else:\n return genus", "def str_base(num, base=36, numerals=\"0123456789abcdefghijklmnopqrstuvwxyz\"):\n if base < 2 or base > len(numerals):\n raise ValueError(\"`base` must be between 2 and %i\")\n\n if num == 0:\n return '0'\n\n buf = BytesIO()\n\n if num < 0:\n buf.write(\"-\")\n num = -num\n\n while num:\n buf.write(numerals[num % base])\n num //= base\n\n return buf.getvalue()", "def check_if_TaxID_valid_for_GENOME_and_try_2_map_otherwise(taxid, pqo, args_dict=dict()):\n if taxid in pqo.taxid_2_proteome_count:\n return taxid, True # taxid is part of UniProt Ref Prots\n else:\n # try to find a parent that is in of UniProt Ref Prots\n for taxid_parent in pqo.ncbi.iter_direct_parent(taxid): # relevant for e.g. Taxid 511145; Escherichia coli str. K-12 substr. MG1655 --> should match to 83333 not 83334!\n taxid_parent = int(taxid_parent)\n if taxid_parent in pqo.taxid_2_proteome_count:\n return taxid_parent, True # taxid is part of UniProt Ref Prots\n elif pqo.TaxidSpecies_2_TaxidProteome_dict.get(taxid_parent, False):\n taxid = pqo.TaxidSpecies_2_TaxidProteome_dict[taxid_parent]\n return taxid, True\n try:\n taxid_mapped = pqo.TaxidSpecies_2_TaxidProteome_dict[taxid]\n return taxid_mapped, True # taxid can easily be mapped, if ambiguous will be mapped to the reference proteome with the highest number of proteins\n except KeyError:\n pass\n args_dict[\"ERROR TaxID\"] = \"The Taxid '{}' you've provided is not a valid UniProt Reference Proteome TaxID (https://www.uniprot.org/proteomes). Unfortunately, we can't map your taxid input to an NCBI taxid that has a UniProt Reference Proteome. 
Please use a valid taxid and try again.\".format(taxid)\n return taxid, False", "def provn_representation(self):\n return \"'%s'\" % self._str", "def genotype(self, normal_primitives, reduce_primitives):\n\n def _parse_proxyless(weights, primitives):\n # Find the best op in this weight.\n k_best = np.argmax(weights, axis=1)\n return [primitives[k] for k in k_best]\n\n _parse = _parse_proxyless\n gene_normal = _parse(F.softmax(self.alphas_normal, dim=-1).data.cpu().numpy(), normal_primitives)\n gene_reduce = _parse(F.softmax(self.alphas_reduce, dim=-1).data.cpu().numpy(), reduce_primitives)\n alphas_normal = self.alphas_normal\n alphas_reduce = self.alphas_reduce\n genotype = Genotype(\n normal=gene_normal,\n reduce=gene_reduce,\n alphas_normal=alphas_normal,\n alphas_reduce=alphas_reduce,\n )\n return genotype", "def chtype(var):\n return str(type(var)).split('\\'')[1]", "def dna_to_protein(seq):\n\n # Verify a convertible sequence\n if len(seq) % 3 != 0:\n raise RuntimeError('Total number of bases must be a multiple of 3')\n\n # Iterate through adding the proteins\n protein = ''\n for i in range(0, len(seq), 3):\n protein += bioinfo_dicts.codons[seq[i:i+3]]\n return protein", "def test_get_prior_string_shape(self):\n dim = Dimension(\"yolo\", \"alpha\", 1, 2, shape=(2, 3))\n assert dim.get_prior_string() == \"alpha(1, 2, shape=(2, 3))\"" ]
[ "0.60589737", "0.5741285", "0.5715102", "0.5583383", "0.5533688", "0.55254024", "0.5489838", "0.5393062", "0.5336175", "0.5331854", "0.5328015", "0.5308519", "0.5293455", "0.5247759", "0.5218406", "0.5206605", "0.5196159", "0.51130825", "0.5110904", "0.50777864", "0.50563097", "0.50279284", "0.50170976", "0.5005599", "0.49523324", "0.49401432", "0.49291328", "0.4924317", "0.49187836", "0.49120113", "0.48828188", "0.48790947", "0.48679656", "0.48483115", "0.484176", "0.4833667", "0.4828573", "0.48224905", "0.4821201", "0.48138335", "0.48027554", "0.48022926", "0.47877166", "0.4786647", "0.47840592", "0.4781178", "0.47759235", "0.4766789", "0.47642162", "0.47614986", "0.47470808", "0.4742855", "0.47323504", "0.4719671", "0.47182631", "0.4711613", "0.4705988", "0.4703635", "0.46989146", "0.46989003", "0.4688831", "0.46876645", "0.46851918", "0.46803564", "0.46763766", "0.4664549", "0.46571735", "0.46558762", "0.46502388", "0.46470535", "0.46438184", "0.4638379", "0.463721", "0.4635563", "0.46312118", "0.46302658", "0.46264726", "0.46208557", "0.4619254", "0.46070352", "0.46048912", "0.4598568", "0.45979145", "0.45965058", "0.45949936", "0.45842543", "0.45831612", "0.45827985", "0.4582761", "0.45819712", "0.45819098", "0.4577452", "0.45753208", "0.45698425", "0.45614138", "0.45606178", "0.45600343", "0.45580283", "0.4556743", "0.45465347" ]
0.5752146
1
str genotype; iterableobj bases_all_reads, list or np.array; return P(data|genotype) == likelihood
def likelihood_genotype(genotype, bases_all_reads, error_rates):
    likelihood = 1
    for observed_base in bases_all_reads:
        p = 0
        for base in "ACGT-":
            l = prob_t_N(genotype, base) * error_rates[base][observed_base]
            p += l
        likelihood *= p

    return likelihood
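A short runnable sketch tying the two functions together; the nested error_rates table (true base -> observed base -> probability) and its numbers are hypothetical, chosen only so that each row sums to 1:

from collections import Counter

def prob_t_N(genotype, base):
    return Counter(genotype).get(base, 0) / len(genotype)

def likelihood_genotype(genotype, bases_all_reads, error_rates):
    # P(data | genotype): product over reads of the per-read emission probability,
    # marginalised over which allele of the genotype the read came from.
    likelihood = 1
    for observed_base in bases_all_reads:
        p = 0
        for base in "ACGT-":
            p += prob_t_N(genotype, base) * error_rates[base][observed_base]
        likelihood *= p
    return likelihood

# Hypothetical symmetric error model: a true base is observed correctly with
# probability 0.99 and misread as each of the four other symbols with 0.0025.
error_rates = {
    truth: {obs: (0.99 if obs == truth else 0.0025) for obs in "ACGT-"}
    for truth in "ACGT-"
}

reads = ["A", "A", "G", "A"]
print(likelihood_genotype("AG", reads, error_rates))  # heterozygous A/G fits these reads better...
print(likelihood_genotype("AA", reads, error_rates))  # ...than homozygous A/A, which is penalised by the G read.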
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def genotype(args) :\n from genotyper import genotype_samples\n genotype_samples(args)", "def calculate_genotype_probabilities(self):\n for name, member in self.members.items():\n member.genotype_probabilities = self.genotype_probabilities_of(name)", "def genotypes(self):\n return self.data.genotypes.values", "def __init__(self, length=64, bitstring_or_list=None):\r\n self._fitness = 0\r\n self.repr = None\r\n self.default_length = length\r\n if not bitstring_or_list:\r\n # random array of bytes\r\n self.genes = [random.getrandbits(1)\r\n for _ in range(self.default_length)]\r\n else:\r\n if isinstance(bitstring_or_list, str):\r\n self.genes = [int(b) for b in bitstring_or_list]\r\n elif isinstance(bitstring_or_list, list):\r\n self.genes = bitstring_or_list", "def pval_at_rna_by_nbinom(\n self, pos_dict_of_counts: Mapping[str, List], neg_vals_at_rna: np.array, gene_and_type,\n log_if_values_above=1E9,\n log_values=False, which='per_read',\n verbose=False):\n\n if len(neg_vals_at_rna) == 0:\n return None\n\n log_scale_high_value = (np.mean(neg_vals_at_rna) > log_if_values_above)\n\n if log_values or log_scale_high_value:\n log_this_gene = True\n neg_vals_at_rna = np.log10(neg_vals_at_rna)\n else:\n log_this_gene = False\n \n #if not np.any(neg_vals_at_rna):\n #print(\"No positive values in negatives.\")\n # neg_vals_at_rna = np.array([\n # self.negatives.lowest_positive_vals[which][x]/10 for x in \\\n # self.negatives.metadata.random_proteins])\n #print(f\"negatives now {neg_vals_at_rna}\")\n mean_negative = np.average(neg_vals_at_rna)\n std_negative = np.std(neg_vals_at_rna)\n\n vmr = (std_negative**2)/mean_negative\n\n verbose and print(f'vmr for negatives={vmr}')\n # Use a poisson if the var/mean is low enough:\n if vmr < 2:\n verbose and print(\"Using poisson.\")\n self.stats_log['vmr<2'] += 1\n pois = stats.poisson(mean_negative)\n return self.use_dist(pos_dict_of_counts, log_this_gene, pois)\n\n verbose and print(\"Wil try to use NB.\")\n self.stats_log['vmr>=2'] += 1\n\n # Try to fit a NB useing statsmodels.\n q = sm.NegativeBinomial(\n neg_vals_at_rna, np.array([1] * len(neg_vals_at_rna)), loglike_method='nb2')\n try:\n res = q.fit(disp=0)\n except: # If a NB can't be fit, revert to a poisson.\n print(f\"Could not run q.fit(disp=0) on neg_vals_at_rna= {neg_vals_at_rna}. Using poisson.\")\n pois = stats.poisson(mean_negative)\n return self.use_dist(pos_dict_of_counts, log_this_gene, pois)\n\n # Create a scipy.stats.nbinom object to use its cdf, based on the statsmodels fit parameters.\n # There is no cdf function for the statsmodels object.\n mu = res.predict()[0] # alpha = res.params[1]\n size = 1. 
/ res.params[1] # prob = size / (size + mu)\n\n verbose and print(f\"Fit NB mu={mu}\")\n \n pvals = self.use_dist(\n pos_dict_of_counts, log_this_gene, stats.nbinom(size, size/(size + mu)))\n\n return pvals", "def test_iter_genotypes(self):\n with self.reader_f() as f:\n for g in f.iter_genotypes():\n variant_name = VARIANT_NAME_FIX.get(\n (truth.variant_to_key[g.variant], g.coded),\n truth.variant_to_key[g.variant],\n )\n\n expected = truth.genotypes[variant_name]\n self.assertEqual(expected, g)", "def G_stat(data):\r\n # G = 2*sum(f_i*ln(f_i/f_i_hat)) over all i phenotypes/sample classes\r\n # calculate the total number of observations under the consideration that\r\n # multiple observations in a given group are averaged.\r\n n = sum([arr.mean() for arr in data])\r\n a = len(data) # a is number of phenotypes or sample classes\r\n obs_freqs = array([sample_type.mean() for sample_type in data]) # f_i vals\r\n exp_freqs = zeros(a) + (n / float(a)) # f_i_hat vals\r\n G = 2. * (obs_freqs * log(obs_freqs / exp_freqs)).sum()\r\n return G", "def n(self):\n return len(self.genotypes)", "def calculateLogJointProbabilities(self, datum):\n logJoint = util.Counter()\n for cls in self.classes:\n class_probability = self.prior_prob[cls]\n for key, value in datum.items():\n relative_feature_values = self.likelihoods[cls][key]\n class_probability += math.log(relative_feature_values.get(datum[key], 0.01))\n\n logJoint[cls] = class_probability\n\n return logJoint", "def _genotype_updated(self):\n if self.data.get(\"GT\", None) is None:\n self.gt_alleles = None\n self.called = None\n self.ploidy = None\n else:\n self.gt_alleles = []\n for allele in ALLELE_DELIM.split(str(self.data[\"GT\"])):\n if allele == \".\":\n self.gt_alleles.append(None)\n else:\n self.gt_alleles.append(int(allele))\n self.called = all([al is not None for al in self.gt_alleles])\n self.ploidy = len(self.gt_alleles)", "def prob_t_N(genotype, base):\n cnter = Counter(genotype)\n return cnter.get(base, 0) * 1/len(genotype)", "def generate_genotype(self):\n genes = []\n for i in range(self.n_genes):\n genes.append(self.Gene(n_bases=self.n_bases))\n self.genes = genes", "def _get_genotypes(self, samples, records, switch):\n\n variant = np.zeros(len(samples))\n for idx, sample in enumerate(samples):\n try:\n gt = records.genotype(sample)['GT']\n except IndexError:\n print(\"something went wrong with:\")\n print('sample:', sample, 'variant:', records, '-- set value to missing')\n gt = '.'\n if gt == '.':\n gt = 0\n else:\n gt = re.split('\\||/', gt)\n gt = list(map(int, gt))\n variant[idx] = np.sum(gt)\n if switch:\n variant = np.abs(variant - 2)\n return variant", "def test_get_representatives(self):\r\n\r\n result = \"\"\">1: 5\r\nABABABA\r\n>3: 1\r\nBABA\r\n>4: 1\r\nABABAA\r\n>8: 2\r\nBABBA\r\n\"\"\"\r\n seqs = self.data.iteritems\r\n mapping = self.mapping\r\n test_result = list(get_representatives(mapping, seqs()))\r\n test_result_as_fasta = \"\".join(\r\n map(lambda a: a.to_fasta(), test_result))\r\n\r\n self.assertEqual(test_result_as_fasta, result)\r\n\r\n # another example\r\n mapping = {'1': ('a', 'b', 'c'),\r\n '2': ('d', 'e', 'f')}\r\n seqs = [('1', \"ACGT\"), ('2', \"TAGC\"), ('a', \"TTTTT\")]\r\n\r\n observed = list(get_representatives(mapping, seqs))\r\n expected = [BiologicalSequence(\"ACGT\", id=\"1\"),\r\n BiologicalSequence(\"TAGC\", id='2')]\r\n self.assertEqual(observed, expected)", "def likelihood(self):\n \n raise NotImplementedError()", "def calculateLogJointProbabilities(self, datum):\n logJoint = util.Counter()\n 
\"*** YOUR CODE HERE ***\"\n\t#Adds log(P(y)) to calculate P(y|f1,f2...)\n for label in self.legalLabels:\n\t\tlogJoint[label] += math.log(self.prior[label])\n\t#Adds log(P(f1|y)), log(P(f2|y))... to calculate P(y|f1, f2...)\n for key in datum:\n\t\t#if key == (7, 3):\n\t\t\t#print self.condprobs[key, 0]\n\t\tfor label in self.legalLabels:\n\t\t\t#print str(key) + str(datum[key])\n\t\t\tlogJoint[label] += math.log(self.condprobs[key, label][datum[key]])\n return logJoint", "def treats_data(data, typo='mean'):\n\n nomalized_data = []\n\n if typo == 'mean':\n for i, v in enumerate(data):\n nomalized_data.append( fitness_medio(data[i]) )\n\n if typo == 'gen_mean':\n gens = [0]*len(data[0])\n\n for i, v in enumerate(data):\n for j, v in enumerate(data[i]):\n gens[j] += data[i][j]\n for i,v in enumerate(gens):\n nomalized_data.append( (gens[i])/len(data[0]) )\n\n if typo == 'max':\n for i, v in enumerate(data):\n nomalized_data.append( max(data[i]) )\n\n if typo == 'gen_max':\n gens = [0]*len(data[0])\n\n for i, v in enumerate(data):\n for j, v in enumerate(data[i]):\n if data[i][j] > gens[j]:\n gens[j] = (data[i][j])\n\n for i,v in enumerate(gens):\n nomalized_data.append( (gens[i]) )\n\n return nomalized_data", "def _getPerBaseInfo( self, readGroup ):\r\n\r\n if 'AlignmentArray' not in readGroup:\r\n return None\r\n\r\n alignmentArrayDS = readGroup['AlignmentArray']\r\n dataSize = len(alignmentArrayDS)\r\n \r\n # fetch all to memory for speeding up, it \r\n # requires explicitly slicing coordinate to copy the data \r\n alignmentArray = alignmentArrayDS[0:dataSize] \r\n \r\n ### these are done in numpy, fast,.,\r\n binRBases = (alignmentArray & 0xf0) >> 4; \r\n binTBases = (alignmentArray & 0x0f) ;\r\n rSeqAll = \"\".join(Basemap[binRBases])\r\n tSeqAll = \"\".join(Basemap[binTBases])\r\n\r\n return { \"tSeq\":tSeqAll, \"rSeq\":rSeqAll }", "def viterbi(self, word_seq):\n # Initialize scores\n scores = [{}]\n path = {}\n # Populate scores\n for i in range(0, len(word_seq)):\n for label in self.label_type_map:\n scores[i][label] = 0\n scores.append({})\n self.initialize(scores, word_seq, path)\n path = self.iterate(scores, word_seq, path)\n return self.identify(scores, word_seq, path)", "def probability(structure,seq, react=None):\n return energy_to_proba(get_ens_energy(seq,react),get_stru_energy(structure,seq,react))", "def calculateLogJointProbabilities(self, datum):\n logJoint = util.Counter()\n \n \"*** YOUR CODE HERE ***\"\n \n # -- OUR CODE HERE\n \n \n import math\n for label in self.legalLabels:\n sumThing = 0.0\n for pixel in self.conditionalProb[label]:\n if datum[pixel] is 1:\n #assert self.conditionalProb[label][pixel] < 1.0 # -- sanity check that the probability is valid\n sumThing += math.log((self.conditionalProb[label][pixel]*1.0))\n else:\n sumThing+=math.log(1-self.conditionalProb[label][pixel]*1.0)\n logJoint[label] = math.log(self.prior[label]*1.0) + sumThing*1.0\n \n\n \n \n import time\n #print \"logJoint is :: \", logJoint\n #time.sleep(2)\n \n \n # -- uses the conditional probability tables computed in the current iteration\n # -- in train and tune\n \n return logJoint", "def gw_heritability(\n input_snp_filename: \"Data Input, use the SNPs file from dataParse\",\n output_summary_filename: 'output file for the genomewide results summary, use .csv',\n logger_filename: 'file for the logger, use a txt',\n sweeps: \"number of samples for each chain\" = 1000,\n burnin: \"number of burnin samples\" = 1000,\n n_chains: \"number of chains of the sampler\" = 4,\n n_cores: \"number 
of parallel cores to use\" = 4,\n N_1kG: \"number of SNPs onwhich the LD-score is calculates\" = 1290028,\n chromosome: \"chromosome on which the analysis is run\" = \"all\",\n sep: \"separator for the input files, use t for tab separated (not \\t)\" = \",\",\n model: 'regression model'='normal',\n fix_intercept = False,\n ):\n\n # Initialisation of the logger\n output_logger = log.setup_logger(\"output_logger\", logger_filename)\n log.initialise_log(output_logger,\n 'genome-wide regression, model: %s' %model,\n [input_snp_filename],\n [output_summary_filename],\n sweeps,\n burnin,\n chromosome = str(chromosome),\n other_params_diz = {'chains': n_chains, 'cores': n_cores})\n\n # Initialisation function, it reads the summary stats file, filters the SNPs,\n # creates the output files\n\n logging.info(\"Start Analysis\")\n\n snps = s.Snps()\n # read table\n snps.read_table(input_snp_filename, separator=sep)\n # generate chi squared stats\n snps.generate_stats()\n # update the summary stats\n snps.update_summary()\n output_logger.info(\" Sample size \" + str(snps.n_patients) + \"\\n\")\n\n\n snps.apply_filter_table(s.baghera_filter)\n snps.update_summary()\n output_logger.info(\"After baghera init filter.\\nNumber of SNPs: %s\\nNumber of genes: %s\\n\" \\\n %(str(snps.n_snps), str(snps.n_genes)) )\n\n # Non coding SNPs are assigned to a dummy gene, such that the regression is done on the entire SNPs' set\n snps.rename_non_annotated(name='NonCoding')\n\n if chromosome != \"all\":\n snps.apply_filter_table(snps.cut_single_chrom, **{'chromosome': chromosome})\n output_logger.info(\n \"Analysis restricted to chr %s\" %str(chromosome) )\n\n snps.update_summary()\n output_logger.info(\"Analysis. Number of SNPs: %s\\n, Number of genes: %s\\n\" \\\n %(str(snps.n_snps), str(snps.n_genes)) )\n\n\n if model =='normal':\n [intercept, slope] = heritability.gw_normal(snps, output_summary_filename, output_logger,\n sweeps, burnin, n_chains, n_cores, N_1kG, fix_intercept)\n elif model=='gamma':\n [intercept, slope] = heritability.gw_normal(snps, output_summary_filename, output_logger,\n sweeps, burnin, n_chains, n_cores, N_1kG, fix_intercept)\n else:\n logging.info('Normal model by default')\n [intercept, slope] = heritability.gw_normal(snps, output_summary_filename, output_logger,\n sweeps, burnin, n_chains, n_cores, N_1kG, fix_intercept)\n logging.info(\"Analysis complete\")", "def likelihood(A, B, word_list):\n\tstr_buf = []\n\tloglh = 0.0\n\tlh = 1\n\n\t# read each line and compute likelihood.\n\tfor line in sys.stdin:\n\t\tline = line.split()\n\t\t# print(line)\n\t\tif len(line) == 3:\n\t\t\tstr_buf.append((str(line[0]), str(line[1])))\n\n\t\telse:\n\t\t\t# if come to the end of a sentence\n\t\t\tif len(str_buf) != 0:\n\t\t\t\tstr_buf = [('<s>','BOS')] + str_buf + [('</s>', 'EOS')]\n\t\t\t\tfor i in range(len(str_buf) - 1):\n\t\t\t\t\t# print(str_buf[i][0], str_buf[i+1][0])\n\t\t\t\t\tif str_buf[i+1][0] in word_list:\n\t\t\t\t\t\t# print('debug: A[',str_buf[i][1],'][', str_buf[i+1][1],']:', A[ str_buf[i][1] ][ str_buf[i+1][1] ])\n\t\t\t\t\t\t# print('debug: B[',str_buf[i+1][1],'][', [str_buf[i+1][0]], ']:', B[ str_buf[i+1][1] ][str_buf[i+1][0]])\n\t\t\t\t\t\tloglh += ( log(A[ str_buf[i][1] ][ str_buf[i+1][1] ]) + log(B[ str_buf[i+1][1] ][str_buf[i+1][0]]) )\n\t\t\t\t\telse:\n\t\t\t\t\t\t# print('debug: else A[',str_buf[i][1],'][', str_buf[i+1][1],']:', A[ str_buf[i][1] ][ str_buf[i+1][1] ])\n\t\t\t\t\t\t# print('debug: else B[',str_buf[i+1][1],'][', '<UNK>', ']:', B[ str_buf[i+1][1] ][ '<UNK>' 
])\n\t\t\t\t\t\tloglh += ( log(A[ str_buf[i][1] ][ str_buf[i+1][1] ]) + log(B[ str_buf[i+1][1] ]['<UNK>']) )\n\n\t\t\t\tlh = e ** loglh\n\t\t\t\t# for s in str_buf:\n\t\t\t\t# \tprint(s[0], end=\" \")\n\t\t\t\t# print(':', lh)\n\t\t\t\tprint(lh)\n\n\t\t\t\tstr_buf = []\t\n\t\t\t\tloglh = 0\n\t\t\t\tlh = 1", "def evaluate_iob(predicted, gold, label_field, stats):\n gold_cpu = gold.cpu().numpy()\n pred_cpu = predicted.cpu().numpy()\n gold_cpu = list(gold_cpu.reshape(-1))\n pred_cpu = list(pred_cpu.reshape(-1))\n # pred_cpu = [l for sen in predicted for l in sen]\n\n id2label = {v:k for k,v in label_field.items()}\n # Compute spans for the gold standard and prediction.\n gold_spans = to_spans(gold_cpu, id2label)\n pred_spans = to_spans(pred_cpu, id2label)\n\n # Finally, update the counts for correct, predicted and gold-standard spans.\n compare(gold_spans, pred_spans, stats, 'strict')", "def __init__(self, n, g_bases, g_len, m_prob):\n self.n = n\n self.g_bases = g_bases\n self.g_len = g_len\n self.m_prob = m_prob", "def evaluate ( self , genome ) :\n\n\t\tassert isinstance( genome , Genome ), 'genome supplied must be of type cc3dtools.Genome!'\n\t\tloci = genome.get_mutated_loci()\n\t\tmatched_phenotypes = []\n\t\tphenotypes = self.phenotypes.items()\n\n\t\tfor locus in loci:\n\t\t\tfor phenotype, region in phenotypes:\n\t\t\t\t# check if the locus is in the region\n\t\t\t\t# 'locus.locus' to get the float value of that mutation rather \n\t\t\t\t# than an object!\n\t\t\t\tif locus.locus > region[0] and locus.locus < region[1]:\n\t\t\t\t\tmatched_phenotypes.append( phenotype )\n\t\treturn Counter( matched_phenotypes )", "def _process_genotypes(self, limit):\n if self.testMode:\n g = self.testgraph\n else:\n g = self.graph\n model = Model(g)\n line_counter = 0\n\n raw = '/'.join((self.rawdir, 'genotype'))\n logger.info(\"building labels for genotypes\")\n geno = Genotype(g)\n fly_tax = 'NCBITaxon:7227'\n with open(raw, 'r') as f:\n f.readline() # read the header row; skip\n filereader = csv.reader(f, delimiter='\\t', quotechar='\\\"')\n for line in filereader:\n line_counter += 1\n\n (genotype_num, uniquename, description, name) = line\n\n # if self.testMode is True:\n # if int(object_key) not in self.test_keys.get('genotype'):\n # continue\n\n # add the internal genotype to pub mapping\n genotype_id = 'MONARCH:FBgeno'+str(genotype_num)\n self.idhash['genotype'][genotype_num] = genotype_id\n\n if description == '':\n description = None\n\n if not self.testMode \\\n and limit is not None and line_counter > limit:\n pass\n else:\n if self.testMode and \\\n int(genotype_num) not in \\\n self.test_keys['genotype']:\n continue\n\n model.addIndividualToGraph(\n genotype_id, uniquename,\n Genotype.genoparts['intrinsic_genotype'],\n description)\n # we know all genotypes are in flies\n # FIXME we assume here they are in melanogaster,\n # but that isn't necessarily true!!!\n # TODO should the taxon be == genomic background?\n geno.addTaxon(fly_tax, genotype_id)\n genotype_iid = self._makeInternalIdentifier(\n 'genotype', genotype_num)\n model.addComment(\n genotype_id, genotype_iid)\n if name.strip() != '':\n model.addSynonym(genotype_id, name)\n\n return", "def analyse_loglike(test_data, mods):\r\n l1 = list(map(lambda x: x + ' NB', mods.names))\r\n l1.extend(list(map(lambda x: x + ' ZI', mods.names)))\r\n l1.extend(list(map(lambda x: x + ' P', mods.names)))\r\n loglikeNB = np.array(mods.compute_log_likelihood(test_data, 'NB'))\r\n loglikeZI = np.array(mods.compute_log_likelihood(test_data, 'ZI'))\r\n 
loglikeP = np.array(mods.compute_log_likelihood(test_data, 'P'))\r\n # loglikeG = np.array(mods.compute_log_likelihood_gaussian(test_data))\r\n # loglikegeo = np.array(mods.compute_log_likelihood_geom(test_data))\r\n LL = np.zeros((loglikeNB.shape[0] * 3, loglikeNB.shape[1]))\r\n LL[:loglikeNB.shape[0], :] = loglikeNB\r\n LL[loglikeNB.shape[0]:2 * loglikeNB.shape[0], :] = loglikeZI\r\n LL[2 * loglikeNB.shape[0]:3 * loglikeNB.shape[0], :] = loglikeP\r\n # LL[3 * loglikeNB.shape[0]:4 * loglikeNB.shape[0], :] = loglikeG\r\n # LL[4 * llzi.shape[0]:, :] = np.array(mods.loglikegeo)\r\n print('mean per model', list(zip(np.ma.masked_invalid(LL).sum(axis=1), map(lambda x: x.mod.name, mods.models))))\r\n print('mean per distrib')\r\n print(np.ma.masked_invalid(LL[:loglikeNB.shape[0], :]).mean())\r\n print(np.ma.masked_invalid(LL[loglikeNB.shape[0]:loglikeNB.shape[0] * 2, :]).mean())\r\n print(np.ma.masked_invalid(LL[loglikeNB.shape[0] * 2:loglikeNB.shape[0] * 3, :]).mean())\r\n # print(np.nanmean(LL[1-np.isinf(LL)], axis=1))\r\n # print(np.nanmean(LL[LL != np.inf],axis=1))\r\n LL[np.isnan(LL)] = 0\r\n LL[np.isinf(LL)] = 0\r\n LL[LL == 0] = -np.inf\r\n r = np.argmax(LL, axis=0)\r\n # LL /= mx\r\n print('mean_best', np.mean(np.ma.masked_invalid(LL[r, range(LL.shape[1])])))\r\n mx = np.max(LL, axis=0)\r\n LL = LL / mx\r\n means = test_data.get_miniOD(None)[test_data.get_stations_col(None)].mean(axis=0).to_numpy()\r\n # for i in np.unique(r):\r\n # print(means[r == i].max())\r\n print('mean NB', means[r < loglikeNB.shape[0]].mean())\r\n print('mean ZI', means[(r < 2 * loglikeNB.shape[0]) * (r > loglikeNB.shape[0])].mean())\r\n print('mean poisson', means[(r < 3 * loglikeNB.shape[0]) * (r > 2 * loglikeNB.shape[0])].mean())\r\n # print('mean ga', means[(r < 4 * llzi.shape[0]) * (r > 3 * llzi.shape[0])].mean())\r\n # print('mean Gaussian', means[r > 3 * loglikeNB.shape[0]].mean())\r\n print('model name, mean trips per model, LL/maxLL, N inf')\r\n for i in range(LL.shape[0]):\r\n print(l1[i], means[r == i].mean(), np.mean(np.ma.masked_invalid(LL[i, :])), np.sum(np.isinf(LL[i, :])))\r\n print(np.ma.corrcoef(np.ma.masked_invalid(LL[i, :]), means[:LL.shape[1]])[1, 0])\r\n plt.hist(r, bins=np.arange(-0.5, 3 * len(mods.names) + 1, 1))\r\n\r\n # l1.extend(list(map(lambda x: x + ' geo', mods.names)))\r\n # l1.extend(list(map(lambda x: x + ' G', mods.names)))\r\n plt.xticks(range(len(l1)), l1, rotation='vertical')\r\n plt.show()\r\n\r\n for m in mods.loglike:\r\n print(m)\r\n print(m[np.logical_not(np.isinf(m))].mean())", "def likelihoods(self, alleles):\n\n models = self.models_dict[len(alleles)]\n\n F = self.joint_frequencies_combo(alleles)\n\n ### BPH ###\n (((A0, A1),((B0,),)),) = models['BPH'][1].items()\n\n BPH = (A0 / A1) * F[B0]\n\n\n BPH += sum( sum(F[B0] * F[B1] for (B0, B1) in C) * A0 / A1\n for (A0, A1), C in models['BPH'][2].items())\n\n if len(alleles)>2:\n BPH += sum( sum(F[B0] * sum( F[B1] * F[B2] for (B1, B2) in C[B0]) for B0 in C) * A0 / A1\n for (A0, A1), C in models['BPH'][3].items())\n\n ### SPH ###\n (((A0, A1),((B0,),)),) = models['SPH'][1].items()\n SPH = (A0 / A1) * F[B0]\n\n SPH += sum( sum(F[B0] * F[B1] for (B0, B1) in C) * A0 / A1\n for (A0, A1), C in models['SPH'][2].items())\n\n ### DIPLOIDY ###\n (((A0, A1),((B0,),)),) = models['DISOMY'][1].items()\n DISOMY = (A0 / A1) * F[B0]\n\n DISOMY += sum( sum( F[B0] * F[B1] for (B0, B1) in C) * A0 / A1\n for (A0, A1), C in models['DISOMY'][2].items())\n\n ### MONOSOMY ###\n ((B0,),) = models['MONOSOMY'][1][(1,1)]\n MONOSOMY = F[B0]\n\n result = 
likelihoods_tuple(MONOSOMY, DISOMY, SPH, BPH)\n return result", "def get_bases():\n\treturn ((MV.ONE,),) + MV.blades[1:]\n\t# return ((MV.ONE,),) + MV.bases[1:]", "def calculateLogJointProbabilities(self, datum):\n\tlogJoint = util.Counter()\n\t#want to calculate log(P(y)) + log(sum(P(fi|y)))\n\t#where y is a label\n\tfor label in self.legalLabels:\n\t\tlogJoint[label] = math.log(self.prior_distribution_prob[label])\n\t\tfor feature, value in datum.items():\n\t\t\tcp = self.conditional_prob[label][feature][value]\n\t\t\tif cp > 0: #condition check for values < 0 because log(0) is undefined and math domain error occurs\n\t\t\t\tlogJoint[label] += math.log(cp) #summing up\n\t\t\t\t\n\treturn logJoint", "def genes():\n return [\"b2935\", \"b0723\", \"b0451\"]", "def testInternal():\n\n deflike = trainInternalLikelihood() #perform the training\n\n token_counts = Counter()\n avedeflike = Counter()\n\n with open (\"../data/2016/data/test\", \"r\") as f:\n records = re.split(\"\\n\\n\",f.read().strip()) #separate by double new line\n\n for record in records:\n data = [re.split(\"\\t\", d) for d in re.split(\"\\n\", record)]\n try:\n tokens, tags = zip(*data)\n except:\n print data\n pass\n\n for token in tokens:\n token_counts[token] += 1.\n denom = len(token)\n for indices, f in fqs(token, 0.5):\n context, numer = internalContext(indices, token)\n if deflike[context]:\n avedeflike[token] += numer/denom * f * deflike[context]\n\n avedeflike = Counter({token: avedeflike[token] / token_counts[token] for token in avedeflike})\n return avedeflike", "def log_prob(self):", "def get_prob_l_can_see_x_strict(self, obj_type):\n ret_probs = zeros(len(self.obj_names)) + 1e-12\n for i, name in enumerate(self.obj_names):\n vtags, itags_t = self.obj_to_visibility[i]\n vtags = set(vtags)\n itags = set([t for t in itags_t if not t in vtags])\n\n if obj_type in vtags:\n ret_probs[i] = 1.0\n return ret_probs", "def GoAnnot(prots, gos, onlyProts=False):\r\n with resources.open_text(\"autoprot.data\",\"Homo_sapiens.gene_info\") as d:\r\n geneInfo = pd.read_csv(d, sep='\\t')\r\n with resources.open_text(\"autoprot.data\",\"gene2go_alt\") as d:\r\n gene2go = pd.read_csv(d, sep='\\t')\r\n prots = pd.DataFrame(pd.Series([str(i).upper().split(';')[0] for i in prots]), columns=[\"Gene names\"])\r\n prots = prots.merge(geneInfo[[\"Symbol\", \"GeneID\"]], left_on=\"Gene names\", right_on=\"Symbol\", how='inner')\r\n \r\n prots = prots.merge(gene2go[[\"GeneID\", \"GO_ID\", \"GO_term\"]], on=\"GeneID\", how='inner')\r\n if onlyProts == True:\r\n for idx, go in enumerate(gos):\r\n if idx == 0:\r\n redProts = prots[\"Symbol\"][prots[\"GO_term\"].str.contains(go)]\r\n else:\r\n redProts = redProts.append(prots[\"Symbol\"][prots[\"GO_term\"].str.contains(go)])\r\n return redProts.drop_duplicates()\r\n else: \r\n for idx, go in enumerate(gos):\r\n if idx == 0:\r\n redProts = prots[prots[\"GO_term\"]==go]\r\n else:\r\n redProts = redProts.append(prots[prots[\"GO_term\"]==go])\r\n return redProts.drop_duplicates()", "def getOneRead(self, pos, probs):\n if not self.isValid(pos, pos + self.readlen):\n return False\n bases = []\n f = self.stream\n f.seek(pos)\n n = 0\n while True:\n b = f.read(1)\n if b == '>':\n return False\n if b not in \"ACGTNXacgtnx\":\n continue\n if random.random() < probs[n]:\n while True:\n nb = random.choice('ACGT')\n if nb != b:\n b = nb\n break\n bases.append(b)\n n += 1\n if n == self.readlen:\n break\n return bases", "def gt_bases(self):\n result = []\n for a in self.gt_alleles:\n if a is None:\n 
result.append(None)\n elif a == 0:\n result.append(self.site.REF)\n else:\n result.append(self.site.ALT[a - 1].value)\n return tuple(result)", "def probabilities_of_structures(sequence, structure_list, react=None):\n ensemble_energy = get_ens_energy(sequence, react = react)\n energies = [get_stru_energy(x, sequence, react = react) for x in structure_list]\n probabilities = [energy_to_proba(ensemble_energy, x) for x in energies]\n #probabilities = normalize(probabilities, norm='l1').tolist()[0]\n return [(stru,proba) for stru,proba in zip(structure_list,probabilities)]", "def recognize_ngram(models: dict, test_set: SinglesData,probs,BIC_guesses):\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n probabilities = []\n guesses = []\n\n model = arpa.loadf(\"devel-lm-M3.sri.lm\")\n lm = model[0] # ARPA files may contain several models.\n # TODO implement the recognizer\n # return probabilities, guesses\n test_sequences = list(test_set.get_all_Xlengths().values())\n word_keys = list(test_set.get_all_Xlengths().keys())\n i = -1 \n for sentence in test_set.sentences_index.values():\n f = {}\n maxs = float(\"-inf\")\n prob = []\n words = []\n\n sentenceLength = 0\n for word_index in sentence:\n i+=1\n word = test_set.wordlist[word_index]\n sentenceLength+=1\n try:\n f[word] = probs[word][i]\n except:\n f[word] = float(\"-inf\")\n prob.append(f[word]) ## These are Just the probabilities unchanged from the BIC recognizer.\n \n # Find Six most probable words and generate the possible permutations \n sixwords = sorted(f,key=f.get,reverse=True)[:6]\n for k in permutations(sixwords, r=sentenceLength):\n l = 0\n for j in range(len(k)):\n l += f[k[j]]\n try:\n sentenceLP = l + 13*lm.log_s(\" \".join(k)) ## According to one student in the forum 13 is the best hyperparameter\n if sentenceLP > maxs: ## https://discussions.udacity.com/t/slm-data-for-this-asl-dataset/230822/8?u=spiros\n sentence = \" \".join(k)\n maxs = sentenceLP\n words = list(k)\n except:\n pass\n\n if(words == []):\n words = BIC_guesses[len(guesses):len(guesses)+sentenceLength] ## Fall back to BIC guesses\n probabilities.append(prob) \n guesses += words\n return (probabilities,guesses)", "def calculate_likelihoods_bernoulli(data, labels, vocab):\r\n classes = set(labels)\r\n likelihoods = {}\r\n # Calculate likelihood for each class\r\n for cls in classes:\r\n documentsInClass = [set(map(lambda y: y[0], data[x])) for x in range(len(data)) if labels[x] == cls]\r\n numDocsInClass = len(documentsInClass)\r\n results = {}\r\n for word in vocab:\r\n numDocsWithWordInClass = len(filter(lambda x: word in x, documentsInClass))\r\n # Binary variable-- either present or not present\r\n results[word] = laplace_smooth(numDocsWithWordInClass, numDocsInClass, 2)\r\n # Special laplace smoothing for words not found in training data\r\n results[None] = laplace_smooth(0, numDocsInClass, 2)\r\n likelihoods[cls] = results\r\n return likelihoods", "def gene_heritability(\n input_snp_filename: \"Data Input, use the SNPs file from dataParse\",\n output_genes_filename: 'output file for gene-level results, use .csv',\n output_summary_filename: 'output file for the genomewide results summary, use .csv',\n logger_filename: 'file for the logger, use a txt',\n sweeps: \"number of samples for each chain\" = 1000,\n burnin: \"number of burnin samples\" = 1000,\n n_chains: \"number of chains of the sampler\" = 4,\n n_cores: \"number of parallel cores to use\" = 4,\n N_1kG: \"number of SNPs onwhich the LD-score is calculated\" = 1290028,\n chromosome: 
\"chromosome on which the analysis is run\" = \"all\",\n snp_thr: \"threshold for the minimum number of SNPs in a gene\" = 10,\n sep: \"separator for the input files, use t for tab separated (not \\t)\" = \",\",\n model: 'specify the model for the regression, one betwenn normal/gamma' = 'normal',\n fix_intercept = False,\n ):\n\n # Initialisation of the logger\n output_logger = log.setup_logger(\"output_logger\", logger_filename)\n log.initialise_log(output_logger,\n 'gene level regression, model: %s' %model,\n [input_snp_filename],\n [output_genes_filename,output_summary_filename],\n sweeps,\n burnin,\n chromosome = str(chromosome),\n other_params_diz = {'chains': n_chains, 'cores': n_cores, 'SNP threshold': snp_thr})\n\n # Initialisation function, it reads the summary stats file, filters the SNPs,\n # creates the output files\n\n logging.info(\"Start Analysis\")\n\n snps = s.Snps()\n # read table\n logging.info(\"Reading SNP file: %s,\\n\\t with %s delimiter\"%(input_snp_filename, sep))\n snps.read_table(input_snp_filename, separator=sep)\n # generate chi squared stats\n snps.generate_stats()\n # update the summary stats\n snps.update_summary()\n output_logger.info(\" Sample size \" + str(snps.n_patients) + \"\\n\")\n\n\n\n snps.apply_filter_table(s.baghera_filter)\n snps.update_summary()\n output_logger.info(\"After baghera init filter.\\n\\t Number of SNPs: %s\\n\\t Number of genes: %s\\n\" \\\n %(str(snps.n_snps), str(snps.n_genes)) )\n\n # Non coding SNPs are assigned to a dummy gene, such that the regression is done on the entire SNPs' set\n snps.rename_non_annotated(name='NonCoding')\n\n if chromosome != \"all\":\n snps.apply_filter_table(snps.cut_single_chrom, **{'chromosome': chromosome})\n output_logger.info(\n \"Analysis restricted to chr %s\" %str(chromosome) )\n\n snps.update_summary()\n output_logger.info(\"Analysis. 
Number of SNPs: %s\\n, Number of genes: %s\\n\" \\\n %(str(snps.n_snps), str(snps.n_genes)) )\n\n # Creates the genes table with the number of SNPs for each gene and the basic stats values\n genes=g.Genes()\n genes.initialise_genes(snps.table.copy(), snps_thr=snp_thr)\n\n output_logger.info(\"Output gene table initialised:\\nNumber of genes: %s\\n\" \\\n %(str(genes.n_genes)) )\n\n snps.set_non_annotated(genes.cut_genes, 'NonCoding')\n\n if model == 'gamma':\n result = gr.analyse_gamma(snps, output_summary_filename, output_logger,\n sweeps, burnin, n_chains, n_cores, N_1kG, fix_intercept,\n )\n else:\n result = gr.analyse_normal(snps, output_summary_filename, output_logger,\n sweeps, burnin, n_chains, n_cores, N_1kG, fix_intercept,\n )\n\n logging.info(\"Saving genes table\")\n genes.table = genes.table.merge(\n result, left_index=False, left_on=\"name\", right_on=\"name\")\n\n k = genes.table.n_snps / float(N_1kG)\n genes.table[\"h2g\"] = genes.table.bg_mean.astype(\"float\") * k\n\n genes.table = genes.table.sort_values(by=[\"P\", \"bg_median\"])\n\n genes.save_table(output_genes_filename)\n\n non_coding = genes.table[genes.table.name == \"NonCoding\"]\n h2g_tot = np.sum(genes.table[\"h2g\"].values) - non_coding[\"h2g\"].values\n\n output_logger.info(\" Non coding heritability : \" +\n str(non_coding[\"h2g\"].values) + \"\\n\")\n output_logger.info(\" Coding heritability : \" + str(h2g_tot) + \"\\n\")", "def test_Bernoulli_NB_estimators():", "def mult_reads_gmm(reads, training_reads, components):\n\n\tprediction_zero_100 = 0\n\tprediction_one_100 = 0\n\tprediction_zero_200 = 0\n\tprediction_one_200 = 0\n\n\tbase_opts = ['A', 'C', 'G', 'T']\n\n\n\tmodel = mixture.GMM(n_components=components, covariance_type='spherical')\n\tnum_reads = len(reads)\n\n\ttraining_reads = [read.get_read().replace('\\'', '') for read in training_reads]\n\n\tread_input = [read.get_read().replace('\\'', '') for read in reads]\n\t# alignment_inputs = []\n\t# alignment_inputs.extend(read.get_alignments())\n\n\t# Generates observations\n\t# bases are converted to their ascii character values\n\tread_list = []\n\tfor read in read_input:\n\t\tread_char = [convert_letter(c) for c in read]\n\t\tread_list.append(read_char)\n\n\tobservations = []\n\t\n\tfor alignment in training_reads:\n\t\talignment_list = [convert_letter(c) for c in alignment] \n\t\tobservations.append( alignment_list )\n\t# for base_index, base in enumerate(read_main):\n\t# \tbase_observations = [ord(base)]\n\t# \tfor alignment in alignments:\n\t# \t\tbase_observations.append(ord(alignment[base_index]))\n\n\t# \tobservations.append(base_observations)\n\n\tmodel.fit(observations)\n\tmeans = np.round(model.means_, 2)\n\tcovars = np.round(model.covars_, 2)\n\tconverted_means = []\n\tfor num_list in means:\n\t\t# convert to nearest acceptable letter\n\t\t#char_means = [chr(int(n)) for n in num_list]\n\t\tchar_means = [convert_to_letter(n) for n in num_list]\n\t\tconverted_means.append(char_means)\n\t\n\tpredictions = model.predict(read_list)\n\n\tread_predictions = []\n\tfor index, prediction in enumerate(predictions):\n\t\tmapping = [prediction, reads[index]]\n\t\tread_predictions.append(mapping)\n\t\n\n\tfor read_pr in read_predictions:\n\t\t\n\t\tprediction = read_pr[0]\n\t\t# def filt(x): return x[0] == prediction\n\t\t# matches = filter(filt, read_predictions)\n\t\tpr = prediction\n\t\trps = int(float(read_pr[1].get_position()))\n\t\t# print '\\n'\n\t\t# print prediction\n\t\t# print 'Converted Means: '\n\t\t# print 
''.join(converted_means[prediction])\n\t\t# print 'Actual Read'\n\t\t# print read_pr[1].get_read()\n\t\t# print read_pr[1].get_position()\n\t\t# print 'Matches'\n\t\t# for m in matches:\n\t\t# \tprint m[1].get_read() + ' Position: ' + m[1].get_position()\n\t\t# \tm[1].print_read()\n\n\t\tif pr == 0:\n\t\t\tif rps == 100:\n\t\t\t\tprediction_zero_100 = prediction_zero_100 + 1\n\t\t\t\t\n\t\t\telse:\n\t\t\t\tprediction_zero_200 = prediction_zero_200 + 1\n\t\t\t\t\n\t\telse:\n\t\t\tif rps == 100:\n\t\t\t\tprediction_one_100 = prediction_one_100 + 1\n\t\t\t\t\n\t\t\telse:\n\t\t\t\tprediction_one_200 = prediction_one_200 + 1\n\t\t\t\t\n\n\tprint '\\n-------------Predictions---------------------'\n\tprint 'Prediction: 0 Position: 100 Num: ' + str(prediction_zero_100)\n\tprint 'Prediction: 1 Position: 100 Num: ' + str(prediction_one_100)\n\tprint 'Prediction: 0 Position: 200 Num: ' + str(prediction_zero_200)\n\tprint 'Prediction: 1 Position: 200 Num: ' + str(prediction_one_200)\n\n\tprint '\\n------Means: -----------'\n\tfor mean in converted_means:\n\t\tprint ''.join(mean) \n\n\t# for index, prediction in enumerate(predictions):\n\t# \tprint 'Read: '\n\t# \tprint reads[index].get_read()\n\t# \tprint 'Prediction: '\n\t# \tprint prediction\n\t# \tprint converted_means[prediction]\n\t# \tprint 'Means: '\n\t# \tprint means[prediction]\n\t# \tprint covars[prediction]\n\t# \tprint '----------------------------------------\\n'\n\n\n\t# posteriors = model.predict_proba(read_list)\n\t# print model.get_params(deep=True)\n\t# sample = model.sample()\n\t# print [convert_to_letter(n) for n in sample[0]]", "def all_genotype(ploidy):\n return [\"\".join(comb) for comb in cwr(\"ACGT-\", ploidy)]", "def _build_genotypes(self):\n x = np.zeros(self.n)\n \n # Frequencies derived from HWE.\n num_hetero = 2 * self.maf * (1 - self.maf) * self.n\n num_homo_minor = self.maf ** 2 * self.n\n \n x[:num_hetero] = 1\n x[num_hetero:num_hetero+num_homo_minor] = 2\n np.random.shuffle(x)\n \n # Add noise for dosage values if needed.\n if self.dosage_var:\n x[x == 0] += np.abs(\n np.random.normal(0, self.dosage_var, len(x[x == 0]))\n )\n x[x == 1] += np.random.normal(0, self.dosage_var, len(x[x == 1]))\n x[x == 2] -= np.abs(\n np.random.normal(0, self.dosage_var, len(x[x == 2]))\n )\n\n # Mask some values if the call rate is not 1.\n if self.call_rate < 1:\n missing_rate = 1 - self.call_rate\n missing_number = missing_rate * self.n\n missing_idx = np.arange(0, self.n)\n np.random.shuffle(missing_idx)\n missing_idx = missing_idx[:missing_number]\n x[missing_idx] = np.nan\n \n return x", "def base_composition(reads, base):\n assert base.upper() in set(\"ACGT\")\n\n \"\"\" Reports nucelotide frequencies at each position in the\n sam sequences\n \"\"\"\n # DNA_Alphabet=[\"A\",\"C\",\"T\",\"G\",\"N\"]\n all_nucs = []\n for read in reads:\n nucs = {} # Dictionary to store nucleotide data.\n seq = read[9]\n for i in range(0, len(seq)):\n nucs[str(i + 1)] = seq[i]\n all_nucs.append(nucs)\n all_items = []\n counts = []\n for dicts in all_nucs:\n for item in dicts.items():\n all_items.append(item)\n all_items.sort(key=operator.itemgetter(0))\n groups = [map(operator.itemgetter(1), list(group))\n for key, group in itertools.groupby(\n all_items, operator.itemgetter(0))]\n for group in groups:\n counts.append(group.count(base))\n\n pos = range(1, len(seq) + 1)\n\n # Create plot.\n plt.figure(1, figsize=(8, 8))\n plt.axes([0.1, 0.1, 0.8, 0.8])\n plt.bar(pos, counts, facecolor='g')\n plt.xlabel(\"Position\")\n plt.ylabel(\"number of mapped 
reads\")\n plt.title(base)\n plt.show()", "def test_11(self):\n for _ in range(1000):\n num_types = np.random.randint(1, 10)\n edu_start = np.random.randint(10, 100)\n type_shares = np.random.normal(0, 1, size=num_types * 2)\n\n args = [type_shares, np.array([edu_start])]\n\n py = get_conditional_probabilities(*args)\n fort = fort_debug.wrapper_get_conditional_probabilities(*args + [num_types])\n\n assert_almost_equal(np.sum(py), 1.0)\n assert_almost_equal(py, fort)", "def genotype_probabilities_of(self, member_name):\n # For members already calculated (including the ones with power, who automatically have an aa genotype):\n if self.members[member_name].is_already_processed:\n return self.members[member_name].genotype_probabilities\n\n # Any Asgardian w/o power, and with a parent OR child w/ power, must be genotype Aa.\n if self.any_parent_has_power(member_name) or self.any_child_has_power(member_name):\n return np.array([0, 1, 0])\n\n # Any Asgardian with neither power, nor parents or children with power, has either genotype AA or Aa.\n # If he or she has parents, the probability will depend on genotype probabilities of parents.\n # If not, then 50/50 is assigned.\n if not self.has_parents(member_name):\n return np.array([0.5, 0.5, 0])\n\n parent_genotypes = []\n for parent in self.parents_of(member_name):\n parent_genotypes.append(self.genotype_probabilities_of(parent.name))\n \n probs = np.zeros((3,))\n for i in range(3):\n for j in range(3):\n p = parent_genotypes[0][i]*parent_genotypes[1][j]\n w = WEIGHTS[i, j]\n probs += p * w\n \n # Since we don't have the power, probability of aa is 0:\n probs[2] = 0\n \n # Renormalize:\n probs /= sum(probs)\n \n return probs", "def evaluate(self, test_data):\n log_like = 0\n for sequence in test_data:\n for event, context in self.extract_ngrams(sequence):\n log_like += math.log(self.cond_prob(event, context))\n return log_like", "def processNT(organism, chain, nuc, quals):\n\n ch = chain.lower()\n\n quals = np.array(quals.split('.')).astype(int)\n res = parse_unpaired_dna_sequence_blastn(organism, chain, nuc, info='',\n nocleanup=False, hide_nucseq=False,\n extended_cdr3=True,\n return_all_good_hits=True,\n max_bit_score_delta_for_good_hits=50)\n genes,evalues,status,all_good_hits_with_scores = res\n labels = ['v%s_gene','v%s_rep', 'v%s_mm', 'j%s_gene', 'j%s_rep', 'j%s_mm', 'cdr3%s_plus']\n tmp = {g:v for g,v in zip([lab % ch for lab in labels], genes)}\n tmp.update({'%s_evalue' % k.lower():evalues[k][0] for k in evalues.keys()})\n tmp.update({'%s_bitscore_gap' % k.lower():evalues[k][1] for k in evalues.keys()})\n\n tmp['%s_status' % ch] = 'OK' if not status else status\n tmp['%s_good_hits' % ch] = all_good_hits_with_scores\n\n tmp['cdr3%s' % ch],tmp['cdr3%s_nucseq' % ch] = tmp['cdr3%s_plus' % ch].split('-')\n tmp['cdr3%s_quals' % ch] = get_qualstring( tmp['cdr3%s_plus' % ch], nuc, quals )\n tmp['v%s_mismatches' % ch] = tmp['v%s_mm' % ch][0]\n tmp['j%s_mismatches' % ch] = tmp['j%s_mm' % ch][0]\n tmp['v%s_alignlen' % ch] = np.sum(tmp['v%s_mm' % ch])\n tmp['j%s_alignlen' % ch] = np.sum(tmp['j%s_mm' % ch])\n\n hits = tmp['%s_good_hits' % ch]\n if hits and len(hits) == 2 and hits[0] and hits[1]:\n tmp['v%s_blast_hits' % ch] = ';'.join( '{}:{}'.format(x[0],x[1]) for x in hits[0] )\n tmp['j%s_blast_hits' % ch] = ';'.join( '{}:{}'.format(x[0],x[1]) for x in hits[1] )\n va_genes = util.get_top_genes( tmp['v%s_blast_hits' % ch] ) ## a set\n ja_genes = util.get_top_genes( tmp['j%s_blast_hits' % ch] )\n tmp['v%s_genes' % ch] = ';'.join( sorted( va_genes ) 
)\n tmp['j%s_genes' % ch] = ';'.join( sorted( ja_genes ) )\n tmp['v%s_reps' % ch] = ';'.join( sorted( util.get_top_reps( tmp['v%s_blast_hits' % ch], organism ) ) )\n tmp['j%s_reps' % ch] = ';'.join( sorted( util.get_top_reps( tmp['j%s_blast_hits' % ch], organism ) ) )\n tmp['v%s_countreps' % ch] = ';'.join( sorted( set( (util.get_mm1_rep_gene_for_counting(x,organism) for x in va_genes ))))\n tmp['j%s_countreps' % ch] = ';'.join( sorted( set( (util.get_mm1_rep_gene_for_counting(x,organism) for x in ja_genes ))))\n\n chain = TCRChain(**tmp)\n return chain", "def basisVar(bases):\n \n var = [] # The ordered list of active coordinates \n k = 0\n for b in bases:\n if np.isscalar(b):\n # A singleton, inactive coordinate\n k += 1\n else:\n # An active basis\n for i in range(b.nd):\n var.append(k)\n k += 1 \n return var", "def log_likelihood_ratios(self, groundtype):\n\t\tif groundtype == 'Foreground':\n\t\t\tLLR = self.foreground['LLR']\n\t\telif groundtype == 'Background':\n\t\t\tLLR = self.background['LLR']\n\t\t\n\t\treturn LLR", "def readGenos(self,genofile):\n self.gen = np.zeros((len(self.ped),len(self.mark)))\n self.gen[:] = np.nan\n marklist = None\n with open(genofile,'r') as fin:\n for line in fin:\n if line.startswith('#'):\n if not marklist: marklist = line.strip('#').strip().split()\n continue\n l = line.strip().split()\n if len(l) < 1: continue\n try: irow = self.ped[l[self.nc]]['rank']\n except KeyError:\n continue\n for i,mark in enumerate(self.marklist):\n if mark not in self.mark: continue\n icol = self.mark[mark]['rank']\n if self.ia == 1:\n a = l[i+self.ic]\n elif self.ia == 2:\n a = self.tbase012(l[i+self.ic],mark)\n elif self.ia == 3:\n a = self.tbase012(l[i*2+self.ic]+l[i*2+1+self.ic],mark)\n if a not in ['0','1','2']: a = np.nan\n else: a = int(a)\n self.gen[irow,icol] = a", "def test_Sobol_G_raises_error_if_values_not_numpy_array():\n fixture = [list(range(8)), str(12345678)]\n for x in fixture:\n evaluate(x)", "def compatibility_g_a(gen, anot):\n print(\"Checking compatibility of genome with annotation file\")\n r_code = 0\n for seq in gen:\n if seq not in anot:\n print(\"WARN\\t{} sequence not found in annotaion file\".format(seq))\n r_code = 1\n for seq in anot:\n if seq not in gen:\n print(\"FAIL\\t{} sequence in annotation \"\n \"but not in genome.\".format(seq))\n r_code = 2\n elif anot[seq] > gen[seq]:\n print(\"FAIL\\tannotation interval on {} sequence is out of \"\n \"reference range.\".format(seq))\n r_code = 2\n print()\n return r_code", "def gen_profile(self):\n return np.array(self['gen'], dtype=np.float32)", "def gen_profile(self):\n return np.array(self['gen'], dtype=np.float32)", "def test_convert_likelihood_roundtrip():\n significance = [1, 3, 5, 10]\n\n for df in [1, 2, 3, 4]:\n # TODO: add tests for `chi2`\n for to in ['probability', 'ts']:\n val = convert_likelihood(to=to, significance=significance, df=df)\n significance2 = convert_likelihood(to='significance', df=df, **{to: val})\n assert_allclose(significance2, significance)", "def simulate_genotype_calls(\n n_variant: int, n_sample: int, p: Tuple[float, float, float], seed: int = 0\n) -> DataArray:\n rs = np.random.RandomState(seed)\n # Draw genotype codes with provided distribution\n gt = np.stack(\n [\n rs.choice([0, 1, 2], size=n_sample, replace=True, p=p)\n for i in range(n_variant)\n ]\n )\n # Expand 3rd dimension with calls matching genotypes\n gt = np.stack([np.where(gt == 0, 0, 1), np.where(gt == 2, 1, 0)], axis=-1)\n return xr.DataArray(gt, dims=(\"variants\", \"samples\", \"ploidy\"))", 
"def sample(self, like_params):\n\t\tassert len(like_params) == 1, f\"BernoulliLikelihood only takes\" \\\n\t\t\t\t+ f\" a single parameter. Found {len(like_params)}.\"\n\t\t# Unwrap the single parameter tuple.\n\t\tlike_params = like_params[0] # [b,s,m,m_dim]\n\t\tdist = Bernoulli(logits=like_params)\n\t\tsamples = dist.sample()\n\t\treturn (samples,)", "def retr_metr(gdat, indxvaluthis=None, strgvarbthis=None):\n\n metr = np.zeros((gdat.numbepoc, 2, 3 )) - 1\n\n loss = np.empty(gdat.numbepoc)\n numbepocchec = 5\n \n print gdat.modl.summary()\n for y in gdat.indxepoc:\n print 'Training epoch %d...' % y\n histinpt = gdat.inpttran[:, :, None]\n hist = gdat.modl.fit(histinpt, gdat.outptran, epochs=1, batch_size=gdat.numbdatabtch, verbose=1)\n loss[y] = hist.history['loss'][0]\n indxepocloww = max(0, y - numbepocchec)\n \n for layr in gdat.modl.layers:\n func = keras.backend.function([gdat.modl.input, keras.backend.learning_phase()], [layr.output])\n \n listweigbias = layr.get_weights()\n #assert len(listweigbias) == 2\n print 'listweigbias'\n for n in range(len(listweigbias)):\n print 'n'\n print n\n print 'listweigbias[n]'\n summgene(listweigbias[n])\n stat = func([histinpt, 1.])\n print 'type(stat)'\n print type(stat)\n print 'len(stat)'\n print len(stat)\n for n in range(len(stat)):\n print 'stat[n]'\n summgene(stat[n])\n print\n print\n\n\n if y == gdat.numbepoc - 1 and 100. * (loss[indxepocloww] - loss[y]):\n print 'Warning! The optimizer may not have converged.'\n print 'loss[indxepocloww]\\n', loss[indxepocloww], '\\nloss[y]\\n', loss[y], '\\nloss\\n', loss\n\n for r in gdat.indxrtyp:\n if r == 0:\n inpt = gdat.inpttran\n outp = gdat.outptran\n numdatatemp = gdat.numbdatatran\n else:\n inpt = gdat.inpttest\n outp = gdat.outptest\n numbdatatemp = gdat.numbdatatest\n inpt = inpt[:, :, None]\n \n outppredsigm = gdat.modl.predict(inpt)\n outppred = (outppredsigm > 0.5).astype(int)\n matrconf = confusion_matrix(outp, outppred)\n if matrconf.size == 1:\n matrconftemp = np.copy(matrconf)\n matrconf = np.empty((2, 2))\n matrconf[0, 0] = matrconftemp\n trne = matrconf[0, 0]\n flpo = matrconf[0, 1]\n flne = matrconf[1, 0]\n trpo = matrconf[1, 1]\n \n if float(trpo + flpo) > 0:\n metr[y, r, 0] = trpo / float(trpo + flpo) # precision\n else:\n pass\n #print ('No positive found...')\n #raise Exception('')\n metr[y, r, 1] = float(trpo + trne) / (trpo + flpo + trne + flne) # accuracy\n if float(trpo + flne) > 0:\n metr[y, r, 2] = trpo / float(trpo + flne) # recall\n else:\n print 'No relevant sample!'\n #raise Exception('')\n \n print 'metr[y, r, :]'\n print metr[y, r, :]\n print \n return metr", "def calc_genotype(self, arch_param):\n\n def _parse(weights, genos):\n gene = []\n n = 2\n start = 0\n for i in range(self.steps):\n end = start + n\n W = weights[start:end].copy()\n G = genos[start:end].copy()\n edges = sorted(range(i + 2),\n key=lambda x: -max(W[x][k] for k in range(len(W[x])) if G[x][k] != 'none'))[:2]\n for j in edges:\n k_best = None\n for k in range(len(W[j])):\n if G[j][k] != 'none':\n if k_best is None or W[j][k] > W[j][k_best]:\n k_best = k\n gene.append([G[j][k_best], i + 2, j])\n start = end\n n += 1\n return gene\n\n normal_param = np.array(self.darts_cfg.super_network.normal.genotype)\n reduce_param = np.array(self.darts_cfg.super_network.reduce.genotype)\n geno_normal = _parse(arch_param[0], normal_param[:, 0])\n geno_reduce = _parse(arch_param[1], reduce_param[:, 0])\n return [geno_normal, geno_reduce]", "def random_strings(sequence, GC_array):\r\n\r\n AT = 0\r\n GC = 
0\r\n\r\n for nt in sequence:\r\n if nt == \"A\" or nt == \"T\":\r\n AT += 1\r\n elif nt == \"G\" or nt == \"C\":\r\n GC += 1\r\n\r\n probabilities = []\r\n\r\n #Calculate probability of G = probability of C = %GC / 2\r\n #Calculate probability of A = probability of T = (1 - %GC) / 2\r\n\r\n #For each consecutive base in provided sequence:\r\n #1. Convert total probability to logarithm using math.log(probability, base=10)\r\n #2. Total probability to be multiplied by probability of specifically that base\r\n\r\n for i in range(len(GC_array)):\r\n prob = (AT * math.log10((1 - GC_array[i])/2)) + (GC * math.log10(GC_array[i]/2))\r\n\r\n probabilities.append('%0.3f' % prob)\r\n\r\n print(*probabilities, sep= \" \")", "def get_IOUs_enhanced(annotations, predictions, n_classes, consider_class = True):\n NUMBER_OF_BBOX = annotations.shape[-2]\n obj_indexes = np.where(annotations[:,:,:,:,0] == 1)\n annotated_bboxes = annotations[obj_indexes][:][:,n_classes+1:n_classes+1+4]\n bboxes_iou = np.zeros([annotated_bboxes.shape[0], NUMBER_OF_BBOX])\n annotated_class = np.argmax(annotations[obj_indexes][:][:, 1: 1 + n_classes], axis = -1)\n bboxes_indexes = (*obj_indexes[0:-1],)\n predictions_filtered = predictions[bboxes_indexes]\n for i in range(NUMBER_OF_BBOX):\n predicted_data = predictions_filtered[:,i,:]\n predicted_bboxes = predicted_data[:,n_classes+1:n_classes+1+4]\n predicted_class = np.argmax(predicted_data[:, 1: 1 + n_classes], axis = -1)\n predicted_class_prob = np.max(softmax(predicted_data[:, 1: 1 + n_classes]), axis = -1)\n predicted_obj_prob = sigmoid(predicted_data[:, 0])\n IOUs, _ = getIUO(annotated_bboxes, \n predicted_bboxes, \n from_center_to_box=True)\n if consider_class:\n bboxes_iou[:,i] = IOUs * (predicted_class == annotated_class) # * predicted_obj_prob * predicted_class_prob\n else:\n bboxes_iou[:,i] = IOUs\n best_bbox_idxs = np.argmax(bboxes_iou, axis = 1)\n best_bbox_ious = np.max(bboxes_iou, axis = 1)\n return np.mean(best_bbox_ious)", "def test_Gaussian_NB_estimators():", "def samples(self, gp):\r\n raise NotImplementedError", "def get_likelihoods(self, alleles):\n\n l = len(alleles)\n if l==2:\n result = self.likelihoods2(alleles)\n elif l==3:\n result = self.likelihoods3(alleles)\n elif l==4:\n result = self.likelihoods4(alleles)\n elif l==5:\n result = self.likelihoods5(alleles)\n else:\n result = self.likelihoods(alleles)\n return result", "def observation(self):\n # This consists of two things:\n # - the measured bitstrings\n # - the vectorized representation of the optimization problem\n #\n # In particular, the first 10*NUM_SHOTS (i.e. 100) entries are measured\n # qubit values. The remaining entries are the weights of the problem\n # graph.\n return np.concatenate([self.bitstrings.flatten(), self._prob_vec])", "def Likeli(data, dist, limits,**kwargs):\n n = len(data) # Number of data points\n data = array(data)\n (ll,ul) = limits #limits for the parameter space\n step = (ul-ll)/1024.\n \n if dist == 'normal': # In this case, L is a function of the mean. 
SD is set to the SD(data)\n sd = std(data) #standard deviation of data\n prec = 1/sd #precision of the data\n res = array([exp(like.Normal(data,mu,prec)) for mu in arange(ll,ul,step)]) \n lik = res/max(res) # Likelihood function \n print max(lik), min(lik)\n elif dist == 'exponential':\n res = [lamb**n*exp(-lamb*sum(data)) for lamb in arange(ll,ul,step)]\n lik = array(res)/max(array(res))\n \n elif dist == 'bernoulli':\n if ll<0 or ul>1:\n print \"Parameter p of the bernoulli is out of range[0,1]\"\n res = [exp(like.Bernoulli(data,p)) for p in arange(ll,ul,step)]\n lik = array(res)/max(array(res))\n \n elif dist == 'poisson':\n res = [exp(like.Poisson(data,lb)) for lb in arange(ll,ul,step)]\n lik = array(res)/max(array(res))\n \n elif dist == 'lognormal':\n sd = std(data) #standard deviation of data\n prec = 1/sd #precision of the data\n res = [exp(like.Lognormal(data,mu,prec)) for mu in arange(ll,ul,step)]\n lik = array(res)/max(array(res)) \n else:\n print 'Invalid distribution type. Valid distributions: normal, exponential, bernoulli and poisson'\n return lik", "def get_prob_l_is_x_strict(self, obj_type):\n ret_probs = []\n\n for i in range(len(self.obj_locations[0])):\n name = self.obj_names[i]\n if name == obj_type:\n myprob = 1.0\n else:\n myprob = 1e-12\n\n ret_probs.append(myprob)\n return ret_probs", "def get_ngramlogprobs_fromcorpus(tokenizedseqs, n):\n return", "def __len__(self):\n return len(self.samples)", "def log_likelihood(self, x):\n # set nuisance parameters to their central values!\n predictions = self.get_predictions(self.shortarray_to_array(x), nuisance=False)\n m_obj = flavio.Measurement['Pseudo-measurement for FastFit instance: ' + self.name]\n m_obs = m_obj.all_parameters\n prob_dict = m_obj.get_logprobability_all(predictions)\n ll = sum(prob_dict.values())\n return ll", "def eval_exome(sample_name, snpchip_rs2alleles_normal, snpchip_rs2alleles_cancer):\n\n bin = 10\n data_dir = os.path.join('data/all_non_ref_hg19/',\n sample_name)\n mutation_calls = call_class.calls(data_dir, sample_name).data\n evals = {}\n for exome_type in mutation_calls:\n evals[exome_type] = {'consensus_quality':{}}\n# 'gentrain':{}}\n #'min_both':{}}\n for chrpos in mutation_calls[exome_type]:\n if 'snp' in mutation_calls[exome_type][chrpos]:\n snp = mutation_calls[exome_type][chrpos]['snp']\n if snp in snpchip_rs2alleles_normal or snp in snpchip_rs2alleles_cancer:\n if snp in snpchip_rs2alleles_normal:\n chip_calls_normal, gentrain_normal = snpchip_rs2alleles_normal[snp]\n else:\n chip_calls_normal, gentrain_normal = ('NA', float(0))\n if snp in snpchip_rs2alleles_cancer:\n chip_calls_cancer, gentrain_cancer = snpchip_rs2alleles_cancer[snp]\n else:\n chip_calls_cancer, gentrain_cancer = ('NA', float(0))\n normal_call = mutation_calls[exome_type][chrpos]['N']['call']\n cancer_call = mutation_calls[exome_type][chrpos]['T']['call']\n normal_quality = utils.my_round(mutation_calls[exome_type][chrpos]['N']['consensus_quality'], bin)\n cancer_quality = utils.my_round(mutation_calls[exome_type][chrpos]['T']['consensus_quality'], bin)\n # gentrain_quality_normal = utils.my_round(gentrain_normal, bin)\n # gentrain_quality_cancer = utils.my_round(gentrain_cancer, bin)\n # min_both_normal_quality = utils.my_round(min([gentrain, normal_quality]), bin)\n # min_both_cancer_quality = utils.my_round(min([gentrain, cancer_quality]), bin)\n for (a_quality, a_sample, \n a_quality_type, a_gentrain) in ((normal_quality, 'N', 'consensus_quality', gentrain_normal),\n (cancer_quality, 'T', 'consensus_quality', 
gentrain_cancer)):\n #(gentrain_quality, 'N', 'gentrain'),\n #(gentrain_quality, 'T', 'gentrain')):\n # (min_both_normal_quality, 'N', 'min_both'),\n # (min_both_cancer_quality, 'T', 'min_both')):\n if a_gentrain > float(90):\n if a_sample not in evals[exome_type][a_quality_type]:\n evals[exome_type][a_quality_type][a_sample] = {}\n if a_quality not in evals[exome_type][a_quality_type][a_sample]:\n evals[exome_type][a_quality_type][a_sample][a_quality] = [0,0]\n\n # if 'N' not in evals[exome_type]['consensus_quality']:\n # evals[exome_type]['consensus_quality']['N'] = {}\n # if normal_quality not in evals[exome_type]['consensus_quality']['N']:\n # evals[exome_type]['consensus_quality']['N'][normal_quality] = [0,0]\n\n # if 'T' not in evals[exome_type]['consensus_quality']:\n # evals[exome_type]['consensus_quality']['T'] = {}\n # if cancer_quality not in evals[exome_type]['T']:\n # evals[exome_type]['consensus_quality']['T'][cancer_quality] = [0,0]\n res = yusik_cmp_snpchip_exomeseq.check_nuc(chip_calls_normal, \n normal_call, snp)\n if res != 'NA' and gentrain_normal > float(90):\n if res:\n evals[exome_type]['consensus_quality']['N'][normal_quality][0] += 1\n #evals[exome_type]['gentrain']['N'][gentrain_quality][0] += 1\n #evals[exome_type]['min_both']['N'][min_both_normal_quality][0] += 1\n else:\n evals[exome_type]['consensus_quality']['N'][normal_quality][1] += 1\n #evals[exome_type]['gentrain']['N'][gentrain_quality][1] += 1\n #evals[exome_type]['min_both']['N'][min_both_normal_quality][1] += 1\n res = yusik_cmp_snpchip_exomeseq.check_nuc(chip_calls_cancer, \n cancer_call, snp)\n if res != 'NA' and gentrain_cancer > float(90):\n if res:\n evals[exome_type]['consensus_quality']['T'][cancer_quality][0] += 1\n #evals[exome_type]['gentrain']['T'][gentrain_quality][0] += 1\n #evals[exome_type]['min_both']['T'][min_both_cancer_quality][0] += 1\n else:\n evals[exome_type]['consensus_quality']['T'][cancer_quality][1] += 1\n #evals[exome_type]['gentrain']['T'][gentrain_quality][1] += 1\n #evals[exome_type]['min_both']['T'][min_both_cancer_quality][1] += 1\n\n tmpr = 'tmpr' + str(random.randint(0,1000))\n with open(tmpr, 'w') as f:\n f.write('Exome\\tQuality_type\\tSample\\tQuality\\tPercent_Match\\n')\n for exome_type in evals:\n for quality_type in evals[exome_type]:\n for sample_type in evals[exome_type][quality_type]:\n qualities = evals[exome_type][quality_type][sample_type].keys()\n qualities.sort()\n qualities.reverse()\n sums = [0,0]\n for q in qualities:\n sums[0] += evals[exome_type][quality_type][sample_type][q][0]\n sums[1] += evals[exome_type][quality_type][sample_type][q][1]\n f.write('%s\\t%s\\t%s\\t%.2f\\t%.2f\\n' %\n (exome_type, quality_type, sample_type, q, \n float(100)*float(sums[0])/float(sum(sums))))\n tmpR = 'tmprinput' + str(random.randint(0,1000))\n with open(tmpR, 'w') as f:\n f.write(\"source('funcs.R')\\n\")\n f.write(\"png('plots/yuiri_exome_chip_cmp.png')\\n\")\n f.write(\"snpchip_exome_cmp('\" + tmpr + \"')\\n\")\n f.write('dev.off()\\n')\n f.write('q()\\n')\n os.system('R CMD BATCH --vanilla ' + tmpR + ' tmpLog')\n os.system('rm tmpLog ' + tmpR + ' ' + tmpr)", "def __init__(self):\n BaseType.__init__(self)\n self.type = self.__class__.__name__\n self.name = self.__class__.__name__\n self.acceptsProbability = False #If True the metric needs to be able to handle (value,probability) where value and probability are lists\n self.acceptsDistribution = False #If True the metric needs to be able to handle a passed in Distribution", "def fromgenotype(self):\n\t\tpass", 
"def read_gmm(read):\n\tmodel = mixture.GMM(n_components=8)\n\tread_main = read.get_read()\n\talignments = read.get_alignments()\n\n\t# Generates observations\n\t# bases are converted to their ascii character values\n\tread_list = [ord(c) for c in read_main]\n\tobservations = [ ]\n\tfor alignment in alignments:\n\t\talignment_list = [ord(c) for c in alignment] \n\t\tobservations.append( alignment_list )\n\t# for base_index, base in enumerate(read_main):\n\t# \tbase_observations = [ord(base)]\n\t# \tfor alignment in alignments:\n\t# \t\tbase_observations.append(ord(alignment[base_index]))\n\n\t# \tobservations.append(base_observations)\n\n\tprint model.fit(observations)\n\tprint np.round(model.means_, 2)\n\t\n\tprint model.predict([read_list])", "def freq_to_genotype(pL,sL,er):\n h = sum(pL) # number of different haplotypes\n L = [ bin(x)[2:] for x in range(1,2**h-1) ] # range from 1 to 2**h-1 because we don't want 0% or 100% allele freq\n M = [ '0'*(len(L[-1])-len(x))+x for x in L ]\n p_freqL = []\n for i in range(len(pL)):\n p_freqL += [sL[i]/pL[i]]*pL[i]\n p_freqA = np.array(p_freqL)\n sA = np.array(sL)\n aD = {} # dict where each key is an expected alternate allele frequency and each value is a list of genotypes consistent with this alternate allele frequency\n for g in M:\n alt_freq = sum(np.array([ int(x) for x in list(g) ])*p_freqL)\n if aD.has_key(alt_freq):\n aD[alt_freq].append(g)\n else:\n aD[alt_freq] = [g]\n aD[er] = ['0'*(len(L[-1])-1) + bin(0)[2:]] # add genotype for 0% alternate allele freq\n aD[1-er] = [bin(2**h-1)[2:]] # add genotype for 100% alternate allele freq\n return aD", "def alleles(gt1, gt2):\n\n\t# Count genotypes 1 and 2\n\tcount = {}\n\tfor g in gt1 :\n\t\tcount[g] = count.get(g, 0) + 1\n\n\tfor g in gt2 :\n\t\tcount[g] = count.get(g, 0) + 1\n\n\t# Find mayor allele (we call it 'ref')\n\tmaxCount = 0\n\tref = ''\n\tfor g in count:\n\t\tif count[g] > maxCount:\n\t\t\tmaxCount = count[g]\n\t\t\tref = g\n\n\t# Find minor allele (we call these one 'alt')\n\talt = ''\n\tfor g in count:\n\t\tif g != ref and g != '0': alt = g\n\n\t# Create a genotype string (VCF style)\n\tgtstr = \"\"\n\tfor i in range(len(gt1)):\n\t\tgtstr += \"\\t\" + gtVcf(ref, alt, gt1[i]) + \"/\" + gtVcf(ref, alt, gt2[i])\n\n\treturn ref, alt, count[alt], gtstr", "def Log_OB(xref,x):\n\n nX = np.shape(x)\n\n m = nX[0]\n n = nX[1]\n t = nX[2]\n\n G = np.zeros((m,n,t))\n\n for r in range(t):\n\n # Correct for permuations\n\n Xout,PiA= CorrectPerm(xref,x[:,:,r])\n\n G[:,:,r] = Xout - np.dot(xref,np.dot(PiA,Xout))\n\n return G", "def blen_read_geom_array_setattr(generator, blen_data, blen_attr, fbx_data, stride, item_size, descr, xform):\n max_idx = len(blen_data) - 1\n print_error = True\n\n def check_skip(blen_idx, fbx_idx):\n nonlocal print_error\n if fbx_idx < 0: # Negative values mean 'skip'.\n return True\n if blen_idx > max_idx:\n if print_error:\n print(\"ERROR: too much data in this layer, compared to elements in mesh, skipping!\")\n print_error = False\n return True\n return False\n\n if xform is not None:\n if isinstance(blen_data, list):\n if item_size == 1:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n blen_data[blen_idx] = xform(fbx_data[fbx_idx])\n else:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n blen_data[blen_idx] = xform(fbx_data[fbx_idx:fbx_idx + item_size])\n else:\n if item_size == 1:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n 
setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx]))\n else:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n setattr(blen_data[blen_idx], blen_attr, xform(fbx_data[fbx_idx:fbx_idx + item_size]))\n else:\n if isinstance(blen_data, list):\n if item_size == 1:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n blen_data[blen_idx] = fbx_data[fbx_idx]\n else:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n blen_data[blen_idx] = fbx_data[fbx_idx:fbx_idx + item_size]\n else:\n if item_size == 1:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx])\n else:\n def _process(blend_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx):\n setattr(blen_data[blen_idx], blen_attr, fbx_data[fbx_idx:fbx_idx + item_size])\n\n for blen_idx, fbx_idx in generator:\n if check_skip(blen_idx, fbx_idx):\n continue\n _process(blen_data, blen_attr, fbx_data, xform, item_size, blen_idx, fbx_idx)", "def _magsamples(self):\n if self._derived_properties[\"magsamples\"] is None:\n if self.lbda is None:\n raise AttributeError(\"lbda not set.\")\n self.derive_magsamples()\n \n return self._derived_properties[\"magsamples\"]", "def generate_bio(name, ocupation, threshold=0):\n\n\tgen_bio = ''\n\tbio_score = -1\n\n\twith gpt2.start_tf_sess() as sess:\n\t\tgpt2.load_gpt2(sess, run_name='old_dates')\n\n\t\t# guarantee valid threshold value\n\t\tif threshold < 0:\n\t\t\tthreshold = 0\n\t\tif threshold >= 1:\n\t\t\t# default to classifier's threshold\n\t\t\tthreshold = 0.5\n\n\t\tprompt = name.title() + ' was a Venetian ' + ocupation.lower() + '. [SEP] '\n\t\t\n\t\twhile bio_score < threshold:\n\t\t\tgen_bio = gpt2_generator.generate_fake_bio(prompt, sess)\n\n\t\t\ts_df = bert_evaluator.evaluate_text(gen_bio)\n\t\t\trealness_vals = s_df['prob_real'].values \n\t\t\tbio_score = np.min(realness_vals)\n\n\t# return tuple (str generated_bio, float confidence)\n\treturn gen_bio, bio_score", "def count_all_bases(sequence):\n # create a set of bases\n bases = set(sequence)\n all_bases = defaultdict(int)\n # iterates in the base set\n for base in bases:\n # count the bases in the sequence\n all_bases[base] = sequence.count(base)\n return all_bases", "def prob(sequences, structure, ordering = None, material = 'rna',\n dangles = 'some', T = 37, multi = True, pseudo = False,\n sodium = 1.0, magnesium = 0.0):\n \n ## Set up command-line arguments and input\n args, cmd_input = \\\n setup_nupack_input(exec_name = 'prob', sequences = sequences, ordering = ordering,\n structure = structure, material = material,\n sodium = sodium, magnesium = magnesium,\n dangles = dangles, T = T, multi = multi, pseudo = pseudo)\n \n ## Perform call\n output, error = call_with_pipe(args, cmd_input)\n\n ## Parse and return output\n if output[-3] != \"% Probability:\" :\n raise ValueError('NUPACK output parsing problem')\n\n return float(output[-2])", "def sample(self, like_params):\n\t\t# Transpose first two dimensions of like_params.\n\t\tlike_params = tuple(tuple(p) for p in zip(*like_params))\n\t\tgen = zip(self.likelihoods,like_params)\n\t\treturn tuple(like.sample(p)[0] for like, p in gen)", "def mixed_prob( means,stds,weights,validt):", "def bio_prop(self, id: int):\n #pocket\n path_protein, _ = self._get_path(id)\n protein_name = self.files_refined[id]\n mol = Molecule(path_protein)\n mol.filter('protein')\n mol = 
prepareProteinForAtomtyping(mol, verbose = False)\n\n features = getChannels(mol, version=2)\n features = (features[0] > 0).astype(np.float32)\n features = np.asarray(features[:, :-1])\n # print(\"feat shape bio - \", features.shape)\n return features", "def get_gaba_examples_supp(self):\n return np.broadcast_to(\n self.get_t_vec(),\n self.gaba_examples[self.gaba_examples.keys()[0]].shape,\n )", "def get_likelihood(\n self,\n qb,\n inv_fish,\n map_tag=None,\n null_first_cmb=False,\n lmin=33,\n lmax=250,\n mcmc=True,\n alpha_tags=[\"95\", \"150\"],\n beam_tags=[\"95\", \"150\"],\n r_prior=[0, np.inf],\n alpha_prior=[0, np.inf],\n res_prior=None,\n beam_prior=[0, 1],\n betad_prior=[0, 1],\n dust_amp_prior=[0, np.inf],\n dust_ellind_prior=[0, 1],\n num_walkers=50,\n num_steps=20000,\n converge_criteria=0.01,\n reset_backend=None,\n file_tag=None,\n ):\n\n for x in [\n r_prior,\n alpha_prior,\n res_prior,\n beam_prior,\n betad_prior,\n dust_amp_prior,\n dust_ellind_prior,\n ]:\n if x is not None:\n x[:] = [float(x[0]), float(x[1])]\n\n save_name = \"like_mcmc\"\n if not mcmc:\n alpha_prior = None\n res_prior = None\n beam_prior = None\n betad_prior = None\n dust_amp_prior = None\n dust_ellind_prior = None\n\n # no template cleaning if there aren't any templates specified\n if not getattr(self, \"template_cleaned\", False):\n alpha_prior = None\n\n # null out unused priors\n self.template_alpha = getattr(self, \"template_alpha\", None)\n if self.template_alpha is None or all(\n [x is None for x in self.template_alpha.values()]\n ):\n alpha_prior = None\n\n # count alpha parameters to fit\n alpha_tags = [x for x in alpha_tags if x in self.map_tags_orig]\n if not len(alpha_tags):\n alpha_prior = None\n\n num_alpha = 0\n if alpha_prior is not None:\n num_alpha = len(alpha_tags)\n\n # count beam parameters to fit\n beam_tags = [x for x in beam_tags if x in self.map_tags_orig]\n if not len(beam_tags):\n beam_prior = None\n\n num_beam = 0\n if beam_prior is not None:\n num_beam = len(beam_tags)\n\n if not any([k.startswith(\"res_\") for k in qb]):\n res_prior = None\n\n if np.any(\n [\n betad_prior is not None,\n dust_amp_prior is not None,\n dust_ellind_prior is not None,\n ]\n ):\n dust_ell_fit = True\n else:\n dust_ell_fit = False\n\n # bookkeeping: ordered priors\n priors = {\n \"r_prior\": r_prior,\n \"alpha_prior\": alpha_prior,\n \"res_prior\": res_prior,\n \"beam_prior\": beam_prior,\n \"betad_prior\": betad_prior,\n \"dust_amp_prior\": dust_amp_prior,\n \"dust_ellind_prior\": dust_ellind_prior,\n }\n # priors on quantities that affect Dmat_obs or gmat (precalculated)\n obs_priors = [alpha_prior]\n\n # check parameter space\n if all([x is None for x in priors.values()]):\n raise RuntimeError(\"Empty parameter space\")\n\n out = dict(\n r_prior=r_prior,\n alpha_prior=alpha_prior,\n res_prior=res_prior,\n beam_prior=beam_prior,\n betad_prior=betad_prior,\n dust_amp_prior=dust_amp_prior,\n dust_ellind_prior=dust_ellind_prior,\n alpha_tags=alpha_tags,\n num_walkers=num_walkers,\n null_first_cmb=null_first_cmb,\n apply_gcorr=self.apply_gcorr,\n weighted_bins=self.weighted_bins,\n lmin=lmin,\n lmax=lmax,\n )\n\n if mcmc and reset_backend is None:\n ret = self.load_data(\n save_name,\n \"likelihood\",\n bp_opts=True,\n to_attrs=False,\n map_tag=map_tag,\n value_ref=out,\n extra_tag=file_tag,\n )\n if ret is not None and ret.get(\"converged\", False):\n if converge_criteria >= ret.get(\"converge_criteria\", 0.01):\n return ret\n if ret is not None:\n for pname, pval in priors.items():\n if 
np.all(pval != ret.get(pname, None)):\n ret = None\n # clear chain cache if rerunning, otherwise append to chain by default\n reset_backend = ret is None\n\n out.update(converge_criteria=converge_criteria)\n\n # save state\n if mcmc and reset_backend:\n self.save_data(\n save_name, map_tag=map_tag, extra_tag=file_tag, bp_opts=True, **out\n )\n\n # clear pre-computed quantities\n self.clear_precalc()\n use_precalc = all([x is None for x in obs_priors])\n\n cls_input, cls_noise, cls_debias = self.get_data_spectra()\n\n # extract residual bins, ignoring bins outside of lmin/lmax\n if res_prior is not None:\n bin_def_orig = copy.deepcopy(self.bin_def)\n nbins_res_orig = self.nbins_res\n qb_res = OrderedDict()\n num_res = 0\n for k in list(qb):\n if k.startswith(\"res_\"):\n bd = self.bin_def[k]\n good = np.where((bd[:, 1] > lmin) & (bd[:, 0] < lmax))[0]\n # use all qb res in range lmin, lmax\n self.bin_def[k] = bd[good]\n v = qb.pop(k)[good]\n num_res += len(v)\n\n # use average qb res in good range per map\n # self.bin_def[k] = np.array([[lmin, lmax + 1]])\n # v = np.array([(qb.pop(k)[good]).mean()])\n # num_res += 1\n qb_res[k] = v\n self.nbins_res = num_res\n\n # set CMB model bandpowers to unity, since we are computing\n # the likelihood of this model given the data\n if r_prior is None:\n self.log(\"Computing model spectrum\", \"debug\")\n self.warn(\"Beam variation not implemented for case of no r fit\")\n cbl = self.bin_cl_template(map_tag=map_tag)\n cls_model = self.get_model_spectra(qb, cbl, delta=True, cls_noise=cls_noise)\n else:\n qb = copy.deepcopy(qb)\n for spec in self.specs:\n stags = [\"cmb_{}\".format(spec), \"fg_{}\".format(spec)]\n for stag in stags:\n if stag not in qb:\n continue\n qb[stag] = np.ones_like(qb[stag])\n\n self.log(\"Computing r model spectrum\", \"debug\")\n cls_shape_scalar = self.get_signal_shape(\n r=1.0, save=False, component=\"scalar\"\n )\n\n cls_shape_tensor = self.get_signal_shape(\n r=1.0, save=False, component=\"tensor\"\n )\n\n # load tensor and scalar terms separately\n cbl_scalar = self.bin_cl_template(cls_shape_scalar, map_tag)\n cls_model_scalar = self.get_model_spectra(\n qb, cbl_scalar, delta=True, cls_noise=cls_noise\n )\n cbl_tensor = self.bin_cl_template(cls_shape_tensor, map_tag)\n cls_model_tensor = self.get_model_spectra(\n qb, cbl_tensor, delta=False, res=False\n )\n if beam_prior is not None:\n # load beam error term for tensor and scalar\n cbl_scalar_beam = self.bin_cl_template(\n cls_shape_scalar, map_tag, beam_error=True\n )\n cls_mod_scal_beam = self.get_model_spectra(\n qb, cbl_scalar_beam, delta=True, res=False\n )\n cbl_tensor_beam = self.bin_cl_template(\n cls_shape_tensor, map_tag, beam_error=True\n )\n cls_mod_tens_beam = self.get_model_spectra(\n qb, cbl_tensor_beam, delta=False, res=False\n )\n\n # load foreground shape\n if dust_ell_fit:\n cls_shape_dust = self.get_signal_shape(save=False, component=\"fg\")\n # if dust_ellind_prior is None:\n # # can preload shape since not varying ell index\n cbl_fg = self.bin_cl_template(cls_shape_dust, map_tag=map_tag)\n if beam_prior is not None:\n cbl_fg_beam = self.bin_cl_template(\n cls_shape_dust, map_tag, beam_error=True\n )\n\n cbl = copy.deepcopy(cbl_scalar)\n cls_model = copy.deepcopy(cls_model_scalar)\n\n # XXX TODO\n # how to marginalize over the garbage bin?\n\n def parse_params(theta):\n \"\"\"\n Parse array of parameters into a dict\n \"\"\"\n params = {}\n if r_prior is not None:\n params[\"r\"] = theta[0]\n theta = theta[1:]\n if alpha_prior is not None:\n 
params[\"alpha\"] = theta[:num_alpha]\n theta = theta[num_alpha:]\n if res_prior is not None:\n params[\"res\"] = theta[:num_res]\n theta = theta[num_res:]\n if beam_prior is not None:\n params[\"beam\"] = theta[:num_beam]\n theta = theta[num_beam:]\n if betad_prior is not None:\n params[\"betad\"] = theta[0]\n theta = theta[1:]\n if dust_amp_prior is not None:\n # param for ee and bb\n params[\"dust_amp\"] = theta[:2]\n theta = theta[2:]\n if dust_ellind_prior is not None:\n params[\"dust_ellind\"] = theta[0]\n theta = theta[1:]\n if len(theta):\n raise ValueError(\"Too many parameters to parse\")\n return params\n\n def log_prior(\n r=None,\n alpha=None,\n res=None,\n beam=None,\n betad=None,\n dust_amp=None,\n dust_ellind=None,\n ):\n \"\"\"\n Log prior function constructed from input options\n \"\"\"\n values = {\n \"r_prior\": r,\n \"alpha_prior\": alpha,\n \"res_prior\": res,\n \"dust_amp_prior\": dust_amp,\n }\n for v, pval in values.items():\n prior = priors[v]\n if pval is not None and prior is not None:\n if np.any(pval < prior[0]) or np.any(pval > prior[1]):\n return -np.inf\n\n values_gauss = {\n \"beam_prior\": beam,\n \"betad_prior\": betad,\n \"dust_ellind_prior\": dust_ellind,\n }\n # for beam and betad, use gaussian prior\n log_prob = 0.0\n for v, pval in values_gauss.items():\n prior = priors[v]\n if pval is not None and prior is not None:\n pval = np.atleast_1d(pval)\n norm = np.log(1.0 / (prior[1] * np.sqrt(2 * np.pi)))\n chi = (pval - prior[0]) / prior[1]\n log_prob += np.sum(norm - chi ** 2 / 2.0)\n\n return log_prob\n\n def log_like(\n r=None,\n alpha=None,\n res=None,\n beam=None,\n betad=None,\n dust_amp=None,\n dust_ellind=None,\n ):\n \"\"\"\n Log likelihood function constructed from input options\n \"\"\"\n cls_model0 = copy.deepcopy(cls_model)\n\n # compute new template subtracted data spectra\n if alpha is None:\n clsi = cls_input\n else:\n self.get_masked_data(template_alpha=OrderedDict(zip(alpha_tags, alpha)))\n clsi = self.get_data_spectra(do_noise=False)\n\n if beam is not None:\n beam = dict(zip(beam_tags, beam))\n beam_coeffs = dict()\n for xname, (m0, m1) in self.map_pairs_orig.items():\n d = {}\n b0, b1 = [beam.get(m, None) for m in (m0, m1)]\n if b0 is not None:\n d[\"b1\"] = b0\n if b1 is not None:\n d[\"b2\"] = b1\n if b0 is not None:\n d[\"b3\"] = b0 * b1\n beam_coeffs[xname] = d\n\n # compute new signal shape by scaling tensor component by r\n if r is not None:\n for stag, d in cls_model0.items():\n comp, spec = stag.split(\"_\", 1)\n if spec not in [\"ee\", \"bb\"] or comp not in [\"cmb\", \"total\"]:\n continue\n ctag = \"cmb_{}\".format(spec)\n for xname, dd in d.items():\n dd[:] = (\n cls_model_scalar[stag][xname]\n + r * cls_model_tensor[ctag][xname]\n )\n\n if beam is None:\n continue\n beam_term = 0\n for bn, bc in beam_coeffs[xname].items():\n beam_term += bc * (\n cls_mod_scal_beam[ctag][xname][bn]\n + r * cls_mod_tens_beam[ctag][xname][bn]\n )\n dd[:] += beam_term\n\n elif beam is not None:\n for stag, d in cls_model0.items():\n comp, spec = stag.split(\"_\", 1)\n if spec not in [\"ee\", \"bb\"] or comp not in [\"cmb\", \"total\"]:\n continue\n ctag = \"cmb_{}\".format(spec)\n for xname, dd in d.items():\n beam_term = 0\n for bn, bc in beam_coeffs[xname].items():\n beam_term += bc * cls_mod_scal_beam[ctag][xname][bn]\n dd[:] = cls_model_scalar[stag][xname] + beam_term\n\n # fg term, including beam modifications. 
Because mix terms are\n # dependent on dust amp, get model specs here.\n if dust_ell_fit:\n if dust_amp is None:\n qb[\"fg_ee\"][:] = 1\n qb[\"fg_bb\"][:] = 1\n else:\n qb[\"fg_ee\"][:] = dust_amp[0]\n qb[\"fg_bb\"][:] = dust_amp[1]\n if betad is None:\n qb[\"delta_beta\"][:] = 0\n else:\n qb[\"delta_beta\"][:] = betad\n if dust_ellind is not None:\n cbl_fg0 = self.bin_cl_template(\n cls_shape_dust, map_tag=map_tag, fg_ell_ind=dust_ellind\n )\n if beam is not None:\n cbl_fg_beam0 = self.bin_cl_template(\n cls_shape_dust,\n map_tag,\n fg_ell_ind=dust_ellind,\n beam_error=True,\n )\n else:\n cbl_fg0 = cbl_fg\n if beam is not None:\n cbl_fg_beam0 = cbl_fg_beam\n\n cls_model_fg = self.get_model_spectra(\n qb, cbl_fg0, delta=True, res=False\n )\n if beam is not None:\n cls_mod_fg_beam = self.get_model_spectra(\n qb, cbl_fg_beam0, delta=True, res=False\n )\n # add fg field to model, and add fg to total model\n for stag, d in cls_model_fg.items():\n comp, spec = stag.split(\"_\", 1)\n if spec not in [\"ee\", \"bb\"] or comp not in [\"fg\", \"total\"]:\n continue\n ftag = \"fg_{}\".format(spec)\n if stag not in cls_model0:\n cls_model0[stag] = OrderedDict()\n for xname, dd in d.items():\n if xname not in cls_model0[stag]:\n cls_model0[stag][xname] = cls_model_fg[ftag][xname]\n else:\n cls_model0[stag][xname] += cls_model_fg[ftag][xname]\n\n # add beam terms to fg and total fields\n if beam is not None:\n beam_term = 0\n for bn, bc in beam_coeffs[xname].items():\n beam_term += bc * cls_mod_fg_beam[ftag][xname][bn]\n cls_model0[stag][xname] += beam_term\n\n # compute noise model terms\n if res is None:\n clsm = cls_model0\n else:\n res = pt.arr_to_dict(res, qb_res)\n clsm = copy.deepcopy(cls_model0)\n cls_res = self.get_model_spectra(res, cbl)\n for stag, d in cls_res.items():\n if stag not in clsm:\n clsm[stag] = OrderedDict()\n for xname, dd in d.items():\n if xname not in clsm[stag]:\n clsm[stag][xname] = dd\n else:\n clsm[stag][xname] += dd\n\n # compute likelihood\n like = self.fisher_calc(\n qb,\n cbl,\n clsi,\n cls_noise=cls_noise,\n cls_debias=cls_debias,\n cls_model=clsm,\n null_first_cmb=null_first_cmb,\n likelihood=True,\n use_precalc=use_precalc,\n like_lmin=lmin,\n like_lmax=lmax,\n )\n return like\n\n def log_prob(theta):\n \"\"\"\n Log posterior probability from prior and likelihood\n\n Returns log_prior with each step\n \"\"\"\n params = parse_params(theta)\n prior = log_prior(**params)\n if not np.isfinite(prior):\n return -np.inf, -np.inf\n like = log_like(**params)\n if not np.isfinite(like):\n return -np.inf, prior\n return prior + like, prior\n\n # initial values\n x0 = []\n brute_force = True if not mcmc else False # only vary r\n if r_prior is not None:\n x0 += [0.01]\n if alpha_prior is not None:\n alphas = [self.template_alpha[tag] for tag in alpha_tags]\n x0 += [0.01 if a == 0 else a for a in alphas]\n brute_force = False\n if res_prior is not None:\n x0 += list(pt.dict_to_arr(qb_res, flatten=True))\n brute_force = False\n if beam_prior is not None:\n # add a beam term for each frequency\n x0 += [0.01] * len(beam_tags)\n brute_force = False\n if betad_prior is not None:\n x0 += [0.01]\n brute_force = False\n if dust_amp_prior is not None:\n x0 += [1, 1]\n brute_force = False\n if dust_ellind_prior is not None:\n x0 += [0.01]\n brute_force = False\n\n ndim = len(x0)\n if ndim * 2 > num_walkers:\n num_walkers = int(np.round(ndim / float(num_walkers)) * num_walkers * 2)\n self.warn(\n \"Found {} parameters, increasing number of MCMC walkers to {}\".format(\n ndim, 
num_walkers\n )\n )\n x0 = np.array(x0)[None, :] * (1 + 1e-4 * np.random.randn(num_walkers, len(x0)))\n\n if brute_force or (r_prior is not None and ndim == 1):\n self.log(\"Computing brute-force r profile likelihood\", \"info\")\n likefile = self.get_filename(\n save_name, ext=\".txt\", map_tag=map_tag, extra_tag=file_tag, bp_opts=True\n )\n rs = np.linspace(0, 3, 500)\n likes = np.zeros_like(rs)\n for idx, r in enumerate(rs):\n like = log_like(r=r)\n if idx % 20 == 0:\n self.log(\"r = {:.3f}, loglike = {:.2f}\".format(r, like), \"debug\")\n likes[idx] = like\n header = \"{} r likelihood\\nColumns: r, loglike\".format(\n \"Multi-map\" if map_tag is None else \"Map {}\".format(map_tag)\n )\n np.savetxt(likefile, np.column_stack((rs, likes)), header=header)\n\n if not mcmc:\n return [rs, likes]\n\n # run chains!\n import emcee\n\n # setup sampler output file\n filename = self.get_filename(\n save_name, ext=\".h5\", map_tag=map_tag, extra_tag=file_tag, bp_opts=True\n )\n backend_exists = os.path.exists(filename)\n backend = emcee.backends.HDFBackend(filename)\n if backend_exists and backend.shape != (num_walkers, ndim):\n self.warn(\n \"Expected backend of shape ({}, {}), found {}. Resetting\".format(\n num_walkers, ndim, backend.shape\n )\n )\n reset_backend = True\n if reset_backend:\n backend.reset(num_walkers, ndim)\n\n # initialize sampler\n self.log(\"Initializing sampler\", \"info\")\n sampler = emcee.EnsembleSampler(num_walkers, ndim, log_prob, backend=backend)\n if not reset_backend and backend_exists:\n # grab the last sample if appending to an existing run\n x0 = sampler.run_mcmc(None, 1)\n\n # track autocorrelation time\n old_tau = np.inf\n converged = False\n\n self.log(\n \"Starting {} iterations with {} parameters\".format(num_steps, ndim), \"info\"\n )\n for sample in sampler.sample(x0, iterations=num_steps):\n if not sampler.iteration % 10:\n self.log(\"MCMC iteration {}\".format(sampler.iteration), \"debug\")\n # check convergence every 100 steps\n if sampler.iteration % 100:\n continue\n\n # compute autocorrelation time\n tau = sampler.get_autocorr_time(tol=0)\n\n # check convergence\n converged = np.all(tau / converge_criteria < sampler.iteration)\n converged &= np.all(np.abs(old_tau - tau) / tau < converge_criteria)\n self.log(\n \"MCMC iteration {} autocorr time: mean {:.1f} min {:.1f} max {:.1f}\".format(\n sampler.iteration, np.mean(tau), np.min(tau), np.max(tau)\n ),\n \"info\",\n )\n if converged:\n break\n old_tau = tau\n\n out.update(converged=converged, num_steps=sampler.iteration)\n\n # converged posterior distribution\n if converged:\n self.log(\n \"MCMC converged in {} iterations\".format(sampler.iteration), \"info\"\n )\n tau = sampler.get_autocorr_time()\n burnin = int(2 * np.max(tau))\n thin = int(0.5 * np.min(tau))\n samples = sampler.get_chain(discard=burnin, thin=thin, flat=True)\n out.update(tau=tau, burnin=burnin, thin=thin, samples=samples)\n else:\n self.warn(\"MCMC not converged in {} iterations\".format(num_steps))\n\n if res_prior is not None:\n self.bin_def = bin_def_orig\n self.nbins_res = nbins_res_orig\n\n # save and return\n return self.save_data(\n save_name, map_tag=map_tag, extra_tag=file_tag, bp_opts=True, **out\n )", "def general_gantest(proba, nbr_qubits):\n for m in [4096, 2048]:\n for l in [1, 2, 3]:\n print(\"Easy mode results for m={} and l={}:\".format(m, l))\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=0, easy=True)\n print(\"\\n\")\n print(\"Distribution learning results for m={} and 
l={}:\".format(m, l))\n for d in [256, 512]:\n print(\"For \", d, \": \")\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=d, easy=False)\n print(\"Singleton learning results for m={} and l={}:\".format(m, l))\n Variationer_learn_gan(1000, l, m, proba=proba, n=nbr_qubits, distri_size=0, easy=False)", "def get_log_likelihoods(self, short=False):\n if short:\n return self.memory.get('log_likelihoods', self.s, self.e)\n else:\n return np.concatenate(\n (\n self.memory.get('log_likelihoods', self.s, self.e),\n self.tail_batch.log_likelihoods\n ), axis=0\n )", "def naive_bn(data, attributes):\n bn = []\n attr = attributes['attr'].tolist()\n # each attribute is only dependent on the class node\n i = 0\n while (i < len(attr)-1):\n row = [attr[i], attr[-1]]\n bn.append(row)\n i= i + 1\n # frequency table \n freq = counts_table(data, attributes)\n # conditional probabilities and prior probabilities\n cond_probs, prior0, prior1 = conditional_probability(data, attributes, freq)\n\n return bn, cond_probs, prior0, prior1", "def annotate_ISM(data_df, REFERENCE, position_list, reference_genbank_name=\"data/covid-19-genbank.gb\"):\n seq_list = data_df['sequence'].values.tolist()\n \n seq_index = []\n index = 0\n for base in REFERENCE[1]:\n if base == '-':\n seq_index.append(index)\n else:\n index += 1\n seq_index.append(index)\n reference_local_index_map = np.array(seq_index)\n mapped_reference_index = []\n for index, entropy in position_list:\n mapped_reference_index.append((index, reference_local_index_map[index], entropy))\n REFERENCE_ISM = ''.join([REFERENCE[1][item[0]] for item in position_list])\n logging.info('Reference ISM: {}.'.format(REFERENCE_ISM))\n \n gene_dict = load_gene_dict(reference_genbank_name)\n reference_raw = REFERENCE[1].replace('-', '')\n res = OrderedDict()\n res['Ref position'] = []\n res['Entropy'] = []\n res['Gene'] = []\n res['Is silent'] = []\n res['AA position'] = []\n for align_index, ref_index, entropy in mapped_reference_index:\n codon, codon_idx, name, codon_pos = find_SNP(ref_index, gene_dict, reference_raw)\n base_freq = Counter([item[align_index] for item in seq_list]).most_common()\n for alt_base, count in base_freq:\n if alt_base != reference_raw[ref_index-1]:\n break\n if codon is None:\n if_silence = True\n else:\n alt_codon = list(codon)\n alt_codon[codon_idx] = alt_base\n alt_codon = ''.join(alt_codon)\n ref_aa = translate(codon)\n ism_aa = translate(alt_codon)\n if ref_aa == ism_aa:\n if_silence = True\n else:\n if_silence = False\n res['Ref position'].append(ref_index)\n res['Entropy'].append(entropy)\n if name is None:\n name = 'Non-coding'\n res['Gene'].append(name)\n res['Is silent'].append(if_silence)\n if codon_pos is None:\n res['AA position'].append('NaN')\n else:\n res['AA position'].append('{}{}{}'.format(ref_aa, codon_pos, ism_aa))\n annotation_df = pd.DataFrame.from_dict(res)\n return annotation_df", "def __generate_genotype(self):\n if len(self.genotype) < self.__individual_genotype_length:\n gene = ''\n \n while len(self.genotype) < self.__individual_genotype_length:\n gene = str(random.randint(0,1))\n \n self.genotype = self.genotype + gene", "def refseq_gi_wrangler(inpAccessions):\n print(\"processing GenBank Protein GIs\")\n resD = {}\n for inpAccD in inpAccessions:\n queryL = inpAccD[\"RefSeq Protein GI\"].split(\"//\")\n minQ = 999999999999999 # this should be big enough\n for queryI in queryL:\n if queryI == \"\" or queryI == \"-\": continue\n # curQuery = int(queryI.encode(\"ascii\",\"ignore\"))\n curQuery = 
int(queryI)\n if curQuery < minQ:\n minQ = curQuery\n if minQ == 999999999999999: \n print(\"GI not found in this query:\")\n print(inpAccD)\n else:\n resD[inpAccD[\"InputValue\"]] = minQ \n\n return resD", "def get_likelihood(self, sta, obs):\n # obs (32, obs_num, 3, 24, 24) -> (32*obs_num, 3, 24, 24)\n o = obs.permute(0, 1, 4, 2, 3)\n o = o.view(-1, 3, 24, 24)\n e = self.observation_encoder(o)\n # get e (32*obs_num, 128)\n # get all the combinations of states and observations\n # -> (32, obs_num, 128)\n e = e.view(obs.size()[0], obs.size()[1], -1)\n # -> (32, obs_num, sta_num, 128)\n e = e.view(obs.size()[0], obs.size()[1], 1, e.size()[2]).repeat(1, 1, sta.size()[1], 1)\n # sta (32, sta_num, 3) -> (32, sta_num, 4)\n s = torch.cat(((sta[:, :, :2] - torch.from_numpy(self.means['s'])[:2]) / torch.from_numpy(self.stds['s'])[:2],\n torch.cos(sta[:, :, 2:3]), torch.sin(sta[:, :, 2:3])), -1)\n # -> (32, obs_num, sta_num, 4)\n s = s.view(s.size()[0], 1, s.size()[1], s.size()[2]).repeat(1, obs.shape[1], 1, 1)\n # get all the combinations of states and observations\n # cat_input (32, obs_num, sta_num, 132)\n cat_input = torch.cat((e, s), -1)\n # -> (32*obs_num*sta_num, 132)\n cat_input = cat_input.view(-1, cat_input.size()[-1])\n\n # get w (32*obs_num*sta_num, 1)\n w = self.likelihood_estimator(cat_input)\n # -> (32, obs_num, sta_num)\n w = w.view(sta.size()[0], obs.size()[1], sta.size()[1])\n\n return w", "def _compute_likelihood(self, mus, pmfs):\n expected_counts = pmfs.copy()\n for mu, _p_bin_source in zip(mus, expected_counts):\n _p_bin_source *= mu # Works because of numpy view magic...\n expected_total = np.sum(expected_counts, axis=0)\n\n observed_counts = self.data_events_per_bin.histogram\n\n ret = observed_counts * np.log(expected_total) - expected_total - gammaln(observed_counts + 1.).real\n return np.sum(ret)", "def calculate_pvalues(\n self, which='per_read', log_values=False, \n log_if_values_above=1E9, apply_bh_adjust=True,\n # This cutof_for_consideration value is important. It has to be\n # 5 for the Figure 1 HOMER motifs to be correct, or CELF1 and Rbfox1\n # have their correct motif shift down the list while an incorrect motif\n # takes the top spot. In general, however, it shouldn't be used.\n cutoff_for_consideration=-5.,\n test_mode=False, slow=False):\n self.stats_log = collections.defaultdict(int)\n\n print(\"dict {gene_type (RNA) => {protein=>count}\")\n if which == 'per_read':\n pos_rpg = self.positives.reads_per_million\n neg_rpg = self.negatives.reads_per_million\n elif which == 'raw':\n pos_rpg = self.positives.raw_reads_per_gene\n neg_rpg = self.negatives.raw_reads_per_gene\n elif which == 'per_protein':\n pos_rpg = self.positives.reads_per_protein\n neg_rpg = self.negatives.reads_per_protein\n else:\n raise ValueError(\n f\"which={which}. 
Possible values are 'per_read', 'raw', and 'per_protein'.\")\n \n pos_counts_by_protein = pos_rpg.df\n neg_counts_by_protein = neg_rpg.df\n \n missing_rnas_in_negatives = set(pos_counts_by_protein.index) - set(neg_counts_by_protein.index)\n for rna in missing_rnas_in_negatives:\n neg_counts_by_protein.loc[rna] = 0\n #neg_counts_by_protein = neg_counts_by_protein.append(\n # pandas.Series(0, index=neg_counts_by_protein.columns), ignore_index=True)\n\n print(f\"Determining p values by negative binomial for {pos_counts_by_protein.shape[0]} RNAs...\")\n\n self.pvals[which] = collections.defaultdict(dict)\n\n self.neg_sub_df = {}\n self.pos_sub_df = {}\n\n # Create dicts of {protein -> dataframe} by subsetting columns.\n # For the negative proteins:\n\n for prot in neg_rpg.proteins():\n self.neg_sub_df[prot] = neg_counts_by_protein.loc[:,\n neg_rpg.columns_for_a_protein(prot)]\n self.neg_sub_df[prot].fillna(value=0, inplace=True)\n lower_bound = min([self.negatives.lowest_positive_vals[which][prot]/10, 1])\n self.neg_sub_df[prot].clip(lower_bound, None, inplace=True)\n self.neg_sub_df[prot] = self.neg_sub_df[prot].mean(axis=1)\n\n # For the positive proteins:\n for prot in pos_rpg.proteins():\n self.pos_sub_df[prot] = pos_counts_by_protein.loc[:,\n pos_rpg.columns_for_a_protein(prot)]\n self.pos_sub_df[prot].fillna(value=0, inplace=True)\n self.pos_sub_df[prot] = self.pos_sub_df[prot].mean(axis=1)\n \n if len(self.neg_sub_df) < 2 or len(self.pos_sub_df) < 1:\n return False # Abort and return false if there aren't enough proteins to compare.\n\n all_proteins = neg_rpg.proteins() | pos_rpg.proteins()\n\n print(f\"Columns for hnRNPC: {pos_rpg.columns_for_a_protein('hnRNPC')}\")\n if test_mode:\n #pos_counts_by_protein['sum'] = pos_counts_by_protein.sum(axis=1)\n #pos_counts_by_protein.sort_values(by=['sum'], inplace=True, ascending=False)\n pos_counts_by_protein = pos_counts_by_protein.head(20)\n\n # For every RNA:\n for gene_type in pos_counts_by_protein.index:\n\n verbose = bool(random.randint(0, 10000) == 1)\n\n neg_vals = {\n prot: self.neg_sub_df[prot].loc[gene_type] for prot in self.neg_sub_df \\\n if (gene_type.split('::')[0] != prot)}\n\n pos_counts_by_protein = {\n prot: self.pos_sub_df[prot].loc[gene_type] for prot in self.pos_sub_df}\n\n #print(\"Neg vals {}\\n pos vals {}\\n\".format(neg_vals, pos_counts_by_protein))\n\n verbose and print(f\"{gene_type}:\")\n\n verbose and print(f\"Calculating p values from neg. binom. fit to randoms: on RNA {len(self.pvals[which])}/{len(neg_counts_by_protein)}.\")\n\n if (np.max([x for x in neg_vals.values()]) < cutoff_for_consideration) and (\n np.max([x for x in pos_counts_by_protein.values()]) < cutoff_for_consideration):\n #print(\"Setting to 1\")\n #self.pvals[which][gene_type] = {prot:1 for prot in all_proteins}\n continue\n\n neg_vals_at_rna = np.array([x for x in neg_vals.values()])\n #print(neg_vals_at_rna)\n #if slow:\n # self.pvals[which][gene_type] = self.pval_at_rna_by_nbinom_slow( \n # pos_counts_by_protein,\n # neg_vals_at_rna,\n # gene_type, log_values=log_values,\n # log_if_values_above=log_if_values_above, which=which)\n #else:\n self.pvals[which][gene_type] = self.pval_at_rna_by_nbinom( \n pos_counts_by_protein,\n neg_vals_at_rna,\n gene_type, log_values=log_values,\n log_if_values_above=log_if_values_above, which=which,\n verbose=verbose)\n\n verbose and print(\n f\"pos={pos_counts_by_protein} neg={neg_vals_at_rna}. 
pvals={self.pvals[which][gene_type]}\")\n\n #print(self.pvals[which][gene_type])\n\n if not hasattr(self, 'pvals_dfs'):\n self.pvals_dfs = {}\n\n self.pvals_dfs[which] = pandas.DataFrame.from_dict(self.pvals[which], orient='index')\n\n if apply_bh_adjust:\n \n fdr_cols = []\n for prot in self.pvals_dfs[which].columns[:]:\n arr = statsmodels.stats.multitest.fdrcorrection(\n self.pvals_dfs[which][prot], alpha=0.05,\n method='indep', is_sorted=False)\n\n self.pvals_dfs[which]['FDR' + prot] = arr[1]\n fdr_cols.append('FDR' + prot)\n for col in fdr_cols:\n self.pvals_dfs[which][re.sub('FDR', '', col)] = self.pvals_dfs[which][col]\n del self.pvals_dfs[which][col]\n\n print(\"Warning: P values in pvals_df were BH ajusted, but self.pvals were not.\")\n\n print(self.stats_log)\n\n return True # Return True if finished successfully." ]
[ "0.58311933", "0.5258182", "0.51413745", "0.51239264", "0.50704426", "0.5054357", "0.50419044", "0.50068015", "0.49709237", "0.49687204", "0.49641567", "0.4958727", "0.4943448", "0.49398604", "0.49218807", "0.49215811", "0.4891338", "0.48754093", "0.4873586", "0.48568666", "0.48543763", "0.482769", "0.48221993", "0.48171592", "0.4813334", "0.480879", "0.4791081", "0.47846964", "0.47835147", "0.4782735", "0.47761416", "0.4771583", "0.47445154", "0.47376904", "0.47342947", "0.4726381", "0.47241518", "0.4717857", "0.47178516", "0.4713136", "0.47080126", "0.4707902", "0.4701386", "0.4688887", "0.4688606", "0.46784878", "0.467386", "0.46716136", "0.46630287", "0.46598387", "0.4657517", "0.46549582", "0.46428785", "0.46328408", "0.46284747", "0.46227947", "0.46210894", "0.46210894", "0.46117234", "0.46106923", "0.45999599", "0.4597459", "0.45961374", "0.45925954", "0.45867693", "0.45858887", "0.4582656", "0.45795095", "0.45772037", "0.4568801", "0.45643458", "0.45540893", "0.4550117", "0.45392218", "0.45380956", "0.45341548", "0.4531598", "0.45315135", "0.45257917", "0.45213974", "0.45185724", "0.4517224", "0.45139924", "0.45010945", "0.45000538", "0.4496797", "0.44929048", "0.44928855", "0.4491785", "0.44869968", "0.44860277", "0.4478957", "0.44771355", "0.44767535", "0.44692144", "0.44664553", "0.4464986", "0.44604233", "0.4460352", "0.4455469" ]
0.702063
0
The base exception class.
def __init__(self, error_msg):
    super(SdkException, self).__init__()
    self.error_msg = error_msg
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exception(self, e):\n pass", "def exception(self, *args, **kwargs):", "def unexpectedException(self):", "def WrappedException(self) -> object:", "def throw(self):\n pass", "def create_exception(self, msg: str):", "def test_exception_class_hierarchy(self) -> None:\n\n try:\n raise CustomDerivedError(state=\"test\")\n except CustomDerivedError as cex:\n assert type(cex) is CustomDerivedError\n assert \"test\" == cex.state\n except CustomError as cex:\n assert False, \"CustomDerivedError should have caught the exception.\"\n except:\n assert False, f\"Unhandled exception: {sys.exc_info()[0]}\"", "def cancelled_exception_class(cls) -> type[BaseException]:", "def unexpected_error(self, exception):", "def __init__(self, level, message):\n Exception.__init__(self)\n self.level = level\n self.msg = message", "def __init__(self, error_msg):\n super(ConnectionException, self).__init__(error_msg)", "def error(self):\n raise NotImplementedError(\"subclasses need to override this method\")", "def __init__(self, message=\"\"):\n super(ApplicationError, self).__init__(message)", "def raise_error(cls, *args):\n raise cls(cls.message)", "def __init__(self, content, status):\n Exception.__init__(self)\n self.status = status\n self.content = content", "def __init__(self, msg, cause=None, *args, **kwargs):\n super(SIEException, self).__init__(*args, **kwargs)\n self.msg = msg\n self.cause = cause", "def exception(self) -> Exception:\n return self._exception", "def __init__(self, message):\n ModelException.__init__(self, message)", "def exceptionType(self):\n return ExceptionType.GeneralException", "def __init__(self, code, reason):\n super(RequestError, self).__init__(code, reason)", "def __init__(self, exception, message=\"Invalid requests parse!\"):\n self.message = message\n self.exception = exception\n super().__init__(self.message)", "def __init__ (self, msg, parent=None, api_object=None, from_log=False) :\n Exception.__init__(self, msg)\n\n self._plain_message = msg\n self._exceptions = [self]\n self._top_exception = self\n self._ptype = type(parent).__name__ # parent exception type\n self._stype = type(self ).__name__ # own exception type \n\n ignore_stack = 2\n if from_log : \n ignore_stack += 1\n\n\n if api_object : \n self._object = weakref.ref (api_object)\n else :\n self._object = None\n\n\n # did we get a parent exception?\n if parent :\n\n # if so, then this exception is likely created in some 'except'\n # clause, as a reaction on a previously catched exception (the\n # parent). Thus we append the message of the parent to our own\n # message, but keep the parent's traceback (after all, the original\n # exception location is what we are interested in).\n #\n if isinstance (parent, SagaException) :\n # that all works nicely when parent is our own exception type...\n self._traceback = parent.traceback\n\n frame = traceback.extract_stack ()[- ignore_stack]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \" %-20s: %s (%s)\\n%s\" \\\n % (self._stype, msg, line, parent.msg)\n\n else :\n if self._stype != \"NoneType\" :\n # ... but if parent is a native (or any other) exception type,\n # we don't have a traceback really -- so we dig it out of\n # sys.exc_info. 
\n trace = sys.exc_info ()[2]\n stack = traceback.extract_tb (trace)\n traceback_list = traceback.format_list (stack)\n self._traceback = \"\".join (traceback_list)\n\n # the message composition is very similar -- we just inject the\n # parent exception type inconspicuously somewhere (above that\n # was part of 'parent.message' already).\n frame = traceback.extract_stack ()[- ignore_stack]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \" %-20s: %s (%s)\\n %-20s: %s\" \\\n % (self._stype, msg, line, self._ptype, parent)\n\n else :\n\n # if we don't have a parent, we are a 1st principle exception,\n # i.e. a reaction to some genuine code error. Thus we extract the\n # traceback from exactly where we are in the code (the last stack\n # frame will be the call to this exception constructor), and we\n # create the original exception message from 'stype' and 'message'.\n stack = traceback.extract_stack ()\n traceback_list = traceback.format_list (stack)\n self._traceback = \"\".join (traceback_list[:-1])\n frame = traceback.extract_stack ()[- ignore_stack -1]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \"%s (%s)\" % (msg, line)\n\n # we can't do that earlier as _msg was not set up before\n self._messages = [self._message]", "def test_base_exception(self) -> None:\n with pytest.raises(BaseException) as e:\n 1 / 0\n assert isinstance(e.value, ZeroDivisionError)", "def __init__(self, message, fatal, error_num=None):\n Exception.__init__(self, message)\n self.fatal = fatal\n self.errno = error_num", "def __init__(self, msg):\n super(QuitMessageException, self).__init__(msg)", "def __init__(self, msg=\"\", exc=NotImplementedError):\r\n self.msg = msg\r\n self.exc = exc", "def exception(self):\n raise Exception(\"Exception test\")", "def __init__(self, message=\"\"):\n super(AutomationError, self).__init__(message)", "def __call__(self):\r\n raise self", "def __call__(self):\r\n raise self", "def __init__(self, *args):\n\n super(GoveeException, self).__init__()\n\n if args:\n self.message = args[0]\n else:\n self.message = None", "def __init__(self, module, message, _type, exc_message=None, *args, **kwargs):\n logger.error(\"[{}] {} {} {}\".format(module,\n _type,\n '<{}>'.format(exc_message) if exc_message else '',\n message))\n super(CliException, self).__init__(message, *args)\n self.message = message\n self.type = _type\n self.exc_message = exc_message\n self.str_at_error = kwargs.get('str_at_error', None)", "def exception(self, *args, **kwargs):\n return super(Blueprint, self).exception(*args, **kwargs)", "def __init__(self,value,message):\n ValueError.__init__(self,value,message)", "def solid_exception(self) -> Optional[BaseException]:\n return self.op_exception", "def get_exception():\n raise Exception(\"example\")", "def throw(self, type, value=None, traceback=None):\n pass", "def __init__(self, message=\"\"):\n super(DataError, self).__init__(message)", "def __init__(self):\n raise Exception('TODO IMPLEMENT ME !')", "def exception(self) -> exceptions.ErrorMessageException:\n\n return ErrorMessage.ERROR_CODES_TO_EXCEPTIONS.get(\n self.error_code,\n exceptions.GenericException\n )", "def ioException(self) -> \"IOException\":\n raise NotImplementedError", "def __init__(self, *args):\n this = _libSALOME_LifeCycleCORBA.new_SALOME_Exception(*args)\n try: self.this.append(this)\n except: self.this = this", "def except__else(self, exception: BaseException) -> typing.Any:\n raise exception", "def _get_exception(self):\r\n \r\n return self._exception", "def exception(self):\n 
return self._exception", "def __init__(self, earliest_time, latest_time):\n Exception.__init__(self, earliest_time, latest_time)\n self.earliest_time = earliest_time\n self.latest_time = latest_time", "def _exception_dispatcher(self, e):\n # TODO Currently not doing anything\n raise e", "def __init__(self):\n raise Exception(\"Cannot create this object\")", "def __init__(self):\n raise Exception(\"Cannot create this object\")", "def _gh_exception(exc_cls, status, data):\n try:\n exc = exc_cls(status, data, None)\n except TypeError:\n # Before PyGithub 1.5, GithubException had only two required arguments.\n exc = exc_cls(status, data)\n return exc", "def __init__(self, msg):\n super(CpoSolverException, self).__init__(msg)", "def __init__(self, error = ''):\n IPRO_Error.__init__(self, error)", "def __init__(self, error = ''):\n IPRO_Error.__init__(self, error)", "def __init__(self, error = ''):\n IPRO_Error.__init__(self, error)", "def base(self):\n raise NotImplementedError()", "def __init__ (self, *args, **kw):\n if 0 == len(args) and 'message' in kw:\n args = (kw.pop('message'),)\n self._args = args\n self._kw = kw\n super(PyXBException, self).__init__(*args)", "def __init__(self, exception):\n self.wrapped_exc = exception\n self.status_int = exception.status_int", "def __init__(self):\n raise", "def __init__(self, msg):\n\n super(DBConnectionError, self).__init__(msg)\n self.msg = msg", "def __init__(self, message, text=None, reference=None, contact=None):\n self.openid_message = message\n self.reference = reference\n self.contact = contact\n assert type(message) not in [str, str]\n Exception.__init__(self, text)", "def __init__(self, msg):\n\n super(DBValueError, self).__init__(msg)\n self.msg = msg", "def error(self):\n pass", "def raise_(err):\n raise err", "def __init__(self, message, code, *args):\n self.message = message\n self.code = code\n super(TwitterException, self).__init__(message, code, *args)", "def exception(self):\n exc_type, exc_value, exc_tb = sys.exc_info()\n cui.message(traceback.format_exception_only(exc_type, exc_value)[-1],\n log_message=traceback.format_exc())", "def __init__(self, message=\"\"):\n super(ServerError, self).__init__(message)", "def exception(self) -> str:\n return pulumi.get(self, \"exception\")", "def __init__(self, msg=None, wrapped=None):\n if not msg:\n name= None\n if wrapped:\n msg = \"Access error due to %s: %s\" % \\\n (self._excname(wrapped), str(wrapped))\n else:\n msg = \"Unknown error during blackboard access\"\n \n Exception.__init__(self, msg)\n\n # the wrapped exception\n self.wrapped = None", "def error(self, e):\n return \"{}: {} ({})\".format(e.__class__.__name__, e.__doc__, e.message)", "def test_class_errored(self, cls, exception):", "def __init__(self, msg: str) -> None:\n super().__init__(\n definition=CommonErrorDef.ACTION_FORBIDDEN,\n reason=msg,\n )", "def __init__(self, msg):\n\n super(ConfigError, self).__init__(msg)\n self.msg = msg", "def sample_exception_function(self, a, b):\r\n raise Exception(\"An error has occurred.\")", "def __init__(self, message=None):\n if message is not None:\n super(CryptoritoError, self).__init__(message)\n else:\n super(CryptoritoError, self).__init__()", "def test_type(self):\n assert issubclass(Error, Exception)\n assert Error.__name__ == \"Error\"", "def report_unexpected_exception(self, *args, **kwargs):\n pass", "def what(self):\n return _libSALOME_LifeCycleCORBA.SALOME_Exception_what(self)", "def raise_error(Err):\n raise Err()", "def _create_violation_error(contract: Contract, 
resolved_kwargs: Mapping[str, Any]) -> BaseException:\n exception = None # type: Optional[BaseException]\n\n if contract.error is None:\n try:\n msg = icontract._represent.generate_message(contract=contract, resolved_kwargs=resolved_kwargs)\n except Exception as err:\n parts = [\"Failed to recompute the values of the contract condition:\\n\"]\n if contract.location is not None:\n parts.append(\"{}:\\n\".format(contract.location))\n\n if contract.description is not None:\n parts.append(\"{}: \".format(contract.description))\n\n parts.append(icontract._represent.represent_condition(condition=contract.condition))\n\n raise RuntimeError(''.join(parts)) from err\n\n exception = ViolationError(msg)\n elif inspect.ismethod(contract.error) or inspect.isfunction(contract.error):\n assert contract.error_arg_set is not None, (\"Expected error_arg_set non-None if contract.error a function.\")\n assert contract.error_args is not None, (\"Expected error_args non-None if contract.error a function.\")\n\n error_kwargs = select_error_kwargs(contract=contract, resolved_kwargs=resolved_kwargs)\n\n exception = cast(BaseException, contract.error(**error_kwargs))\n\n if not isinstance(exception, BaseException):\n raise TypeError(\n \"The exception returned by the contract's error {} does not inherit from BaseException.\".format(\n contract.error))\n elif isinstance(contract.error, type):\n if not issubclass(contract.error, BaseException):\n raise TypeError(\n \"The exception class supplied in the contract's error {} is not a subclass of BaseException.\".format(\n contract.error))\n\n msg = icontract._represent.generate_message(contract=contract, resolved_kwargs=resolved_kwargs)\n exception = contract.error(msg)\n elif isinstance(contract.error, BaseException):\n exception = contract.error\n else:\n raise NotImplementedError(\n (\"icontract does not know how to handle the error of type {} \"\n \"(expected a function, a subclass of BaseException or an instance of BaseException)\").format(\n type(contract.error)))\n\n assert exception is not None\n return exception", "def error(self):\n ...", "def __init__(self, error, status_code, *args, **kwargs):\n super(BusinessException, self).__init__(*args, **kwargs)\n self.error = error\n self.status_code = status_code", "def __init__(self, message):\n self.message = LicenseError.ERROR + message\n\n super(LicenseError, self).__init__(self.message)", "def __init__(self, *args):\n this = _libsbml.new_SBMLExtensionException(*args)\n try: self.this.append(this)\n except: self.this = this", "def exception_class(self, exception):\n\n\t\tcls = type(exception)\n\t\tif cls.__module__ == 'exceptions': # Built-in exception.\n\t\t\treturn cls.__name__\n\t\treturn \"%s.%s\" % (cls.__module__, cls.__name__)", "def __init__(self, from_email):\n self.code = 400\n self.from_email = from_email\n Error.__init__(self)", "def exception(self):\n return self._exc_info[1] if self._exc_info is not None else None", "def __init__(self, assigned_time, task_id, node_type):\n Exception.__init__(self, assigned_time, task_id, node_type)\n self.assigned_time = assigned_time\n self.task_id = task_id\n self.node_type = node_type", "def parse_error(self, message, exc_cls=VisualizerParseError):\n raise exc_cls(\"Error parsing %s '%s' (%s:%i): %s\" % \n (self.tag, self.ref, self.filename, self.lineno, message))", "def rescue(self, instance):\n pass", "def get_error(self):\n return self.e", "def exception(self):\n self.wait() # TODO: If raise_errors=True this will raise the exception when trying to access it?\n 
return self._exception", "def error(self, msg, *args, **kwargs):\n pass", "def test_invalidargumentBaseModel(self):\n with self.assertRaises(NameError) as e:\n b1 = BaseModel(hi)\n self.assertEqual(str(e.exception), \"name 'hi' is not defined\")", "def ERR(self):", "def exception_alias():\n try:\n #result=1/0\n raise Exception\n except ZeroDivisionError, e:\n print(\"ZeroDivisionError\")\n print(e.message if e.message != \"\" else 'no message')\n except Exception, e:\n print(\"Exception\")\n print(type(e.message)) # <type 'str'>\n print(e.message if e.message != \"\" else 'no message')", "def error(self, *args, **kwargs):", "def user_exception(self, frame, exc_info):\n pass", "def exception_handler(self, exception):\n pass", "def _raise_http_error(self, *args, **kwargs):", "def exception(self, msg, *args, **kwargs):\n ex = sys.exc_info()[1]\n\n if hasattr(ex, '_monocle'):\n args = args + (format_tb(ex),)\n self.logger.error('%s\\n%%s' % msg, *args, **kwargs)\n else:\n super(Adapter, self).exception(msg, *args, **kwargs)" ]
[ "0.71715826", "0.71011615", "0.6953176", "0.6914926", "0.6909566", "0.6883063", "0.68702894", "0.67892045", "0.6786502", "0.678649", "0.67474914", "0.67397016", "0.6701096", "0.67004186", "0.6674386", "0.6668861", "0.6599745", "0.658252", "0.65630275", "0.6521229", "0.65065217", "0.64985085", "0.6495516", "0.6489825", "0.6463263", "0.6442491", "0.643652", "0.6429513", "0.6415407", "0.6415407", "0.639639", "0.6376825", "0.63629603", "0.6353876", "0.63110024", "0.63101286", "0.63041526", "0.63027114", "0.6275428", "0.6255722", "0.6231754", "0.62283057", "0.6226126", "0.62185687", "0.6216998", "0.6211527", "0.61651427", "0.6154369", "0.6154369", "0.6149195", "0.61353725", "0.6135207", "0.6135207", "0.6135207", "0.613478", "0.61337227", "0.61319876", "0.6114688", "0.6102211", "0.609802", "0.60943216", "0.6089558", "0.60888493", "0.60882324", "0.6087031", "0.607754", "0.60768425", "0.6066077", "0.6064136", "0.60574806", "0.60067546", "0.5972583", "0.5965718", "0.5964232", "0.5963571", "0.59592694", "0.59580266", "0.5953281", "0.5953228", "0.594723", "0.5935337", "0.5924084", "0.59226704", "0.5911263", "0.58947873", "0.58941245", "0.5879395", "0.5871043", "0.5869722", "0.5834933", "0.58341205", "0.5829962", "0.58214444", "0.581839", "0.58177143", "0.58129644", "0.57992274", "0.5797131", "0.5787932", "0.57650954" ]
0.6374392
32
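The row above and the row below each pair a one-line docstring query about a base exception class with a bare constructor as the matched document. As a hedged illustration only, the sketch below reassembles those two constructors into runnable form; the surrounding class statements and the inheritance of ConnectionException from SdkException are assumptions, since the rows store only the constructor bodies.

# Sketch reassembled from the two nearby document fields; the class scaffolding
# and the SdkException -> ConnectionException inheritance are assumed.
class SdkException(Exception):
    """The base exception class."""

    def __init__(self, error_msg):
        super(SdkException, self).__init__()
        self.error_msg = error_msg


class ConnectionException(SdkException):
    """The base exception class of connection exceptions."""

    def __init__(self, error_msg):
        super(ConnectionException, self).__init__(error_msg)


# Example usage with the hypothetical classes above:
try:
    raise ConnectionException("connection refused")
except SdkException as exc:
    print(exc.error_msg)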
The base exception class of connection exceptions.
def __init__(self, error_msg): super(ConnectionException, self).__init__(error_msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, msg):\n\n super(DBConnectionError, self).__init__(msg)\n self.msg = msg", "def _create_exception(self, pgres=None, msg=None, cursor=None):\n assert pgres is None or cursor is None, \\\n \"cannot specify pgres and cursor together\"\n\n if cursor and cursor._pgres:\n pgres, cursor._pgres = cursor._pgres, ffi.NULL\n\n exc_type = exceptions.OperationalError\n code = pgmsg = None\n # _py_enc can be not initialized yet in case of errors when\n # establishing the connection\n err_enc = self._py_enc or 'utf-8'\n\n # If no custom message is passed then get the message from postgres.\n # If pgres is available then we first try to get the message for the\n # last command, and then the error message for the connection\n if pgres:\n pgmsg = libpq.PQresultErrorMessage(pgres)\n pgmsg = ffi.string(pgmsg).decode(err_enc, 'replace') \\\n if pgmsg else None\n\n # Get the correct exception class based on the error code\n code = libpq.PQresultErrorField(pgres, libpq.LIBPQ_DIAG_SQLSTATE)\n if code != ffi.NULL:\n code = bytes_to_ascii(ffi.string(code))\n exc_type = util.get_exception_for_sqlstate(code)\n else:\n code = None\n exc_type = exceptions.DatabaseError\n\n if not pgmsg:\n pgmsg = libpq.PQerrorMessage(self._pgconn)\n pgmsg = ffi.string(pgmsg).decode(err_enc, 'replace') \\\n if pgmsg else None\n\n if msg is None and pgmsg:\n msg = pgmsg\n for prefix in (\"ERROR: \", \"FATAL: \", \"PANIC: \"):\n if msg.startswith(prefix):\n msg = msg[len(prefix):]\n break\n\n # Clear the connection if the status is CONNECTION_BAD (fatal error)\n if self._pgconn and libpq.PQstatus(self._pgconn) == libpq.CONNECTION_BAD:\n self._closed = 2\n\n exc = exc_type(msg)\n exc.pgcode = code\n exc.pgerror = pgmsg\n exc.cursor = cursor\n exc._pgres = pgres\n\n return exc", "def SocketError(self) -> SocketError:", "def handle_demisto_exception(e):\n if 'Proxy Error' in str(e):\n raise ConnectionError(MESSAGES['PROXY_ERROR'])\n elif 'ReadTimeoutError' in str(e):\n raise ConnectionError(MESSAGES['REQUEST_TIMEOUT'])\n elif 'ConnectionError' in str(e) or 'ConnectTimeoutError' in str(e):\n raise ConnectionError(MESSAGES['CONNECTION_ERROR'])\n elif 'SSLError' in str(e):\n raise SSLError(MESSAGES['SSL_CERT_ERROR'])\n else:\n raise e", "def _raise_unknown_error(ex):\n raise MsticpyKqlConnectionError(\n \"Another exception was returned by the service\",\n *ex.args,\n f\"Full exception:\\n{str(ex)}\",\n title=\"connection failed\",\n )", "def exception(self, e):\n pass", "def exception(self) -> Exception:\n return self._exception", "def exceptionType(self):\n return ExceptionType.GeneralException", "def format_connection_exception(e, driver):\n if adodbapi is not None:\n if isinstance(e, OperationalError) and e.args and isinstance(e.args[0], com_error):\n e_comm = e.args[0]\n hresult = e_comm.hresult\n sub_hresult = None\n internal_message = None\n if e_comm.args and len(e_comm.args) == 4:\n internal_args = e_comm.args[2]\n if len(internal_args) == 6:\n internal_message = internal_args[2]\n sub_hresult = internal_args[5]\n base_message, base_conn_err = _lookup_conn_error_and_msg(hresult, internal_message)\n sub_message, sub_conn_err = _lookup_conn_error_and_msg(sub_hresult, internal_message)\n if internal_message == 'Invalid connection string attribute':\n if base_message and sub_message:\n conn_err = sub_conn_err if sub_conn_err else base_conn_err\n return base_message + \": \" + sub_message, conn_err\n else:\n # else we can return the original exception message + lookup the proper\n # ConnectionErrorCode for this issue\n 
conn_err = sub_conn_err if sub_conn_err else base_conn_err\n return repr(e), conn_err\n else:\n # if not an Operational error, try looking up ConnectionErr type\n # by doing a regex search on the whole exception message\n e_msg = repr(e)\n _, conn_err = _lookup_conn_error_and_msg(0, e_msg)\n return e_msg, conn_err\n\n elif pyodbc is not None:\n e_msg = repr(e)\n _, conn_err = _lookup_conn_error_and_msg(0, e_msg)\n if conn_err == ConnectionErrorCode.driver_not_found:\n installed, drivers = _get_is_odbc_driver_installed(driver)\n if not installed and drivers:\n e_msg += \" configured odbc driver {} not in list of installed drivers: {}\".format(driver, drivers)\n return e_msg, conn_err\n\n return repr(e), ConnectionErrorCode.unknown", "def db_connection_error(error):\n return internal_server_error(error)", "def cancelled_exception_class(cls) -> type[BaseException]:", "def WrappedException(self) -> object:", "def exception(self, *args, **kwargs):", "def clientconnfail(self) :\n\t\ttry :\n\t\t\treturn self._clientconnfail\n\t\texcept Exception as e:\n\t\t\traise e", "def test_exception_class_hierarchy(self) -> None:\n\n try:\n raise CustomDerivedError(state=\"test\")\n except CustomDerivedError as cex:\n assert type(cex) is CustomDerivedError\n assert \"test\" == cex.state\n except CustomError as cex:\n assert False, \"CustomDerivedError should have caught the exception.\"\n except:\n assert False, f\"Unhandled exception: {sys.exc_info()[0]}\"", "def unexpected_error(self, exception):", "def unexpectedException(self):", "def exception(self):\n return self._exception", "def solid_exception(self) -> Optional[BaseException]:\n return self.op_exception", "def exception(self) -> exceptions.ErrorMessageException:\n\n return ErrorMessage.ERROR_CODES_TO_EXCEPTIONS.get(\n self.error_code,\n exceptions.GenericException\n )", "def _get_exception(self):\r\n \r\n return self._exception", "def __init__(self, msg):\n\n super(DBValueError, self).__init__(msg)\n self.msg = msg", "def throw(self):\n pass", "def ConnectByNameError(self) -> _n_0_t_14:", "def what(self):\n return _libSALOME_LifeCycleCORBA.SALOME_Exception_what(self)", "def __init__(self, code, reason):\n super(RequestError, self).__init__(code, reason)", "def systcpconnfail(self) :\n\t\ttry :\n\t\t\treturn self._systcpconnfail\n\t\texcept Exception as e:\n\t\t\traise e", "def __init__(self, content, status):\n Exception.__init__(self)\n self.status = status\n self.content = content", "def connection_lost(self, exc):\n pass", "def __init__(self, message):\n ModelException.__init__(self, message)", "def __enter__(self):\n logger.verbose(\"Establishing connection to {0}:{1}...\"\n .format(self.server, self.port))\n try:\n return super(SmarterConnection, self).__enter__()\n except vim.fault.HostConnectFault as e:\n if not re.search(\"certificate verify failed\", e.msg):\n raise e\n # Self-signed certificates are pretty common for ESXi servers\n logger.warning(e.msg)\n self.UI.confirm_or_die(\"SSL certificate for {0} is self-signed or \"\n \"otherwise not recognized as valid. 
\"\n \"Accept certificate anyway?\"\n .format(self.server))\n _create_unverified_context = ssl._create_unverified_context\n ssl._create_default_https_context = _create_unverified_context\n return super(SmarterConnection, self).__enter__()\n except requests.exceptions.ConnectionError as e:\n # ConnectionError can wrap another internal error; let's unwrap it\n # so COT can log it more cleanly\n outer_e = e\n inner_message = None\n while e.errno is None:\n inner_e = None\n if hasattr(outer_e, 'reason'):\n inner_e = outer_e.reason\n else:\n for arg in outer_e.args:\n if isinstance(arg, Exception):\n inner_e = arg\n break\n if inner_e is None:\n break\n if hasattr(inner_e, 'strerror'):\n inner_message = inner_e.strerror\n elif hasattr(inner_e, 'message'):\n inner_message = inner_e.message\n else:\n inner_message = inner_e.args[0]\n logger.debug(\"\\nInner exception: {0}\".format(inner_e))\n if hasattr(inner_e, 'errno') and inner_e.errno is not None:\n e.errno = inner_e.errno\n break\n outer_e = inner_e\n if e.strerror is None:\n e.strerror = (\"Error connecting to {0}:{1}: {2}\"\n .format(self.server, self.port, inner_message))\n raise", "def create_exception(self, msg: str):", "def SocketErrorCode(self) -> SocketError:", "def connection_failed(self, connection, error):\n assert False", "def ioException(self) -> \"IOException\":\n raise NotImplementedError", "def error(self):\n raise NotImplementedError(\"subclasses need to override this method\")", "def __init__(self, msg, cause=None, *args, **kwargs):\n super(SIEException, self).__init__(*args, **kwargs)\n self.msg = msg\n self.cause = cause", "def exception(self):\n\n with self._condition:\n self.fetch()\n return self._exception", "def __init__(self, message, fatal, error_num=None):\n Exception.__init__(self, message)\n self.fatal = fatal\n self.errno = error_num", "def except__else(self, exception: BaseException) -> typing.Any:\n raise exception", "def exception(self) -> str:\n return pulumi.get(self, \"exception\")", "def exception(self):\n return self._exc_info[1] if self._exc_info is not None else None", "def clientConnectionFailed(self, err, address: Address):\n if type(err.value) == error.TimeoutError:\n logger.debug(f\"Failed connecting to {address} connection timed out\")\n elif type(err.value) == error.ConnectError:\n ce = err.value\n if len(ce.args) > 0:\n logger.debug(f\"Failed connecting to {address} {ce.args[0].value}\")\n else:\n logger.debug(f\"Failed connecting to {address}\")\n else:\n logger.debug(f\"Failed connecting to {address} {err.value}\")\n self.peers_connecting -= 1\n self.RemoveKnownAddress(address)\n self.RemoveFromQueue(address)\n # if we failed to connect to new addresses, we should always add them to the DEAD_ADDRS list\n self.AddDeadAddress(address)\n\n # for testing\n return err.type", "def raises_conn_error(func):\n @functools.wraps(func)\n def wrapper(*args, **kwargs):\n try:\n return func(*args, **kwargs)\n except exc.InvalidRequestError:\n LOG.exception('Connection error:')\n raise errors.ConnectionError()\n\n return wrapper", "def __repr__(self):\n return 'PortScannerError exception {0}'.format(self.value)", "def get_error(self):\n return self.exc_info", "def _exception_dispatcher(self, e):\n # TODO Currently not doing anything\n raise e", "def exception(self):\n\n try:\n if self.conn1.poll():\n # There is something to read.\n\n # We get and save the exception.\n self._exception_receiver = self.conn1.recv()\n except EOFError:\n pass\n\n self.conn2.close()\n\n return self._exception_receiver", "def 
_get_integrity_error_type(self) -> Type[Exception]:\n backend = self.schema.db_backend_name()\n\n try:\n if backend == \"sqlite\":\n from sqlite3 import IntegrityError\n elif backend == \"postgresql\":\n from asyncpg import ( # type: ignore\n IntegrityConstraintViolationError as IntegrityError,\n )\n else:\n from pymysql import IntegrityError # type: ignore\n return IntegrityError\n except ImportError:\n return Exception", "def exception(self):\n self.wait() # TODO: If raise_errors=True this will raise the exception when trying to access it?\n return self._exception", "def __init__(self, msg=None, wrapped=None):\n if not msg:\n name= None\n if wrapped:\n msg = \"Access error due to %s: %s\" % \\\n (self._excname(wrapped), str(wrapped))\n else:\n msg = \"Unknown error during blackboard access\"\n \n Exception.__init__(self, msg)\n\n # the wrapped exception\n self.wrapped = None", "def __init__(self, message, text=None, reference=None, contact=None):\n self.openid_message = message\n self.reference = reference\n self.contact = contact\n assert type(message) not in [str, str]\n Exception.__init__(self, text)", "def onConnectError(self, fetcher, error): #$NON-NLS-1$\r", "def connection_failure_reason(self):\n return self._connection_failure_reason", "def error(self, e):\n return \"{}: {} ({})\".format(e.__class__.__name__, e.__doc__, e.message)", "def catch_network_exception(func):\n\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n \"\"\"Wrapper function.\"\"\"\n try:\n if (\n isinstance(self, (Connection, PooledConnection))\n and self.is_server_disconnected()\n ):\n raise InterfaceError(*self.get_disconnected_reason())\n result = func(self, *args, **kwargs)\n if isinstance(result, BaseResult):\n warns = result.get_warnings()\n for warn in warns:\n if warn[\"code\"] in CONNECTION_CLOSED_ERROR:\n error_msg = CONNECTION_CLOSED_ERROR[warn[\"code\"]]\n reason = (\n f\"Connection close: {warn['msg']}: {error_msg}\",\n warn[\"code\"],\n )\n if isinstance(self, (Connection, PooledConnection)):\n self.set_server_disconnected(reason)\n break\n return result\n except (InterfaceError, OSError, RuntimeError, TimeoutError) as err:\n if (\n func.__name__ == \"get_column_metadata\"\n and args\n and isinstance(args[0], SqlResult)\n ):\n warns = args[0].get_warnings()\n if warns:\n warn = warns[0]\n error_msg = CONNECTION_CLOSED_ERROR[warn[\"code\"]]\n reason = (\n f\"Connection close: {warn['msg']}: {error_msg}\",\n warn[\"code\"],\n )\n if isinstance(self, PooledConnection):\n self.pool.remove_connections()\n # pool must be listed as faulty if server is shutting down\n if warn[\"code\"] == 1053:\n PoolsManager().set_pool_unavailable(\n self.pool, InterfaceError(*reason)\n )\n if isinstance(self, (Connection, PooledConnection)):\n self.set_server_disconnected(reason)\n self.disconnect()\n raise InterfaceError(*reason) from err\n self.disconnect()\n raise\n\n return wrapper", "def exception(self, msg, *args, **kwargs):\n ex = sys.exc_info()[1]\n\n if hasattr(ex, '_monocle'):\n args = args + (format_tb(ex),)\n self.logger.error('%s\\n%%s' % msg, *args, **kwargs)\n else:\n super(Adapter, self).exception(msg, *args, **kwargs)", "def raise_error(cls, *args):\n raise cls(cls.message)", "def get_exception():\n raise Exception(\"example\")", "def __init__ (self, msg, parent=None, api_object=None, from_log=False) :\n Exception.__init__(self, msg)\n\n self._plain_message = msg\n self._exceptions = [self]\n self._top_exception = self\n self._ptype = type(parent).__name__ # parent exception type\n self._stype = 
type(self ).__name__ # own exception type \n\n ignore_stack = 2\n if from_log : \n ignore_stack += 1\n\n\n if api_object : \n self._object = weakref.ref (api_object)\n else :\n self._object = None\n\n\n # did we get a parent exception?\n if parent :\n\n # if so, then this exception is likely created in some 'except'\n # clause, as a reaction on a previously catched exception (the\n # parent). Thus we append the message of the parent to our own\n # message, but keep the parent's traceback (after all, the original\n # exception location is what we are interested in).\n #\n if isinstance (parent, SagaException) :\n # that all works nicely when parent is our own exception type...\n self._traceback = parent.traceback\n\n frame = traceback.extract_stack ()[- ignore_stack]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \" %-20s: %s (%s)\\n%s\" \\\n % (self._stype, msg, line, parent.msg)\n\n else :\n if self._stype != \"NoneType\" :\n # ... but if parent is a native (or any other) exception type,\n # we don't have a traceback really -- so we dig it out of\n # sys.exc_info. \n trace = sys.exc_info ()[2]\n stack = traceback.extract_tb (trace)\n traceback_list = traceback.format_list (stack)\n self._traceback = \"\".join (traceback_list)\n\n # the message composition is very similar -- we just inject the\n # parent exception type inconspicuously somewhere (above that\n # was part of 'parent.message' already).\n frame = traceback.extract_stack ()[- ignore_stack]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \" %-20s: %s (%s)\\n %-20s: %s\" \\\n % (self._stype, msg, line, self._ptype, parent)\n\n else :\n\n # if we don't have a parent, we are a 1st principle exception,\n # i.e. a reaction to some genuine code error. Thus we extract the\n # traceback from exactly where we are in the code (the last stack\n # frame will be the call to this exception constructor), and we\n # create the original exception message from 'stype' and 'message'.\n stack = traceback.extract_stack ()\n traceback_list = traceback.format_list (stack)\n self._traceback = \"\".join (traceback_list[:-1])\n frame = traceback.extract_stack ()[- ignore_stack -1]\n line = \"%s +%s (%s) : %s\" % frame \n self._message = \"%s (%s)\" % (msg, line)\n\n # we can't do that earlier as _msg was not set up before\n self._messages = [self._message]", "def print_requests_connectionerror(cls, class_name):\n print(\n f\"{cls.ERROR_PREFIX} {cls.REQUESTS_PACKAGE_CONNECTIONERROR_MESSAGE} '{class_name}'.\"\n )", "def from_http_error(cls, e):\n assert isinstance(e, requests.HTTPError), \"Expected 'requests.HTTPError' object\"\n r = e.response\n if r.status_code == 400:\n raise BadRequest(format_exception(e))\n elif r.status_code == 401:\n raise Unauthorized(format_exception(e))\n elif r.status_code == 403:\n raise Forbidden(format_exception(e))\n elif r.status_code == 404:\n raise NotFound(format_exception(e))\n elif r.status_code == 405:\n raise NoMethod(format_exception(e))\n elif r.status_code == 409:\n raise Conflict(format_exception(e))\n elif r.status_code == 411:\n raise LengthRequired(format_exception(e))\n elif r.status_code == 412:\n raise PreconditionFailed(format_exception(e))\n elif r.status_code == 416:\n raise BadRange(format_exception(e))\n elif r.status_code == 500:\n raise InternalServerError(format_exception(e))\n elif r.status_code == 501:\n raise NotImplemented(format_exception(e))\n elif r.status_code == 502:\n raise BadGateway(format_exception(e))\n else:\n logger.error(\n 'Unhandled HTTPError status code {sc} -- 
{msg}.'.format(sc=r.status_code, msg=format_exception(e)))\n raise InternalServerError(format_exception(e))", "def __init__(self, message=\"\"):\n super(ApplicationError, self).__init__(message)", "def exception(self, *args, **kwargs):\n return super(Blueprint, self).exception(*args, **kwargs)", "def __init__(self, exception):\n self.wrapped_exc = exception\n self.status_int = exception.status_int", "def __init__(self, level, message):\n Exception.__init__(self)\n self.level = level\n self.msg = message", "def get_error(self):\n return self.e", "def __init__(self,value,message):\n ValueError.__init__(self,value,message)", "def introspectionException(self):\n return self._introspectionException", "def raise_connection_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.ConnectionError", "def raise_connection_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.ConnectionError", "def __init__(self, msg=\"\", exc=NotImplementedError):\r\n self.msg = msg\r\n self.exc = exc", "def rollout_exceptions(self):\n return (mujoco_py.builder.MujocoException)", "def exception_class(self, exception):\n\n\t\tcls = type(exception)\n\t\tif cls.__module__ == 'exceptions': # Built-in exception.\n\t\t\treturn cls.__name__\n\t\treturn \"%s.%s\" % (cls.__module__, cls.__name__)", "def exception(self) -> typing.Optional[Exception]:\n return self._exception", "def handle_exception(e):\n maps = {\n exp.ServiceExp: api_exceptions.ServiceException,\n exp.PermissionExp: api_exceptions.ForbiddenException,\n exp.NotFoundExp: api_exceptions.NotFoundException,\n exp.ValueExp: api_exceptions.BadRequestException,\n exp.BadRequestExp: api_exceptions.BadRequestException,\n }\n raise maps[e.__class__](e.message)", "def __init__(self, exception, message=\"Invalid requests parse!\"):\n self.message = message\n self.exception = exception\n super().__init__(self.message)", "async def default_error_handler(ex, state):\n if not isinstance(ex, psycopg2.Error):\n # Unhandled exception, raise it\n logger.exception(ex)\n return ProgrammingError(ex)\n if isinstance(ex, psycopg2.ProgrammingError):\n logger.warning('psycopg2: ProgrammingError not recoverable')\n logger.exception(ex)\n return QueryError(str(ex))\n if isinstance(ex, psycopg2.DataError):\n logger.warning('psycopg2: DataError not recoverable')\n return QueryError(str(ex))\n if isinstance(ex, psycopg2.OperationalError):\n logger.info('psycopg2: OperationalError occured, recovering')\n if isinstance(ex, psycopg2.InterfaceError):\n logger.info('psycopg2: InterfaceError occured, recovering')\n return None", "def clientConnectionFailed(self, connector, reason):\n\n moduleCoordinator.ModuleCoordinator().putError(\"Error connecting to \" + self.config['botnet'], self.module)", "def __init__(self, error_msg):\n super(SdkException, self).__init__()\n self.error_msg = error_msg", "def handle_error(self):\n self.cmd_channel.debug(\"DTPHandler.handle_error()\")\n try:\n raise\n # if error is connection related we provide a detailed\n # information about it\n except socket.error, err:\n if err[0] in errno.errorcode:\n error = err[1]\n else:\n error = \"Unknown connection error\"\n # an error could occur in case we fail reading / writing\n # from / to file (e.g. 
file system gets full)\n except EnvironmentError, err:\n error = _strerror(err)\n except:\n # some other exception occurred; we don't want to provide\n # confidential error messages to user so we return a\n # generic \"unknown error\" response.\n logerror(traceback.format_exc()) \n error = \"Unknown error\"\n self.cmd_channel.respond(\"426 %s; transfer aborted.\" %error)\n self.close()", "def link_dashi_exceptions(dashi_conn):\n dashi_conn.link_exceptions(\n custom_exception=epu.exceptions.NotFoundError,\n dashi_exception=dashi.exceptions.NotFoundError)\n dashi_conn.link_exceptions(\n custom_exception=epu.exceptions.WriteConflictError,\n dashi_exception=dashi.exceptions.WriteConflictError)\n dashi_conn.link_exceptions(\n custom_exception=epu.exceptions.BadRequestError,\n dashi_exception=dashi.exceptions.BadRequestError)", "def rescue(self, instance):\n pass", "def __init__(self, msg):\n\n super(ConfigError, self).__init__(msg)\n self.msg = msg", "def _connection_failed(self, link_uri, msg):\n print('Connection to %s failed: %s' % (link_uri, msg))", "def __init__(self, msg):\n super(QuitMessageException, self).__init__(msg)", "def _connection_failed(self, link_uri, msg):\n print \"Connection to %s failed: %s\" % (link_uri, msg)", "def __init__(self, message=\"\"):\n super(DataError, self).__init__(message)", "def test_wrong_conn_param(self):\n self.assertRaises(TypeError, lambda: LDAPConnection(\"wrong\"))\n self.assertRaises(TypeError, lambda: LDAPConnection(LDAPClient(), 1))", "def _raise_adal_error(ex):\n if ex.args[0] == \"Unexpected polling state code_expired\":\n raise MsticpyKqlConnectionError(\n \"Authentication request was not completed.\",\n title=\"authentication timed out\",\n )\n\n err_response = getattr(ex, \"error_response\")\n if err_response and \"error_description\" in ex.error_response:\n ex_mssgs = ex.error_response[\"error_description\"].split(\"\\r\\n\")\n else:\n ex_mssgs = [f\"Full error: {ex}\"]\n raise MsticpyKqlConnectionError(\n *ex_mssgs, title=\"could not authenticate to tenant\"\n )", "def handle_exceptions(self, excp):\r\n try:\r\n if excp:\r\n errorstr = \"Exception: {0}\".format(excp.__class__.__name__)\r\n errorstr = errorstr+\"({0})\".format(excp.message) if \\\r\n hasattr(excp, \"message\") else errorstr\r\n LOGGER.info(errorstr)\r\n raise\r\n # ****** RDMC ERRORS ******\r\n except ConfigurationFileError as excp:\r\n self.retcode = ReturnCodes.CONFIGURATION_FILE_ERROR\r\n UI().error(excp)\r\n sys.exit(excp.errcode)\r\n except CommandNotEnabledError as excp:\r\n self.retcode = ReturnCodes.COMMAND_NOT_ENABLED_ERROR\r\n UI().command_not_enabled(excp)\r\n extensions.Commands['HelpCommand'](rdmc=self).run(\"\")\r\n except InvalidCommandLineError as excp:\r\n self.retcode = ReturnCodes.INVALID_COMMAND_LINE_ERROR\r\n UI().invalid_commmand_line(excp)\r\n except NoCurrentSessionEstablished as excp:\r\n self.retcode = ReturnCodes.NO_CURRENT_SESSION_ESTABLISHED\r\n UI().error(excp)\r\n except NoChangesFoundOrMadeError as excp:\r\n self.retcode = ReturnCodes.NO_CHANGES_MADE_OR_FOUND\r\n UI().invalid_commmand_line(excp)\r\n except StandardBlobErrorHandler as excp:\r\n self.retcode = ReturnCodes.GENERAL_ERROR\r\n UI().standard_blob_error(excp)\r\n except InvalidFileInputError as excp:\r\n self.retcode = ReturnCodes.INVALID_FILE_INPUT_ERROR\r\n UI().invalid_commmand_line(excp)\r\n except InvalidCommandLineErrorOPTS as excp:\r\n self.retcode = ReturnCodes.INVALID_COMMAND_LINE_ERROR\r\n except InvalidFileFormattingError as excp:\r\n self.retcode = 
ReturnCodes.INVALID_FILE_FORMATTING_ERROR\r\n UI().invalid_file_formatting(excp)\r\n except NoContentsFoundForOperationError as excp:\r\n self.retcode = ReturnCodes.NO_CONTENTS_FOUND_FOR_OPERATION\r\n UI().no_contents_found_for_operation(excp)\r\n except InfoMissingEntriesError as excp:\r\n self.retcode = ReturnCodes.NO_VALID_INFO_ERROR\r\n UI().error(excp)\r\n except (InvalidOrNothingChangedSettingsError, redfish.ris.rmc_helper.\\\r\n IncorrectPropValue) as excp:\r\n self.retcode = ReturnCodes.SAME_SETTINGS_ERROR\r\n UI().error(excp)\r\n except NoDifferencesFoundError as excp:\r\n self.retcode = ReturnCodes.NO_CHANGES_MADE_OR_FOUND\r\n UI().no_differences_found(excp)\r\n except MultipleServerConfigError as excp:\r\n self.retcode = ReturnCodes.MULTIPLE_SERVER_CONFIG_FAIL\r\n UI().multiple_server_config_fail(excp)\r\n except InvalidMSCfileInputError as excp:\r\n self.retcode = ReturnCodes.MULTIPLE_SERVER_INPUT_FILE_ERROR\r\n UI().multiple_server_config_input_file(excp)\r\n except FirmwareUpdateError as excp:\r\n self.retcode = ReturnCodes.FIRMWARE_UPDATE_ERROR\r\n UI().error(excp)\r\n except FailureDuringCommitError as excp:\r\n self.retcode = ReturnCodes.FAILURE_DURING_COMMIT_OPERATION\r\n UI().error(excp)\r\n except BootOrderMissingEntriesError as excp:\r\n self.retcode = ReturnCodes.BOOT_ORDER_ENTRY_ERROR\r\n UI().error(excp)\r\n except NicMissingOrConfigurationError as excp:\r\n self.retcode = ReturnCodes.NIC_MISSING_OR_INVALID_ERROR\r\n UI().error(excp)\r\n except (IncompatibleiLOVersionError, redfish.ris.rmc_helper.\\\r\n IncompatibleiLOVersionError) as excp:\r\n self.retcode = ReturnCodes.INCOMPATIBLE_ILO_VERSION_ERROR\r\n UI().printmsg(excp)\r\n except IncompatableServerTypeError as excp:\r\n self.retcode = ReturnCodes.INCOMPATIBLE_SERVER_TYPE\r\n UI().printmsg(excp)\r\n except IloLicenseError as excp:\r\n UI().printmsg(excp)\r\n self.retcode = ReturnCodes.ILO_LICENSE_ERROR\r\n except InvalidCListFileError as excp:\r\n self.retcode = ReturnCodes.INVALID_CLIST_FILE_ERROR\r\n UI().error(excp)\r\n except PartitionMoutingError as excp:\r\n self.retcode = ReturnCodes.UNABLE_TO_MOUNT_BB_ERROR\r\n UI().error(excp)\r\n except TimeOutError as excp:\r\n self.retcode = ReturnCodes.UPDATE_SERVICE_BUSY\r\n UI().error(excp)\r\n except DownloadError as excp:\r\n self.retcode = ReturnCodes.FAILED_TO_DOWNLOAD_COMPONENT\r\n UI().error(excp)\r\n except UploadError as excp:\r\n self.retcode = ReturnCodes.FAILED_TO_UPLOAD_COMPONENT\r\n UI().error(excp)\r\n except BirthcertParseError as excp:\r\n self.retcode = ReturnCodes.BIRTHCERT_PARSE_ERROR\r\n UI().error(excp)\r\n except ResourceExists as excp:\r\n self.retcode = ReturnCodes.RESOURCE_EXISTS_ERROR\r\n UI().error(excp)\r\n except InvalidKeyError as excp:\r\n self.retcode = ReturnCodes.ENCRYPTION_ERROR\r\n UI().error(\"Invalid key has been entered for \" \\\r\n \"encryption/decryption.\")\r\n except UnableToDecodeError as excp:\r\n self.retcode = ReturnCodes.ENCRYPTION_ERROR\r\n UI().error(excp)\r\n except UnabletoFindDriveError as excp:\r\n self.retcode = ReturnCodes.DRIVE_MISSING_ERROR\r\n UI().error(excp)\r\n UI().printmsg(\"Error occurred while reading device labels.\")\r\n except PathUnavailableError as excp:\r\n self.retcode = ReturnCodes.PATH_UNAVAILABLE_ERROR\r\n if excp:\r\n UI().error(excp)\r\n else:\r\n UI().printmsg(\"Requested path is unavailable.\")\r\n except TaskQueueError as excp:\r\n self.retcode = ReturnCodes.TASKQUEUE_ERROR\r\n UI().error(excp)\r\n # ****** CLI ERRORS ******\r\n except cliutils.CommandNotFoundException as excp:\r\n 
self.retcode = ReturnCodes.UI_CLI_COMMAND_NOT_FOUND_EXCEPTION\r\n UI().command_not_found(excp)\r\n extensions.Commands['HelpCommand'](rdmc=self).run(\"\")\r\n # ****** RMC/RIS ERRORS ******\r\n except redfish.ris.UndefinedClientError:\r\n self.retcode = ReturnCodes.RIS_UNDEFINED_CLIENT_ERROR\r\n UI().error(\"Please login before making a selection\")\r\n except (redfish.ris.InstanceNotFoundError, redfish.ris.\\\r\n RisInstanceNotFoundError) as excp:\r\n self.retcode = ReturnCodes.RIS_INSTANCE_NOT_FOUND_ERROR\r\n UI().printmsg(excp)\r\n except redfish.ris.CurrentlyLoggedInError as excp:\r\n self.retcode = ReturnCodes.RIS_CURRENTLY_LOGGED_IN_ERROR\r\n UI().error(excp)\r\n except redfish.ris.NothingSelectedError as excp:\r\n self.retcode = ReturnCodes.RIS_NOTHING_SELECTED_ERROR\r\n UI().nothing_selected()\r\n except redfish.ris.NothingSelectedFilterError as excp:\r\n self.retcode = ReturnCodes.RIS_NOTHING_SELECTED_FILTER_ERROR\r\n UI().nothing_selected_filter()\r\n except redfish.ris.NothingSelectedSetError as excp:\r\n self.retcode = ReturnCodes.RIS_NOTHING_SELECTED_SET_ERROR\r\n UI().nothing_selected_set()\r\n except redfish.ris.InvalidSelectionError as excp:\r\n self.retcode = ReturnCodes.RIS_INVALID_SELECTION_ERROR\r\n UI().error(excp)\r\n except redfish.ris.rmc_helper.UnableToObtainIloVersionError as excp:\r\n self.retcode = ReturnCodes.INCOMPATIBLE_ILO_VERSION_ERROR\r\n UI().error(excp)\r\n except redfish.ris.IdTokenError as excp:\r\n if excp.message:\r\n UI().printmsg(excp.message)\r\n else:\r\n UI().printmsg(u\"Logged-in account does not have the privilege \"\\\r\n \" required to fulfill the request or a required \"\\\r\n \" token is missing.\"\\\r\n \"\\nEX: biospassword flag if bios password present \"\\\r\n \"or tpmenabled flag if TPM module present.\")\r\n self.retcode = ReturnCodes.RIS_MISSING_ID_TOKEN\r\n except redfish.ris.SessionExpired as excp:\r\n self.retcode = ReturnCodes.RIS_SESSION_EXPIRED\r\n self.app.logout()\r\n UI().printmsg(\"Current session has expired or is invalid, \"\\\r\n \"please login again with proper credentials to continue.\\n\")\r\n except redfish.ris.ValidationError as excp:\r\n self.retcode = ReturnCodes.RIS_VALIDATION_ERROR\r\n except redfish.ris.ValueChangedError as excp:\r\n self.retcode = ReturnCodes.RIS_VALUE_CHANGED_ERROR\r\n except redfish.ris.ris.SchemaValidationError as excp:\r\n UI().printmsg(\"Error found in schema, try running with the \"\\\r\n \"--latestschema flag.\")\r\n self.retcode = ReturnCodes.RIS_SCHEMA_PARSE_ERROR\r\n # ****** RMC/RIS ERRORS ******\r\n except redfish.rest.v1.RetriesExhaustedError as excp:\r\n self.retcode = ReturnCodes.V1_RETRIES_EXHAUSTED_ERROR\r\n UI().retries_exhausted_attemps()\r\n except redfish.rest.v1.InvalidCredentialsError as excp:\r\n self.retcode = ReturnCodes.V1_INVALID_CREDENTIALS_ERROR\r\n UI().invalid_credentials(excp)\r\n except redfish.rest.v1.JsonDecodingError as excp:\r\n self.retcode = ReturnCodes.JSON_DECODE_ERROR\r\n UI().error(excp)\r\n except redfish.rest.v1.ServerDownOrUnreachableError as excp:\r\n self.retcode = \\\r\n ReturnCodes.V1_SERVER_DOWN_OR_UNREACHABLE_ERROR\r\n UI().error(excp)\r\n except redfish.rest.v1.ChifDriverMissingOrNotFound as excp:\r\n self.retcode = ReturnCodes.V1_CHIF_DRIVER_MISSING_ERROR\r\n UI().printmsg(\"Chif driver not found, please check that the \" \\\r\n \"chif driver is installed.\")\r\n except redfish.rest.v1.SecurityStateError as excp:\r\n self.retcode = ReturnCodes.V1_SECURITY_STATE_ERROR\r\n if isinstance(excp.message, int):\r\n UI().printmsg(\"High security 
mode [%s] has been enabled. \" \\\r\n \"Please provide credentials.\" % excp.message)\r\n else:\r\n UI().error(excp)\r\n except redfish.hpilo.risblobstore2.ChifDllMissingError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_CHIF_DLL_MISSING_ERROR\r\n UI().printmsg(\"iLOrest Chif dll not found, please check that the \"\\\r\n \"chif dll is present.\")\r\n except redfish.hpilo.risblobstore2.UnexpectedResponseError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_UNEXPECTED_RESPONSE_ERROR\r\n UI().printmsg(\"Unexpected data received from iLO.\")\r\n except redfish.hpilo.risblobstore2.HpIloError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_ILO_ERROR\r\n UI().printmsg(\"iLO returned a failed error code.\")\r\n except redfish.hpilo.risblobstore2.Blob2CreateError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_CREATE_BLOB_ERROR\r\n UI().printmsg(\"Blob create operation failed.\")\r\n except redfish.hpilo.risblobstore2.Blob2ReadError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_READ_BLOB_ERROR\r\n UI().printmsg(\"Blob read operation failed.\")\r\n except redfish.hpilo.risblobstore2.Blob2WriteError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_WRITE_BLOB_ERROR\r\n UI().printmsg(\"Blob write operation failed.\")\r\n except redfish.hpilo.risblobstore2.Blob2DeleteError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_BLOB_DELETE_ERROR\r\n UI().printmsg(\"Blob delete operation failed.\")\r\n except redfish.hpilo.risblobstore2.Blob2OverrideError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_BLOB_OVERRIDE_ERROR\r\n UI().error(excp)\r\n UI().printmsg(\"\\nBlob was overwritten by another user. Please \" \\\r\n \"ensure only one user is making changes at a time locally.\")\r\n except redfish.hpilo.risblobstore2.BlobRetriesExhaustedError as excp:\r\n self.retcode = ReturnCodes.REST_BLOB_RETRIES_EXHAUSETED_ERROR\r\n UI().printmsg(\"\\nBlob operation still fails after max retries.\")\r\n except redfish.hpilo.risblobstore2.Blob2FinalizeError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_BLOB_FINALIZE_ERROR\r\n UI().printmsg(\"Blob finalize operation failed.\")\r\n except redfish.hpilo.risblobstore2.BlobNotFoundError as excp:\r\n self.retcode = ReturnCodes.REST_ILOREST_BLOB_NOT_FOUND_ERROR\r\n UI().printmsg(\"Blob not found with key and namespace provided.\")\r\n except redfish.ris.rmc_helper.InvalidPathError as excp:\r\n self.retcode = ReturnCodes.RIS_REF_PATH_NOT_FOUND_ERROR\r\n UI().printmsg(\"Reference path not found.\")\r\n except redfish.ris.rmc_helper.IloResponseError as excp:\r\n self.retcode = ReturnCodes.RIS_ILO_RESPONSE_ERROR\r\n except redfish.ris.rmc_helper.UserNotAdminError as excp:\r\n UI().user_not_admin()\r\n self.retcode = ReturnCodes.USER_NOT_ADMIN\r\n except redfish.hpilo.rishpilo.HpIloInitialError as excp:\r\n UI().error(excp)\r\n self.retcode = ReturnCodes.RIS_ILO_INIT_ERROR\r\n except redfish.hpilo.rishpilo.HpIloWriteError as excp:\r\n UI().error(excp)\r\n self.retcode = ReturnCodes.RESOURCE_ALLOCATION_ISSUES_ERROR\r\n except redfish.hpilo.rishpilo.HpIloReadError as excp:\r\n UI().error(excp)\r\n self.retcode = ReturnCodes.RESOURCE_ALLOCATION_ISSUES_ERROR\r\n # ****** RIS OBJECTS ERRORS ******\r\n except redfish.ris.ris.BiosUnregisteredError as excp:\r\n self.retcode = ReturnCodes.RIS_RIS_BIOS_UNREGISTERED_ERROR\r\n UI().bios_unregistered_error()\r\n # ****** GENERAL ERRORS ******\r\n except SystemExit:\r\n self.retcode = ReturnCodes.GENERAL_ERROR\r\n raise\r\n except Exception as excp:\r\n self.retcode = ReturnCodes.GENERAL_ERROR\r\n 
sys.stderr.write('ERROR: %s\\n' % excp)\r\n\r\n if self.opts.debug:\r\n traceback.print_exc(file=sys.stderr)", "def serious_error(self, e):\n pass", "def raise_socket_error(timeout=None):\n try:\n raise\n\n except _socket.timeout:\n if timeout is not None:\n raise TimeoutError, \"Timed out after %s seconds\" % timeout, \\\n _sys.exc_info()[2]\n raise TimeoutError, \"Timed out\", _sys.exc_info()[2]\n\n except _socket.gaierror, e:\n # pylint: disable = E1101\n raise AddressError, \"Address Information Error: %s (%s)\" % \\\n (raise_socket_error.EAIS.get(e[0], e[0]), e[1]), \\\n _sys.exc_info()[2]\n\n except _socket.herror, e:\n raise AddressError, \"Host Resolution Error %s: %s\" % \\\n (e[0], e[1]), _sys.exc_info()[2]\n\n except _socket.sslerror, e:\n raise SSLError, \"Socket SSL Error: %s\" % str(e), _sys.exc_info()[2]\n\n except _socket.error, e:\n if len(e.args) == 1:\n raise SocketError, \"Socket Error: %s\" % \\\n (e[0],), _sys.exc_info()[2]\n else:\n raise SocketError, \"Socket Error %s: %s\" % \\\n (_errno.errorcode.get(e[0], e[0]), e[1]), _sys.exc_info()[2]\n\n except IOError, e:\n raise SocketError, \"Socket Error %s: %s\" % \\\n (_errno.errorcode.get(e[0], e[0]), str(e)), \\\n _sys.exc_info()[2]", "def exception(self):\n exc_type, exc_value, exc_tb = sys.exc_info()\n cui.message(traceback.format_exception_only(exc_type, exc_value)[-1],\n log_message=traceback.format_exc())", "def raise_exc(self, exctype):\n\t\t_async_raise(self._get_my_tid(), exctype)", "def handle_connection_lost(self, exc: Optional[Exception]) -> None:", "def _retry_on_connection_error(exc):\n\n if isinstance(exc, db_exception.DBConnectionError):\n LOG.warning(\"Connection error detected. Retrying...\")\n return True\n return False", "def _publish_error(self, exc_info, parent=None):\n exc_type, exception, traceback = exc_info\n\n content = {\n \"ename\": exc_type.__name__,\n \"evalue\": str(exception),\n \"traceback\": format_tb(traceback),\n }\n self.session.send(\n self.iopub_socket,\n \"error\",\n content,\n parent=parent,\n ident=self._topic(\"error\"),\n )", "def exception_handler(self, exception):\n pass", "def sample_exception_function(self, a, b):\r\n raise Exception(\"An error has occurred.\")" ]
[ "0.6604351", "0.6386638", "0.6341203", "0.6303432", "0.62695044", "0.62493557", "0.6177012", "0.6173146", "0.6118006", "0.60822475", "0.60723776", "0.60721236", "0.6066645", "0.60590744", "0.60276735", "0.60103536", "0.59800124", "0.5904314", "0.5867853", "0.58674383", "0.58631456", "0.5806314", "0.5789059", "0.57713217", "0.57563686", "0.57542354", "0.57522684", "0.5751538", "0.5709271", "0.5696378", "0.56951046", "0.56901956", "0.56867135", "0.56501216", "0.564076", "0.56247634", "0.5590876", "0.55908155", "0.55589616", "0.5554841", "0.55344665", "0.5523844", "0.55073625", "0.55055887", "0.5504874", "0.55034584", "0.54994905", "0.5470767", "0.5470109", "0.54691476", "0.5468793", "0.54327804", "0.54327005", "0.5410177", "0.54096276", "0.540918", "0.54072356", "0.5400863", "0.5399425", "0.539614", "0.53900546", "0.53752786", "0.5371952", "0.53566104", "0.5355035", "0.53495705", "0.5349568", "0.5344034", "0.53366804", "0.5334935", "0.5334935", "0.5331735", "0.53309727", "0.5323751", "0.53212243", "0.53146887", "0.5307915", "0.5307484", "0.5292025", "0.5290984", "0.5288986", "0.5286172", "0.5275244", "0.5274636", "0.5273499", "0.5243176", "0.5233937", "0.52334774", "0.52299", "0.5226787", "0.5226667", "0.522664", "0.5219611", "0.52149117", "0.52146727", "0.52113885", "0.51991165", "0.5193207", "0.5188044", "0.51872605" ]
0.7421417
0
The base exception class of service response exceptions.
def __init__(self, status_code, sdk_error):
    super(ServiceResponseException, self).__init__(sdk_error.error_msg)
    self.status_code = status_code
    self.error_code = sdk_error.error_code
    self.request_id = sdk_error.request_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generic_service_exception(*args):\n exception_tuple = LambdaErrorResponses.ServiceException\n\n return BaseLocalService.service_response(\n LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, \"ServiceException\"),\n LambdaErrorResponses._construct_headers(exception_tuple[0]),\n exception_tuple[1],\n )", "def exceptions(e):\n # NOTE: add log entry\n str(getattr(e, \"code\", \"unavailable\"))\n log_error_code = str(getattr(e, \"code\", \"unavailable\"))\n service_log.error(\n f\"{request.remote_addr} {request.method} {request.scheme} {request.full_path}\\n\"\n f\"Error code: {log_error_code}\\n\"\n f\"Stack trace: {traceback.format_exc()}\"\n )\n\n # NOTE: craft user messages\n if hasattr(e, \"code\"):\n code = int(e.code)\n\n # NOTE: return an http error for methods with no body allowed. This prevents undesired exceptions.\n NO_PAYLOAD_METHODS = \"HEAD\"\n if request.method in NO_PAYLOAD_METHODS:\n return Response(status=code)\n\n error: ServiceError\n if code == 400:\n error = ProgramHttpRequestError(e)\n elif code == 404:\n error = ProgramHttpMissingError(e)\n elif code == 405:\n error = ProgramHttpMethodError(e)\n elif code == 408:\n error = ProgramHttpTimeoutError(e)\n else:\n error = ProgramHttpServerError(e, code)\n\n return error_response(error)\n\n # NOTE: Werkzeug exceptions should be covered above, the following line is for\n # unexpected HTTP server errors.\n return error_response(e)", "def __init__(self, code, reason):\n super(RequestError, self).__init__(code, reason)", "def handle_exception(e):\n maps = {\n exp.ServiceExp: api_exceptions.ServiceException,\n exp.PermissionExp: api_exceptions.ForbiddenException,\n exp.NotFoundExp: api_exceptions.NotFoundException,\n exp.ValueExp: api_exceptions.BadRequestException,\n exp.BadRequestExp: api_exceptions.BadRequestException,\n }\n raise maps[e.__class__](e.message)", "def format_exception(self):\n if isinstance(self.message, dict):\n return self.message, self.status_code\n return Request.format_exception(self.message, self.status_code)", "def from_http_error(cls, e):\n assert isinstance(e, requests.HTTPError), \"Expected 'requests.HTTPError' object\"\n r = e.response\n if r.status_code == 400:\n raise BadRequest(format_exception(e))\n elif r.status_code == 401:\n raise Unauthorized(format_exception(e))\n elif r.status_code == 403:\n raise Forbidden(format_exception(e))\n elif r.status_code == 404:\n raise NotFound(format_exception(e))\n elif r.status_code == 405:\n raise NoMethod(format_exception(e))\n elif r.status_code == 409:\n raise Conflict(format_exception(e))\n elif r.status_code == 411:\n raise LengthRequired(format_exception(e))\n elif r.status_code == 412:\n raise PreconditionFailed(format_exception(e))\n elif r.status_code == 416:\n raise BadRange(format_exception(e))\n elif r.status_code == 500:\n raise InternalServerError(format_exception(e))\n elif r.status_code == 501:\n raise NotImplemented(format_exception(e))\n elif r.status_code == 502:\n raise BadGateway(format_exception(e))\n else:\n logger.error(\n 'Unhandled HTTPError status code {sc} -- {msg}.'.format(sc=r.status_code, msg=format_exception(e)))\n raise InternalServerError(format_exception(e))", "def handle_exception(self, e):\n if isinstance(e, exceptions.APIException):\n return e.get_response(self.request)\n else:\n exc = exceptions.OtherException(self.request)\n return exc.get_response(self.request)", "def raise_error(self, err_code, response):\n clsname = str(self.__class__).split('.')[-1].split(\"'\")[0]\n raise 
ERROR_CODES[err_code](\n 'Response Type: \"%s\"\\tResponse: %s' % (\n clsname, response))", "def exception(self) -> exceptions.ErrorMessageException:\n\n return ErrorMessage.ERROR_CODES_TO_EXCEPTIONS.get(\n self.error_code,\n exceptions.GenericException\n )", "def WrappedException(self) -> object:", "def __init__(self, content, status):\n Exception.__init__(self)\n self.status = status\n self.content = content", "def exceptionType(self):\n return ExceptionType.GeneralException", "def cancelled_exception_class(cls) -> type[BaseException]:", "def exception(self) -> Exception:\n return self._exception", "def __init__(self, error, status_code, *args, **kwargs):\n super(BusinessException, self).__init__(*args, **kwargs)\n self.error = error\n self.status_code = status_code", "def exception(self, *args, **kwargs):", "def _raise_http_error(self, *args, **kwargs):", "def _ExceptionResponse(args_dict=None):\n if args_dict is None:\n args_dict = {}\n args_dict[\"code\"] = \"Exception\"\n return CGateway._DumpResponse(args_dict)", "def unexpected_error(self, exception):", "def exception(self, e):\n pass", "def exception(self) -> str:\n return pulumi.get(self, \"exception\")", "def raise_best_exception(self, json_response):\n exceptions = {\n 206: CannotParseError,\n 400: BadRequestError,\n 401: NotAuthorizedError,\n 403: ForbiddenError,\n 404: NotFoundError,\n 500: ServerError,\n 503: UnavailableError,\n }\n try:\n err = json_response['response']['error']\n raise exceptions[err['code']](err['code'],err['message'])\n except IndexError:\n raise UnexpectedError('','Unexpected error.')", "def __init__(self, error_msg):\n super(SdkException, self).__init__()\n self.error_msg = error_msg", "def __init__(self, exception, message=\"Invalid requests parse!\"):\n self.message = message\n self.exception = exception\n super().__init__(self.message)", "def exception(self):\n return self._exception", "def create_exception(self, msg: str):", "def test_exception_class_hierarchy(self) -> None:\n\n try:\n raise CustomDerivedError(state=\"test\")\n except CustomDerivedError as cex:\n assert type(cex) is CustomDerivedError\n assert \"test\" == cex.state\n except CustomError as cex:\n assert False, \"CustomDerivedError should have caught the exception.\"\n except:\n assert False, f\"Unhandled exception: {sys.exc_info()[0]}\"", "def exception_handler(result, name=\"\"):\n try:\n response_content = result.json()\n # pylint: disable=broad-except\n except Exception:\n response_content = result.text\n\n exc_map = {\n 300: SFDC_MoreThanOneRecord,\n 400: SFDC_MalformedRequest,\n 401: SFDC_ExpiredSession,\n 403: SFDC_RefusedRequest,\n 404: SFDC_ResourceNotFound,\n }\n exc_cls = exc_map.get(result.status_code, SFDC_GeneralError)\n\n raise exc_cls(result.url, result.status_code, name, response_content)", "def raise_exception(self, code, rebrandly_response):\n if code == 200:\n return {\n 'status': 'ok',\n 'code': 200,\n 'response': rebrandly_response\n }\n # Everything went well, continue.\n elif code == 400:\n raise exc.BadRequestError(rebrandly_response.code, rebrandly_response.message)\n elif code == 401:\n raise exc.NotAuthorizedError(rebrandly_response.code, rebrandly_response.message)\n elif code == 403:\n if rebrandly_response.code == 'AlreadyExists':\n raise exc.AlreadyExistsError(rebrandly_response.code, rebrandly_response.message)\n else:\n raise exc.InvalidFormatError(rebrandly_response.code, rebrandly_response.message)\n if code == 404:\n raise exc.NotFoundError(rebrandly_response.code, rebrandly_response.message)\n if 
code == 500:\n raise exc.InternalServerError(rebrandly_response.code, rebrandly_response.message)\n if code == 502:\n raise exc.BadGatewayError(rebrandly_response.code, rebrandly_response.message)\n if code == 503:\n raise exc.APIUnavailableError(rebrandly_response.code, rebrandly_response.message)\n if code == 504:\n raise exc.APITimeoutError(rebrandly_response.code, rebrandly_response.message)", "def __init__(self, exception):\n self.wrapped_exc = exception\n self.status_int = exception.status_int", "def exception_handler(res):\n try:\n res_data = res.json()\n error_code = res_data['status']\n error_msg = build_error_msg(res_data['errors'])\n exception = DemistoException(ERROR_TITLES.get(error_code, '') + error_msg)\n\n except Exception:\n exception = DemistoException(f'Error in API call [{res.status_code}] - {res.reason}')\n\n raise exception", "def _rest_error(self, status_code, error_code, message):\n return {\"status_code\": status_code, \"error_code\": error_code, \"message\": message}", "def raise_for_response(self, response):\n try:\n code = response.errors[0][0]._code\n\n if code == 'invalidRecipient':\n raise InvalidRecipientException()\n elif code == 'recipientBlocked':\n raise RecipientBlockedException()\n elif code == 'emptyMessageContent':\n raise EmptyMessageContentException()\n elif code == 'other':\n raise OtherMMSOAPException()\n else:\n pass\n\n except AttributeError:\n pass", "def custom_exception_handler(exc, context):\n\n if isinstance(exc, exceptions.APIException):\n headers = {}\n if getattr(exc, 'auth_header', None):\n headers['WWW-Authenticate'] = exc.auth_header\n if getattr(exc, 'wait', None):\n headers['Retry-After'] = '%d' % exc.wait\n\n if isinstance(exc.detail, (list, dict)):\n # Use the manually set message if it exists.\n if hasattr(exc, \"message\"):\n message = exc.message or ''\n # Otherwise construct the message from the details.\n else:\n message = ''\n for key in exc.detail:\n try:\n if isinstance(exc.detail[key], str):\n message += exc.detail[key] + ' '\n else:\n for error in exc.detail[key]:\n # Exclude duplicates.\n if error not in message:\n message += error + ' '\n except TypeError:\n if key == 'non_field_errors':\n message = exc.detail[key][0]\n else:\n message = _('Invalid request.')\n\n # Remove trailing whitespace.\n if message.endswith(' '):\n message = message[:-1]\n\n data = OrderedDict([\n ('status', 'error'), ('message', message), ('data', exc.detail)\n ])\n else:\n data = OrderedDict([('status', 'error'), ('message', exc.detail)])\n\n set_rollback()\n return Response(data, status=exc.status_code, headers=headers)\n\n elif isinstance(exc, Http404):\n msg = _('Not found.')\n data = {'status': 'error', 'message': msg}\n\n set_rollback()\n return Response(data, status=status.HTTP_404_NOT_FOUND)\n\n elif isinstance(exc, PermissionDenied):\n msg = _('Permission denied.')\n data = {'status': 'error', 'message': msg}\n\n set_rollback()\n return Response(data, status=status.HTTP_403_FORBIDDEN)\n\n elif isinstance(exc, DjangoBaseException):\n data = {'status': 'error', 'message': exc.default_detail}\n\n set_rollback()\n return Response(data, status=exc.status_code)\n\n # If debug is false return a formatted error and raise an internal error.\n if not settings.DEBUG:\n logger.exception(exc)\n exc = DjangoBaseException()\n return Response(\n {'status': 'error', 'message': exc.default_detail},\n status=exc.status_code\n )\n\n # Note: Unhandled exceptions will raise a 500 error.\n return None", "def _get_exception(self):\r\n \r\n return 
self._exception", "def response_class(self):\n raise NotImplementedError()", "def exceptionhandler(e):\n response = e.get_response()\n response.data = json.dumps({\n \"code\" : e.code,\n \"name\": e.name,\n \"description\": e.description\n })\n response.content_type = \"application/json\"\n\n return response", "def http_exception(error):\n data = {'error': str(error)}\n return app.response_class(\n response=json.dumps(data),\n status=error.code,\n mimetype='application/json'\n )", "def except__else(self, exception: BaseException) -> typing.Any:\n raise exception", "def handle_demisto_exception(e):\n if 'Proxy Error' in str(e):\n raise ConnectionError(MESSAGES['PROXY_ERROR'])\n elif 'ReadTimeoutError' in str(e):\n raise ConnectionError(MESSAGES['REQUEST_TIMEOUT'])\n elif 'ConnectionError' in str(e) or 'ConnectTimeoutError' in str(e):\n raise ConnectionError(MESSAGES['CONNECTION_ERROR'])\n elif 'SSLError' in str(e):\n raise SSLError(MESSAGES['SSL_CERT_ERROR'])\n else:\n raise e", "def _wrap_exceptions(self):\n try:\n yield\n except OSError as err:\n if is_permission_err(err):\n raise AccessDenied(\n pid=None, name=self._name,\n msg=\"service %r is not querable (not enough privileges)\" %\n self._name)\n elif err.winerror in (cext.ERROR_INVALID_NAME,\n cext.ERROR_SERVICE_DOES_NOT_EXIST):\n raise NoSuchProcess(\n pid=None, name=self._name,\n msg=\"service %r does not exist)\" % self._name)\n else:\n raise", "def unexpectedException(self):", "def error_handling(\n self,\n tapi_exception,\n error_message,\n repeat_number,\n response,\n request_kwargs,\n api_params,\n **kwargs\n ):\n raise tapi_exception", "def __init__(self, error_msg):\n super(ConnectionException, self).__init__(error_msg)", "def _handle_api_error(self, error):\n status_code = error.response.status_code\n message = error.message\n\n if 403 == status_code:\n raise NewRelicInvalidApiKeyException(message)\n elif 404 == status_code:\n raise NewRelicUnknownApplicationException(message)\n elif 422 == status_code:\n raise NewRelicInvalidParameterException(message)\n else:\n raise NewRelicApiException(message)", "def __init__(self, context=None, status_code=400):\n\n AppExceptionCase.__init__(self, status_code, context)", "def exception(self, *args, **kwargs):\n return super(Blueprint, self).exception(*args, **kwargs)", "def make_json_error(ex):\n if isinstance(ex, HTTPException):\n return ex;\n elif isinstance(ex, ResourceException):\n info = ex.to_dict()\n status_code = ex.http_status\n info[\"type\"] = \"exception\"\n else:\n message = \"There was an internal server error. 
Please try again later.\"\n info = {\"code\": \"internal_server_error\", \"message\": message, \"type\": \"exception\"}\n status_code = 500\n # generally we should log these 500 errors with the stacktrace somewhere -- we used splunk at Box.\n\n response = jsonify(**info)\n response.status_code = status_code\n return response", "def handle_exception(self,exc):\n logger.error(f\"Exception in request: {traceback.format_exc()}\")\n status_obj = status.HTTP_400_BAD_REQUEST\n if type(exc) is response.Http404:\n status_obj = status.HTTP_404_NOT_FOUND\n return Response(\n MediaUtil.generate_error_image(\n status_obj,\n str(exc),\n self.request.accepted_renderer.format),\n status=status_obj)", "def __init__(self, message, full_response={}):\r\n super(ThunderdomeQueryError, self).__init__(message)\r\n self._full_response = full_response", "def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response", "def generic_exception_handler( exc, context ):\n # Call REST framework's default exception handler first,\n # to get the standard error response.\n response = exception_handler( exc, context )\n\n if isinstance( exc, Http_error ):\n response = Response( exc.context, status=exc.status_code )\n set_rollback()\n\n return response", "def exception_handler(exc):\n if isinstance(exc, exceptions.APIException):\n headers = {}\n if getattr(exc, 'auth_header', None):\n headers['WWW-Authenticate'] = exc.auth_header\n if getattr(exc, 'wait', None):\n headers['X-Throttle-Wait-Seconds'] = '%d' % exc.wait\n\n return Response({'error_code': CustomSerializer.get_api_code(exc.detail),\n 'error_message': exc.detail,\n 'errors': []},\n status=exc.status_code,\n headers=headers)\n\n elif isinstance(exc, Http404):\n return Response({'error_code': CustomSerializer.get_api_code('Not found'),\n 'error_message': 'Not found',\n 'errors': []},\n status=status.HTTP_404_NOT_FOUND)\n\n elif isinstance(exc, PermissionDenied):\n return Response({'error_code': CustomSerializer.get_api_code('You do not have permission to perform this action.'),\n 'error_message': 'You do not have permission to perform this action.',\n 'errors': []},\n status=status.HTTP_403_FORBIDDEN)\n\n # Note: Unhandled exceptions will raise a 500 error.\n return None", "def exceptions(e):\n ts = strftime('[%Y-%b-%d %H:%M]')\n tb = format_exc()\n app.logger.error('%s %s %s %s %s 5xx INTERNAL SERVER ERROR\\n%s',\n ts,\n request.remote_addr,\n request.method,\n request.scheme,\n request.full_path,\n tb)\n return jsonify(message=\"Internal Server Error\"), 500", "def raise_on_error(self):\n if not self._status.success:\n cls = UrlApi.InfraHTTPError if self._infra_step else UrlApi.HTTPError\n raise cls('HTTP status (%d)' % (self.status_code,), self)", "def error(self, http_error):\n return HTTPResponse(str(http_error), status=http_error.status)", "def throw(self):\n pass", "def from_d(d):\n return SMRTServiceBaseError(\n d['httpCode'], d['errorType'], d['message'])", "def __init__(self, message=\"\"):\n super(ServerError, self).__init__(message)", "def testResponseException(self):\n self.rpc_mapper1.build_request(\n self.handler, Request1).AndReturn(self.request)\n\n self.rpc_mapper1.build_response(\n self.handler, mox.IsA(Response1)).AndRaise(\n service_handlers.ResponseError)\n\n 
self.ExpectRpcError(self.rpc_mapper1,\n remote.RpcState.SERVER_ERROR,\n 'Internal Server Error')\n\n self.mox.ReplayAll()\n\n self.handler.handle('POST', '/my_service', 'method1')\n\n self.VerifyResponse('500', 'Internal Server Error', '')\n\n self.mox.VerifyAll()", "def handle_api_error(self, response):\n code = response.status_code\n self.__log(f'Handling API error with status code {code}.', 'error')\n if code == 401:\n self.__log(f'Invalid credentials. Please make sure your token is correct.', 'error')\n raise InvalidCredentialsError\n if code == 404:\n self.__log(f'File not found on query. Make sure query URL is correct and retry.', 'error')\n raise FileNotFoundError\n if code == 422:\n content = json.loads(response.content)\n for error in content['errors']:\n self.__log(f'API could not process the request. Message: {error[\"message\"]}.', 'error')\n raise UnprocessableRequestError(f'Issue with field {error[\"field\"]}: {error[\"message\"]}')\n if code == 429:\n self.__log(f'Monthly request limits exceeded. Upgrade billing or change token.', 'error')\n raise MonthlyRequestLimitExceededError\n self.__log(f'Response for code: \"{code}\" was unhandled by wrapper. Sorry to not be more helpful.', 'error')\n raise UnknownApiError(\"An unhandled API exception occurred\")", "def custom_exception_handler(exc, context):\n response = exception_handler(exc, context)\n\n return Response(\n str(exc),\n status=response.status_code if response is not None else HTTP_500_INTERNAL_SERVER_ERROR,\n )", "def handling_unknown_err(e):\n app.logger.exception(e)\n return resp_json(BaseResp.err(e.name))", "def __init__(self, message=\"\"):\n super(ApplicationError, self).__init__(message)", "def __init__(self, exception):\n self.wrapped_exc = exception\n for key, value in self.wrapped_exc.headers.items():\n self.wrapped_exc.headers[key] = str(value)\n self.status_int = exception.status_int", "def error(self):\n raise NotImplementedError(\"subclasses need to override this method\")", "def __init__(self, context=\"Resource not found\"):\n status_code = 404\n AppExceptionCase.__init__(self, status_code, context)", "def exception_handler_v20(status_code, error_content):\r\n error_dict = None\r\n if isinstance(error_content, dict):\r\n error_dict = error_content.get('NeutronError')\r\n # Find real error type\r\n bad_neutron_error_flag = False\r\n if error_dict:\r\n # If Neutron key is found, it will definitely contain\r\n # a 'message' and 'type' keys?\r\n try:\r\n error_type = error_dict['type']\r\n error_message = error_dict['message']\r\n if error_dict['detail']:\r\n error_message += \"\\n\" + error_dict['detail']\r\n except Exception:\r\n bad_neutron_error_flag = True\r\n if not bad_neutron_error_flag:\r\n # If corresponding exception is defined, use it.\r\n client_exc = getattr(exceptions, '%sClient' % error_type, None)\r\n # Otherwise look up per status-code client exception\r\n if not client_exc:\r\n client_exc = exceptions.HTTP_EXCEPTION_MAP.get(status_code)\r\n if client_exc:\r\n raise client_exc(message=error_message,\r\n status_code=status_code)\r\n else:\r\n raise exceptions.NeutronClientException(\r\n status_code=status_code, message=error_message)\r\n else:\r\n raise exceptions.NeutronClientException(status_code=status_code,\r\n message=error_dict)\r\n else:\r\n message = None\r\n if isinstance(error_content, dict):\r\n message = error_content.get('message')\r\n if message:\r\n raise exceptions.NeutronClientException(status_code=status_code,\r\n message=message)\r\n\r\n # If we end up here the exception 
was not a neutron error\r\n msg = \"%s-%s\" % (status_code, error_content)\r\n raise exceptions.NeutronClientException(status_code=status_code,\r\n message=msg)", "def _get_failure_from_exception(\n e: BaseException) -> TransactionResult.Failure:\n\n try:\n if isinstance(e, IconServiceBaseException):\n if e.code == ExceptionCode.SCORE_ERROR or isinstance(e, ScoreErrorException):\n Logger.warning(e.message, ICON_SERVICE_LOG_TAG)\n else:\n Logger.exception(e.message, ICON_SERVICE_LOG_TAG)\n\n code = int(e.code)\n message = str(e.message)\n else:\n Logger.exception(e, ICON_SERVICE_LOG_TAG)\n Logger.error(e, ICON_SERVICE_LOG_TAG)\n\n code: int = ExceptionCode.SERVER_ERROR.value\n message = str(e)\n except:\n code: int = ExceptionCode.SERVER_ERROR.value\n message = 'Invalid exception: code or message is invalid'\n\n return TransactionResult.Failure(code, message)", "def odata_error(self, request, environ, start_response, sub_code,\n message='', code=400):\n response_headers = []\n e = core.Error(None)\n e.add_child(core.Code).set_value(sub_code)\n e.add_child(core.Message).set_value(message)\n response_type = self.content_negotiation(\n request, environ, self.ErrorTypes)\n if response_type is None:\n # this is an error response, default to text/plain anyway\n response_type = params.MediaType.from_str(\n 'text/plain; charset=utf-8')\n elif response_type == \"application/atom+xml\":\n # even if you didn't ask for it, you get application/xml in this\n # case\n response_type = \"application/xml\"\n if response_type == \"application/json\":\n data = str(''.join(e.generate_std_error_json()))\n else:\n data = str(e)\n data = data.encode('utf-8')\n response_headers.append((\"Content-Type\", str(response_type)))\n response_headers.append((\"Content-Length\", str(len(data))))\n start_response(\"%i %s\" % (code, sub_code), response_headers)\n return [data]", "def __init__(self, message):\n ModelException.__init__(self, message)", "def failure_exception(cls, state, exception):\r\n return PlatformMessage(method=\"__reply__\", kwargs={\"__result__\": \"fail\", \"state\": state, \"errcode\": -2,\r\n \"e\": exception})", "def exception_class(self, exception):\n\n\t\tcls = type(exception)\n\t\tif cls.__module__ == 'exceptions': # Built-in exception.\n\t\t\treturn cls.__name__\n\t\treturn \"%s.%s\" % (cls.__module__, cls.__name__)", "def process_exception(self, request, exception):\n logging.error(\"ERROR\")\n logging.error(traceback.format_exc())\n response = set_response(\"Internal server error\", False, 500, {})\n return JsonResponse(response, status=response[\"http_code\"])", "def get_exception():\n raise Exception(\"example\")", "def jsonify_exception(error: HTTPException) -> Response:\n exc_resp = error.get_response()\n response: Response = jsonify(reason=error.description)\n response.status_code = exc_resp.status_code\n return response", "def exception_handler(exc, context):\n if isinstance(exc, NotFoundException):\n exc = exceptions.NotFound()\n elif isinstance(exc, UnauthorizedException):\n exc = exceptions.PermissionDenied()\n elif isinstance(exc, exceptions.NotAuthenticated):\n exc = NotAuthenticated()\n\n if isinstance(exc, exceptions.APIException):\n headers = {}\n if getattr(exc, 'auth_header', None):\n headers['WWW-Authenticate'] = exc.auth_header\n if getattr(exc, 'wait', None):\n headers['Retry-After'] = '%d' % exc.wait\n\n if isinstance(exc.detail, (list, dict)):\n data = exc.detail\n else:\n data = {'detail': exc.detail}\n\n set_rollback()\n return Response(data, status=exc.status_code, 
headers=headers)\n\n return None", "def status_error_to_exception(message: dict) -> None:\n\n if message[\"status\"] == \"ok\" or \"reason\" in message:\n return None # This message does not contain an error\n\n reason = message[\"reason\"]\n\n if reason == ErrorReason.RATE_LIMIT:\n raise WebsocketRateLimitError(reason)\n\n elif reason == ErrorReason.MESSAGE_TOO_LARGE:\n raise MessageSizeError(reason)\n\n elif ErrorReason.API_KEY_MISSING.value in reason:\n raise MissingAPIKeyError(reason)\n\n elif reason == ErrorReason.API_VERSION:\n raise InvalidAPIVersionError(reason)\n\n elif ErrorReason.EVENT_RATE_LIMIT in reason:\n raise EventRateLimitError(reason)\n\n elif ErrorReason.SIMULATED_RATE_LIMIT in reason:\n raise SimulatedEventRateLimitError(reason)\n\n elif ErrorReason.API_KEY_INVALID.value in reason:\n raise InvalidAPIKeyError(reason)\n else:\n raise SDKError(reason)", "def handle_exception(e):\r\n # start with the correct headers and status code from the error\r\n response = e.get_response()\r\n # replace the body with JSON\r\n response.data = json.dumps({\r\n \"code\": e.code,\r\n \"name\": e.name,\r\n \"description\": e.description,\r\n })\r\n response.content_type = \"application/json\"\r\n return response", "def solid_exception(self) -> Optional[BaseException]:\n return self.op_exception", "def exception_handler(exc, context):\n headers = None\n if isinstance(exc, APIException):\n headers = {}\n if getattr(exc, 'auth_header', None):\n headers['WWW-Authenticate'] = exc.auth_header\n if getattr(exc, 'wait', None):\n headers['Retry-After'] = '%d' % exc.wait\n\n data = exc.detail\n if type(data) is ErrorDetail:\n data = str(data)\n status_code = exc.status_code\n set_rollback()\n\n elif isinstance(exc, Http404):\n data = \"Not Found\"\n status_code = status.HTTP_404_NOT_FOUND\n set_rollback()\n\n else:\n data = str(exc)\n status_code = status.HTTP_500_INTERNAL_SERVER_ERROR\n\n return smart_response(data, status_code=status_code, headers=headers)", "def error(self, e):\n return \"{}: {} ({})\".format(e.__class__.__name__, e.__doc__, e.message)", "def what(self):\n return _libSALOME_LifeCycleCORBA.SALOME_Exception_what(self)", "def __init__(self, from_email):\n self.code = 400\n self.from_email = from_email\n Error.__init__(self)", "def introspectionException(self):\n return self._introspectionException", "def exception(self) -> typing.Optional[Exception]:\n return self._exception", "def error_handler(response, **kwargs):\n if 400 <= response.status_code <= 499:\n message = response.json()['error_description'] \\\n if 'error_description' in response.json() \\\n else response.json()['error_detail']\n raise ClientError(response, message)\n\n elif 500 <= response.status_code <= 599:\n raise ServerError(response)\n\n return response", "def handle_exception(e):\n # start with the correct headers and status code from the error\n response = e.get_response()\n # replace the body with JSON\n response.data = json.dumps({\n \"code\": e.code,\n \"name\": e.name,\n \"description\": e.description,\n })\n response.content_type = \"application/json\"\n return response", "def gateway_error_response(self, exc):\n if hasattr(exc, \"get_stacks\"):\n # Process potentially multiple stacks.\n full_error, exc_stacks = \"\", exc.get_stacks()\n for i in range(len(exc_stacks)):\n full_error += exc_stacks[i][0] + \"\\n\"\n if i == 0:\n full_error += \"\".join(traceback.format_exception(*sys.exc_info()))\n else:\n entry = ApplicationException.format_stack(exc_stacks[i][1])\n full_error += entry + \"\\n\"\n\n 
exec_name = exc.__class__.__name__\n else:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n exec_name = exc_type.__name__\n full_error = \"\".join(traceback.format_exception(*sys.exc_info()))\n\n status_code = getattr(exc, \"status_code\", 400)\n if self.log_errors:\n if self.develop_mode:\n if status_code == 401:\n log.warn(\"%s: %s\", exec_name, exc)\n else:\n log.error(full_error)\n else:\n if status_code == 401:\n log.info(\"%s: %s\", exec_name, exc)\n else:\n log.info(full_error)\n\n result = {\n GATEWAY_ERROR_EXCEPTION: exec_name,\n GATEWAY_ERROR_MESSAGE: str(exc.message),\n GATEWAY_ERROR_EXCID: getattr(exc, \"exc_id\", \"\") or \"\"\n }\n if self.develop_mode:\n result[GATEWAY_ERROR_TRACE] = full_error\n\n if RETURN_MIMETYPE_PARAM in request.args:\n return_mimetype = str(request.args[RETURN_MIMETYPE_PARAM])\n return self.response_class(result, mimetype=return_mimetype)\n\n self._log_request_error(result, status_code)\n\n resp = self.json_response({GATEWAY_ERROR: result, GATEWAY_STATUS: status_code})\n # Q: Should HTTP status be the error code of the exception?\n resp.status_code = status_code\n return resp", "def custom_exception_handler(exc, context):\n response = exception_handler(exc, context)\n if isinstance(exc, Http404):\n response.data = {\n 'message': 'No data available' # custom exception message\n }\n return response\n try:\n print(\"Exception\", exc.get_codes())\n if 'email' in exc.get_codes() and 'unique' in exc.get_codes()['email']:\n response.data = {\n 'message': 'This email already exists.' # custom exception message\n }\n return response\n if 'mobile_number' in exc.get_codes() and 'unique' in exc.get_codes()['mobile_number']:\n response.data = {\n 'message': 'This mobile number already exists.' # custom exception message\n }\n return response\n if 'dev_id' in exc.get_codes() and 'unique' in exc.get_codes()['dev_id']:\n response.data = {\n 'message': 'This device already registered with other account.' 
# custom exception message\n }\n return response\n return response\n except:\n return response", "def handle_error_response(resp):\n error_message = ''\n error_message_with_reason = ''\n try:\n error_message = (\n resp.json()\n .get('fireeyeapis', {})\n .get('description', '')\n .strip()\n )\n error_message = error_message.replace('\\n', '')\n if error_message:\n error_message_with_reason = f'Reason: {error_message}'\n except ValueError: # ignoring json parsing errors\n pass\n if resp.headers.get('Content-Type', '') == CONTENT_TYPE_ZIP:\n error_message = error_message_with_reason = resp.text\n\n status_code_messages = {\n 400: f\"{MESSAGES['BAD_REQUEST_ERROR']} {error_message_with_reason}\",\n 401: MESSAGES['AUTHENTICATION_ERROR'],\n 403: error_message,\n 404: error_message,\n 406: error_message,\n 407: MESSAGES['PROXY_ERROR'],\n 500: MESSAGES['INTERNAL_SERVER_ERROR'],\n 503: MESSAGES['INTERNAL_SERVER_ERROR'],\n }\n\n if resp.status_code in status_code_messages:\n demisto.debug(\n f'Response Code: {resp.status_code}, Reason: {status_code_messages[resp.status_code]}'\n )\n raise DemistoException(status_code_messages[resp.status_code])\n else:\n raise DemistoException(resp.raise_for_status())", "def _process_error(self, result):\n self.error = result\n if result['errorCode'] == 901:\n raise Exceptions.APIKeyInvalid\n elif result['errorCode'] == 902:\n raise Exceptions.APISecretInvalid\n elif result['errorCode'] == 903:\n raise Exceptions.InvalidRequestToken\n elif result['errorCode'] == 904:\n raise Exceptions.RequestTokenExpired\n elif result['errorCode'] == 905:\n raise Exceptions.InvalidAccessToken\n elif result['errorCode'] == 906:\n raise Exceptions.TokenExpired(self.access.expire)\n elif result['errorCode'] == 907:\n raise Exceptions.ParameterMissing\n elif result['errorCode'] == 908:\n raise Exceptions.ParameterNotFormatted\n elif result['errorCode'] == 909:\n raise Exceptions.FeatureNotSupported\n elif result['errorCode'] == 910:\n raise Exceptions.EndPointNotSupported\n else:\n raise Exceptions.UnknownJsonError(result)", "def Error(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def _handle_response(self, response):\n if response.status_code >= 500:\n raise ServerError(response.content, response.status_code)\n elif response.status_code >= 300:\n raise ClientError(response.json(), response.status_code)\n\n return Response(response)", "def general_exception(err):\n current_app.logger.exception(err)\n return 'Internal server error (unknown). 
Check logs.', 500", "def handle_error(self, err): # pragma: no cover\n # log every exception raised in the application\n print('we ended up in the API handle_error()', err, err.__class__)\n\n # catch other HTTP errors\n if isinstance(err, HTTPException):\n original = getattr(err, \"original_exception\", None)\n return jsonify({\n 'success': False,\n 'error': err.code,\n \"message\": getattr(err.error, 'message')\n }), err.code\n\n # if 'message' attribute isn't set, assume it's a core Python exception\n if not getattr(err, 'message', None):\n original = getattr(err, \"original_exception\", None)\n return jsonify({\n 'message': 'Server has encountered an unknown error'\n }), 500\n\n # Handle application-specific custom exceptions\n return jsonify(**err.kwargs), err.http_status_code", "def _exception_dispatcher(self, e):\n # TODO Currently not doing anything\n raise e", "def httperror( status_code=500, message=b'' ):", "def handle_exception(self, exc):\n if isinstance(exc, exceptions.Throttled):\n # Throttle wait header\n self.headers['X-Throttle-Wait-Seconds'] = '%d' % exc.wait\n\n if isinstance(exc, (exceptions.NotAuthenticated,\n exceptions.AuthenticationFailed)):\n # WWW-Authenticate header for 401 responses, else coerce to 403\n auth_header = self.get_authenticate_header(self.request)\n\n if auth_header:\n self.headers['WWW-Authenticate'] = auth_header\n else:\n exc.status_code = status.HTTP_403_FORBIDDEN\n\n if isinstance(exc, exceptions.MethodNotAllowed):\n return Response(codes.get('invalid_request_method'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, CsrfError):\n return Response(codes.get('csrf_invalid'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.ParseError):\n return Response(codes.get('parse_error'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.AuthenticationFailed):\n return Response(codes.get('authentication_failed'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.NotAuthenticated):\n return Response(codes.get('not_authenticated'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.PermissionDenied):\n return Response(codes.get('permission_denied'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.NotAcceptable):\n return Response(codes.get('not_acceptable'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.UnsupportedMediaType):\n return Response(codes.get('unsupported_media_type'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, exceptions.Throttled):\n return Response(codes.get('throttled'),\n status=exc.status_code,\n exception=True)\n\n elif isinstance(exc, Http404):\n return Response(codes.get('not_found'),\n status=status.HTTP_404_NOT_FOUND,\n exception=True)\n\n elif isinstance(exc, PermissionDenied):\n return Response(codes.get('permission_denied'),\n status=status.HTTP_403_FORBIDDEN,\n exception=True)\n raise", "def _gh_exception(exc_cls, status, data):\n try:\n exc = exc_cls(status, data, None)\n except TypeError:\n # Before PyGithub 1.5, GithubException had only two required arguments.\n exc = exc_cls(status, data)\n return exc" ]
[ "0.6928329", "0.6750525", "0.649088", "0.64422554", "0.6438557", "0.64162624", "0.63747215", "0.6354716", "0.63448566", "0.6343094", "0.6330691", "0.6311043", "0.6306453", "0.62536937", "0.6228462", "0.61860454", "0.61826175", "0.6169226", "0.6168609", "0.6157098", "0.6143242", "0.6082627", "0.607246", "0.6031625", "0.6031184", "0.6018922", "0.6017758", "0.600437", "0.6001816", "0.6001798", "0.6001204", "0.5997113", "0.5997075", "0.59719884", "0.5963046", "0.5956429", "0.59551233", "0.5952133", "0.59403133", "0.5933885", "0.59333557", "0.59302145", "0.5894606", "0.5894001", "0.5889158", "0.5885631", "0.58755684", "0.5846012", "0.5844517", "0.5837187", "0.5831214", "0.5827677", "0.5827055", "0.57906234", "0.578618", "0.5784252", "0.5782538", "0.5781847", "0.577776", "0.5772505", "0.57684946", "0.57680815", "0.5765395", "0.57634616", "0.57606", "0.5743925", "0.572001", "0.5711848", "0.57059675", "0.5704972", "0.56908613", "0.56901544", "0.56891924", "0.56864876", "0.568531", "0.5677901", "0.5666076", "0.5665308", "0.5657184", "0.5655813", "0.5643046", "0.56232893", "0.5619473", "0.5618733", "0.56141776", "0.55914986", "0.5587733", "0.5579177", "0.5572299", "0.5570126", "0.55688673", "0.55646014", "0.5563409", "0.55538267", "0.5553289", "0.5550515", "0.55471146", "0.55444235", "0.55342555", "0.553117" ]
0.6739744
2
The base exception class of timeout exceptions.
def __init__(self, error_msg):
    super(RequestTimeoutException, self).__init__(error_msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_timeout_elapsed_exception(self):\n deadline = Deadline(-MS)\n with self.assertRaises(TimeoutError):\n deadline.timeout()", "def cancelled_exception_class(cls) -> type[BaseException]:", "def test_wait_timeout_inheritance():\n # confirm subclassed from pypyr root error\n err = WaitTimeOut()\n assert isinstance(err, PypyrAwsError)\n assert isinstance(err, PlugInError)\n assert isinstance(err, PypyrError)", "def _timeout(signum, frame):\n # Raise TimeoutException with system default timeout message\n raise TimeoutException()", "def raise_timeout(self, *args, **kwargs):\n\n self.log.error(\"Task timeout encountered.\")\n raise TimeoutError", "def exception(self, timeout=None):\n start_time = time()\n with self._done_condition:\n res = self.__exception()\n if res:\n return res\n self._done_condition.wait(timeout)\n if timeout and start_time + timeout < time():\n raise TimeoutError()\n return self.__exception()", "def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout", "def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout", "def test_timeout(self):\n # Attempt connection with short timeout\n with self.assertRaises(requests.exceptions.ReadTimeout):\n a = api.InvenTreeAPI(SERVER, username=USERNAME, password=PASSWORD, timeout=0.001) # noqa: F841", "def raise_timeout_exception(self, _result=None, _timeout=None):\n raise RosTimeoutError(\"No service response received\")", "def test_timeoutRaises(self):\n\n @self.eventloop.wait_for(timeout=0.5)\n def times_out():\n return Deferred().addErrback(lambda f: f.trap(CancelledError))\n\n start = time.time()\n self.assertRaises(TimeoutError, times_out)\n self.assertTrue(abs(time.time() - start - 0.5) < 0.1)", "def testTimeout(self):\n\n class TimeoutTestCase(cros_test_lib.TestCase):\n \"\"\"Test case that raises a TimeoutError because it takes too long.\"\"\"\n\n TEST_CASE_TIMEOUT = 1\n\n def testSleeping(self):\n \"\"\"Sleep for 2 minutes. 
This should raise a TimeoutError.\"\"\"\n time.sleep(2 * 60)\n raise AssertionError('Test case should have timed out.')\n\n # Run the test case, verifying it raises a TimeoutError.\n test = TimeoutTestCase(methodName='testSleeping')\n self.assertRaises(timeout_util.TimeoutError, test.testSleeping)", "def test_wait_timeout_raises():\n with pytest.raises(WaitTimeOut) as err_info:\n raise WaitTimeOut(\"this is error text right here\")\n\n assert str(err_info.value) == \"this is error text right here\"", "def test_timeout(self, mocker, mock_timedelta):\n\n tid = 289466\n site = \"mysite\"\n\n exception_response = self.generate_task_dictionary(\n tid, state=\"started\", completed=None\n )\n\n responses = [{\"json\": exception_response}]\n url = (\n \"https://cloudapi.acquia.com/v1/\"\n \"sites/prod:{site}/tasks/{tid}.json\".format(tid=tid, site=site)\n )\n\n mocker.register_uri(\"GET\", url, responses)\n\n with self.assertRaises(exceptions.AcquiaCloudTimeoutError):\n self.client.site(site).task(tid).wait(0)", "def _handle_timeout(self, frame=None, **_):\n\n raise TimeOut.TimeOutError(self, frame)", "def timer_object_timeout(seconds=5):\n def raise_timeout_exception():\n raise TimeoutReachedException(seconds=seconds)\n\n return Timer(seconds, raise_timeout_exception)", "def __init__(self, message=\"Remote operation timeout\"):\n super(SshTimeout, self).__init__(message)", "def raise_socket_error(timeout=None):\n try:\n raise\n\n except _socket.timeout:\n if timeout is not None:\n raise TimeoutError, \"Timed out after %s seconds\" % timeout, \\\n _sys.exc_info()[2]\n raise TimeoutError, \"Timed out\", _sys.exc_info()[2]\n\n except _socket.gaierror, e:\n # pylint: disable = E1101\n raise AddressError, \"Address Information Error: %s (%s)\" % \\\n (raise_socket_error.EAIS.get(e[0], e[0]), e[1]), \\\n _sys.exc_info()[2]\n\n except _socket.herror, e:\n raise AddressError, \"Host Resolution Error %s: %s\" % \\\n (e[0], e[1]), _sys.exc_info()[2]\n\n except _socket.sslerror, e:\n raise SSLError, \"Socket SSL Error: %s\" % str(e), _sys.exc_info()[2]\n\n except _socket.error, e:\n if len(e.args) == 1:\n raise SocketError, \"Socket Error: %s\" % \\\n (e[0],), _sys.exc_info()[2]\n else:\n raise SocketError, \"Socket Error %s: %s\" % \\\n (_errno.errorcode.get(e[0], e[0]), e[1]), _sys.exc_info()[2]\n\n except IOError, e:\n raise SocketError, \"Socket Error %s: %s\" % \\\n (_errno.errorcode.get(e[0], e[0]), str(e)), \\\n _sys.exc_info()[2]", "def cancelled_to_request_timed_out_error(value, timeout):\n if isinstance(value, failure.Failure):\n value.trap(CancelledError)\n raise RequestTimedOutError()\n return value", "def assert_timeout(self) -> None:\n if self._cancelled:\n raise asyncio.TimeoutError from None", "def test_set_timeout_value_error(self, timeout):\n self.assertRaises(ValueError, self.root.set_timeout, timeout)", "def __init__(self, timeout_time):\n self.timeout_time = timeout_time", "def timeout_element_error(self, selector, name):\n\t\tBasePage.LOGGER.error(\"Timeout - < {1} > element not found: {0} \\n\".format(selector, name))\n\t\traise Exception(\"Timeout - < {1} > element not found: {0}\".format(selector, name))", "def test_timed_context_exception(self):\n class ContextException(Exception):\n pass\n\n def func(self):\n with self.statsd.timed('timed_context.test.exception'):\n time.sleep(0.5)\n raise ContextException()\n\n # Ensure the exception was raised.\n with pytest.raises(ContextException):\n func(self)\n\n # Ensure the timing was recorded.\n packet = self.recv(2).split(\"\\n\")[0] # 
ignore telemetry packet\n name_value, type_ = packet.split('|')\n name, value = name_value.split(':')\n\n self.assertEqual('ms', type_)\n self.assertEqual('timed_context.test.exception', name)\n self.assert_almost_equal(0.5, float(value), 0.1)", "def WrappedException(self) -> object:", "def test_timeout(self):\n start = time.time()\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.03)\n # be a little lenient for slow computers:\n self.assertTrue(abs(time.time() - start) < 0.05)", "def __init__(self, earliest_time, latest_time):\n Exception.__init__(self, earliest_time, latest_time)\n self.earliest_time = earliest_time\n self.latest_time = latest_time", "def raise_timeout_error_upload(api_url, headers, data, timeout, proxies):\n raise requests.exceptions.Timeout", "def __init__(self, timeout=120):\n self.m_timeout = timeout", "def test_timer_context_exceptions():\n sc = _client()\n\n with assert_raises(socket.timeout):\n with sc.timer('foo'):\n raise socket.timeout()\n\n _timer_check(sc, 1, 'foo', 'ms')", "def test_timeout_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time() - 11\n try:\n connection._timeout_exceeded(start)\n except NetmikoTimeoutException as exc:\n assert isinstance(exc, NetmikoTimeoutException)\n return\n\n assert False", "def _check_timeouts(self, chunk_timeout, total_timeout):\n cur_time = time()\n\n if chunk_timeout is not None and cur_time > self._chunk_time + chunk_timeout:\n raise ChunkTimeout('Item timeout expired.')\n elif total_timeout is not None and cur_time > self._total_time + total_timeout:\n raise TotalTimeout('Total timeout expired.')", "def test_timeout_elapsed_no_exception(self):\n deadline = Deadline(-MS)\n timeout = deadline.timeout(raise_if_elapsed=False)\n self.assertGreater(timeout, -2 * MS)\n self.assertLess(timeout, -MS)", "def exceptionType(self):\n return ExceptionType.GeneralException", "def __init__(self, assigned_time, task_id, node_type):\n Exception.__init__(self, assigned_time, task_id, node_type)\n self.assigned_time = assigned_time\n self.task_id = task_id\n self.node_type = node_type", "def test_timeout_invalid_start():\n connection = FakeBaseConnection(session_timeout=10)\n assert not connection._timeout_exceeded(start=0)", "def get_timeout(self) -> int:", "def timeout(order):\n return ResultProxy(TaskResult(TaskTimedout('A task has timedout'), order))", "def timeout(time_limit):\n\n class TimeoutException(Exception):\n \"\"\" Subclass Exception to catch timer expiration during search \"\"\"\n pass\n\n def handler(*args, **kwargs):\n \"\"\" Generic handler to raise an exception when a timer expires \"\"\"\n raise TimeoutException(\"Test aborted due to timeout. 
Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))\n\n def wrapUnitTest(testcase):\n\n @wraps(testcase)\n def testWrapper(self, *args, **kwargs):\n\n signal.signal(signal.SIGALRM, handler)\n signal.alarm(time_limit)\n\n try:\n return testcase(self, *args, **kwargs)\n finally:\n signal.alarm(0)\n\n return testWrapper\n\n return wrapUnitTest", "def timeout(seconds, error_message=\"Time out.\"):\n import signal\n def decorator(func):\n\n def __timiout_handler(signum, frame):\n raise TimeoutError(error_message)\n\n @functools.wraps(func)\n def wrapper(*arg, **kw):\n signal.signal(signal.SIGALRM, __timiout_handler)\n signal.alarm(seconds)\n ret = \"\"\n try:\n ret = func(*arg, **kw)\n except TimeoutError,e:\n print \"TimeoutError: \", e\n print \"{name} ran more than {seconds}s.\".format(name=func.__name__, seconds=seconds)\n except Exception,e:\n print \"Error: \",e\n finally:\n signal.alarm(0)\n return ret\n return wrapper\n return decorator", "def assert_timeout(self) -> None:", "def exception(self):\n self.wait() # TODO: If raise_errors=True this will raise the exception when trying to access it?\n return self._exception", "def test_timeoutCancels(self):\n result = Deferred()\n error = []\n result.addErrback(error.append)\n\n @self.eventloop.wait_for(timeout=0.0)\n def times_out():\n return result\n\n self.assertRaises(TimeoutError, times_out)\n self.assertIsInstance(error[0].value, CancelledError)", "def timeout(self) -> str:\n return pulumi.get(self, \"timeout\")", "def throw(self):\n pass", "def get_timeout(self):\n return self.timeout", "def test_timeout_twice(self):\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)", "def test_set_timeout_wrong_args(self):\n context = Context(SSLv23_METHOD)\n with pytest.raises(TypeError):\n context.set_timeout(None)", "async def timeout(self, failed: bool = False) -> None:\n raise NotImplementedError()", "def __init__(self, timeout=129600):\n self.timeout = timeout", "def unexpectedException(self):", "def test_timeout_not_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time()\n assert not connection._timeout_exceeded(start)", "def __call__(self, domain_file, problem_file, timeout=10):\n raise NotImplementedError(\"Override me!\")", "def SendTimeout(self) -> int:", "def SendTimeout(self) -> int:", "def test_default_maximum_conflict(self):\n\n self.set_options(timeouts=True, timeout_maximum=1, timeout_default=10)\n task = self.create_task(self.context())\n with self.assertRaises(ErrorWhileTesting):\n task.execute()", "def timeout(time_limit):\n\n def wrapUnitTest(testcase):\n\n @wraps(testcase)\n def testWrapper(self):\n\n queue = Queue()\n\n try:\n p = Thread(target=handler, args=(self, testcase, queue))\n p.daemon = True\n p.start()\n err, res = queue.get(timeout=time_limit)\n p.join()\n if err:\n raise err[0](err[1]).with_traceback(err[2])\n return res\n except QueueEmptyError:\n raise TimeoutError(\"Test aborted due to timeout. 
Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))\n\n return testWrapper\n\n return wrapUnitTest", "def exception(self, *args, **kwargs):", "def test_exception_class_hierarchy(self) -> None:\n\n try:\n raise CustomDerivedError(state=\"test\")\n except CustomDerivedError as cex:\n assert type(cex) is CustomDerivedError\n assert \"test\" == cex.state\n except CustomError as cex:\n assert False, \"CustomDerivedError should have caught the exception.\"\n except:\n assert False, f\"Unhandled exception: {sys.exc_info()[0]}\"", "def ioException(self) -> \"IOException\":\n raise NotImplementedError", "def __init__(self, default_timeout = 300.0):\n dict.__init__(self)\n self._timeouts = {}\n self._default_timeout = default_timeout\n self._lock = threading.RLock()", "def unexpected_error(self, exception):", "def exception(self) -> Exception:\n return self._exception", "async def test_timeout_synchronization(self):\n async def wait_synchronized(account_id: str, instance_index: str, application_pattern: str,\n timeout_in_seconds: float, application: str = None):\n await asyncio.sleep(0.1)\n raise TimeoutException('timeout')\n\n client.wait_synchronized = wait_synchronized\n try:\n await api.wait_synchronized(0.09)\n raise Exception('TimeoutError is expected')\n except Exception as err:\n assert err.__class__.__name__ == 'TimeoutException'", "def test_timeout_retries(self):\n\n batch = Batch(Mock())\n self.check_instance(batch=batch)\n\n self.assertEqual(batch.timeout_retries, 0)\n self.check_instance(batch, timeout_retries=0)\n\n batch.timeout_retries = 10\n self.assertEqual(batch.timeout_retries, 10)\n self.check_instance(batch, timeout_retries=10)\n\n batch.timeout_retries = 0\n self.assertEqual(batch.timeout_retries, 0)\n self.check_instance(batch, timeout_retries=0)\n\n batch.timeout_retries = 1\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n\n # exceptions\n ## error messages\n value_error = \"'timeout_retries' must be positive, i.e. 
greater or equal that zero (>=0).\"\n type_error = f\"'timeout_retries' must be of type {int}.\"\n\n #######################################################################\n # test wrong value\n with self.assertRaises(ValueError) as error:\n batch.timeout_retries = -1\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, value_error)\n\n #######################################################################\n # test wrong type\n with self.assertRaises(TypeError) as error:\n batch.timeout_retries = True\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, type_error)\n\n with self.assertRaises(TypeError) as error:\n batch.timeout_retries = '2'\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, type_error)", "def test_snmp_timeout():\n for command in snmp_commands:\n with pytest.raises(SNMPTimeout) as excinfo:\n command(ipaddress='10.0.0.1', oid='IF-MIB::ifTable', timeout='1')\n assert 'Timeout' in str(excinfo.value)\n\n with pytest.raises(SNMPTimeout) as excinfo:\n snmpgetsome(ipaddress='10.0.0.1', oids=['IF-MIB::ifTable'], timeout='1')\n assert 'Timeout' in str(excinfo.value)\n\n with pytest.raises(SNMPTimeout) as excinfo:\n snmpset(community='public', ipaddress='10.0.0.1',\n oid='IF-MIB::ifTable', value_type='s',\n value='random string', timeout='1')\n assert 'Timeout' in str(excinfo.value)", "def __init__(self, error_msg):\n super(ConnectionException, self).__init__(error_msg)", "def timeout(self, value):\n if isinstance(value, timedelta):\n value = value.days * 3600 * 24 + value.seconds\n self._timeout = value # noqa", "def __init__(self, code, reason):\n super(RequestError, self).__init__(code, reason)", "def retry(self, times):\n return Retry((requests.ConnectionError, requests.Timeout), times)", "def throw(self, type, value=None, traceback=None):\n pass", "def __init__(self, timeout, tries):\r\n self._timeout = timeout\r\n self._tries = tries", "def __init__( self, timeout = 60.0 ):\n\n self.timeout = timeout\n self.alive = None", "def timeout(self, value):\n minimum = -2147483648\n maximum = 2147483647\n conditions = [validate_range(value, minimum, maximum)]\n if all(conditions):\n self._update_values('timeout', value)\n else:\n raise InvalidValue(f'{value} is invalid, must be between {minimum} and {maximum}')", "def set_retry_timeout(self, retry_timeout):", "def __init__(self, content, status):\n Exception.__init__(self)\n self.status = status\n self.content = content", "def handler(*args, **kwargs):\n raise TimeoutException(\"Test aborted due to timeout. 
Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))", "def timeout_change(self, timedelta):\n pass # pylint: disable=unnecessary-pass\n # For backward compatibility only.", "def exception(self, e):\n pass", "def test_too_many_requests(self):\n try:\n self._mock_time_series(error=fitbit_exceptions.HTTPTooManyRequests,\n error_attrs={'retry_after_secs': 35})\n except fitbit_exceptions.HTTPTooManyRequests:\n self.assertEqual(sys.exc_info()[1].retry_after_secs, 35)\n else:\n assert False, 'Should have thrown exception'", "def set_timeout(self, timeout):\n self.timeout = timeout", "def raise_timeout_error_get_votes(api_url, headers, params, timeout, proxies):\n raise requests.exceptions.Timeout", "def test_class_errored(self, cls, exception):", "def timeout(self):\n return self._timeout", "def timeout(self):\n return self._timeout", "def timeout(self):\n return self._timeout", "def timeout(self):\n return self._timeout", "def _timeout_retry(func, *args, **kwargs):\n tried = kwargs.pop('_____retires', 0)\n try:\n q = func(*args, **kwargs)\n except (TimeoutError, TableParseError) as exc:\n if tried >= MAX_RETRIES_TIMEOUT:\n raise TimeoutError(f'TimeOut obtained in {MAX_RETRIES_TIMEOUT}'\n ' tries, aborting.') from exc\n return _timeout_retry(func, *args, **kwargs, _____retires=tried+1)\n return q", "def test_socket_timeout():\n schema = vol.Schema(cv.socket_timeout)\n\n with pytest.raises(vol.Invalid):\n schema(0.0)\n\n with pytest.raises(vol.Invalid):\n schema(-1)\n\n assert schema(None) == _GLOBAL_DEFAULT_TIMEOUT\n\n assert schema(1) == 1.0", "def pytest_exception_interact(node):\n hooks = node.config.pluginmanager.hook\n hooks.pytest_timeout_cancel_timer(item=node)", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def socket_timeout(self):\n return self.__socket_timeout", "def SocketError(self) -> SocketError:", "def test_no_timeout(self):\n deadline = Deadline(None)\n timeout = deadline.timeout()\n self.assertIsNone(timeout, None)", "def test_fetch_url_timeout():\n with patch(\"cheddar.index.remote.get\") as mocked:\n mocked.side_effect = Timeout\n with assert_raises(NotFoundError):\n fetch_url(\"http://example.com\", TIMEOUT, getLogger())", "def StepTimeout(self):\n return recipe_api.StepTimeout", "def finrsttimeout(self) :\n\t\ttry :\n\t\t\treturn self._finrsttimeout\n\t\texcept Exception as e:\n\t\t\traise e", "def test_RPC_TIMEOUT(self):\n self.assertIsInstance(constants.RPC_TIMEOUT, int,\n \"constants.RPC_TIMEOUT must be an integer.\")", "def timeout(self, timeout):\n assert timeout is None or timeout > 0\n self._timeout = timeout" ]
[ "0.69130844", "0.6805417", "0.6787926", "0.6741076", "0.6446688", "0.6242404", "0.6146727", "0.6146727", "0.6110175", "0.61099267", "0.6053876", "0.59915215", "0.5957881", "0.59220225", "0.59039843", "0.58413595", "0.58341813", "0.58033925", "0.5766664", "0.5763779", "0.5750541", "0.57445276", "0.5717332", "0.5692959", "0.5652564", "0.5647469", "0.5646244", "0.5631001", "0.5625123", "0.55953586", "0.5566651", "0.55559367", "0.5549352", "0.55185664", "0.5497785", "0.54948324", "0.5472107", "0.54589415", "0.54583615", "0.5449412", "0.54238147", "0.5415333", "0.5410302", "0.53994954", "0.5395611", "0.53754383", "0.53481936", "0.5335855", "0.532927", "0.53175133", "0.5302402", "0.52959555", "0.5293292", "0.5286442", "0.5286442", "0.5279204", "0.52762455", "0.5264788", "0.5250252", "0.5247836", "0.521372", "0.5209319", "0.5205811", "0.52056456", "0.5199728", "0.51877654", "0.51763964", "0.51672214", "0.51625836", "0.5159177", "0.5154671", "0.51534325", "0.51323795", "0.51233894", "0.5120489", "0.5094716", "0.5089008", "0.5088666", "0.50766397", "0.5070839", "0.5061777", "0.5058425", "0.5049344", "0.5047714", "0.5047714", "0.5047714", "0.5047714", "0.50466716", "0.50441915", "0.5041265", "0.503712", "0.503712", "0.503243", "0.5024908", "0.5013356", "0.50090295", "0.5007203", "0.5006542", "0.5005792", "0.5005506" ]
0.6499626
4
Returns a string representation of a path
def render_path(path_to_item):
    result = ""
    for pth in path_to_item:
        if isinstance(pth, six.integer_types):
            result += "[{0}]".format(pth)
        else:
            result += "['{0}']".format(pth)
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def path_str(path):\n\toutput = \"PATH: \"\n\tif path:\n\t\tfor i in path:\n\t\t\toutput += str(i.data) + \" -> \"\n\telse:\n\t\toutput += \"Empty\"\n\treturn output", "def path_to_str(path):\n if hasattr(path, '__fspath__'):\n path = as_str_any(path.__fspath__())\n return path", "def as_string(path: pathlib.Path) -> str:\n return path.as_posix()", "def _path_to_string(path):\n return '.'.join(path)", "def getpath(self, path):\n return self._join(path)", "def _path_to_str(var):\n if not isinstance(var, (Path, str)):\n raise ValueError(\"All path parameters must be either strings or \"\n \"pathlib.Path objects. Found type %s.\" % type(var))\n else:\n return str(var)", "def path(self):\n\t\treturn os.path.join(*self._string_values(limit=4))", "def printPath(path):\r\n result = ''\r\n for i in range(len(path)):\r\n result = result + str(path[i])\r\n if i != len(path) - 1:\r\n result = result + '->'\r\n return result", "def _purepath_to_str(\n self, path: Union[Path, PurePath, str]\n ) -> Union[Path, PurePath, str]:\n if isinstance(path, PurePath):\n path = str(path)\n return path", "def path_to_string(path: Path) -> str:\n assert_continuous(path)\n\n pieces = [\"M {} {}\".format(path[0].p0[0], path[0].p0[1])]\n for curve in iter(path): # iter cast not strictly necessary\n piece = \"C {} {} {} {} {} {}\".format(\n int(round(curve.c0[0])), int(round(curve.c0[1])),\n int(round(curve.c1[0])), int(round(curve.c1[1])),\n int(round(curve.p1[0])), int(round(curve.p1[1]))\n )\n pieces.append(piece)\n\n return \" \".join(pieces)", "def __fspath__(self):\n return str(self)", "def printPath(path):\n result =''\n for i in range(len(path)):\n result = result + str(path[i])\n if i != len(path) -1:\n result = result + '->'\n return result", "def _GeneratePathStr(path):\n return ((len(path) - 1) * ' ') + path[-1] if path else ''", "def printPath(path):\n result = ''\n for i in range(len(path)):\n result = result + str(path[i])\n if i != len(path) - 1:\n result = result + '->'\n return result", "def __str__(self):\n return \"\\n\".join(self.path)", "def str_single_path(self, node, _path_str=\"\"):\r\n while node.parent:\r\n _path_str = self.str_single_path(node.parent, _path_str)\r\n break\r\n\r\n _path_str += \" -> \"\r\n _path_str += str(node.get_info_in_tuple())\r\n\r\n return _path_str", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def path(self) -> str:\n return pulumi.get(self, \"path\")", "def __str__(self):\n return '{0}'.format(self.path.name[2:])", "def __str__(self):\n return '{0}'.format(self.path.name[8:], )", "def path(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"path\")", "def format_path(path):\n return path if path.endswith('/') else path + '/'", "def format_path(path):\n if not path:\n return path\n\n path = re.sub(r'/+', '/', path)\n\n if path == '/':\n return (u\"\" if isinstance(path, unicode) else \"\")\n else:\n return '/' + path.strip('/')", "def _pretty_path(path: Sequence[BaseField]) -> str:\n # pylint: disable=protected-access\n return \"< \" + \" -> \".join(f\"'{field._resolve_field_name()}' ({type(field).__name__})\" for field in path) + \" >\"", "def _isolated_path_format(self, path):\n if self._root_dir.is_parent_of(path):\n return '%s:%s' % (\n self._root_dir,\n self._api.path.join(*path.pieces[len(self._root_dir.pieces):])\n )\n else:\n assert path == self._root_dir, \\\n \"isolated path must be equal to or within %s\" % self._root_dir\n return '%s:.' 
% self._root_dir", "def path_to_string(path, separator):\n i = 0\n path_string = \"Path :\" + separator + \"[\"\n while i < len(path):\n if isinstance(path[i], Firewall.Firewall):\n path_string += path[i].hostname\n elif isinstance(path[i], Ip.Ip):\n path_string += path[i].to_string()\n\n if i < len(path) - 1:\n path_string += \",\" + separator\n i += 1\n path_string += \"]\"\n\n return path_string", "def path_filename_representation(path):\n # Strip leading / and replace / with .\n return re.sub(r\"^/(.*)$\", r\"\\1\", path).replace(\"/\", \".\")", "def __str__(self):\n return self.strpath", "def get_path(self, path):\n return abspath(join(self.origin, *path))", "def path(self):\n # type: () -> string_types\n return self._path", "def completePath(path):\n return os.getcwd() + convertString(path)", "def pretty_path(path):\n return path.replace(REPO_DIR + '/', '')", "def __str__(self):\n # TODO: Curently this just stores/returns the file path.\n return unicode(self.path).encode('utf-8')", "def stringyfy(path):\n try:\n # Pathlib support\n path = path.__fspath__()\n except AttributeError:\n pass\n if hasattr(path, 'name'): # passed in a file\n path = path.name\n if isinstance(path, str):\n return path\n raise ValueError(f'Cannot convert {path} to a path')", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def path(self) -> str:\n return self._path", "def get_full_path(self):\n return self.path_display", "def format_path(s,\n path=None,\n replace_long_filename=False):\n # TODO: could possibly simplify by moving representation logic to FileNode\n replaced_path_name = False\n if path is not None:\n if s.startswith(path):\n replaced_path_name = True\n s = s[len(path)+1:]\n if replace_long_filename:\n head, tail = os.path.split(s)\n name_prefix = head.replace('/','_')\n if '/' in head and len(name_prefix) > 0:\n s = s.replace(name_prefix, '. . . 
')\n if replaced_path_name:\n s = \"[DIR]/\" + s\n return \"/\\\\n\".join(s.split('/'))", "def getPath(self):\n path = '/'.join(self.getPhysicalPath())\n return path", "def path2str(a,b,path):\n (s1,s2) = zip(*path)\n line1 = \" \"+\"\".join([get_char(s1,a,x) for x in range(1,len(s1))])\n line2 = \" \"+\"\".join([get_char(s2,b,x) for x in range(1,len(s2))])\n return \"%s\\n%s\"%(line1,line2)", "def path(self, *path):\n path = list(filter(None, path))\n path = self.remove_prefix(path)\n items = [self.prefix_] + path\n return self.delimiter.join(items)", "def resource_path(self, resource):\n return str(self.path.joinpath(resource))", "def resource_path(self, resource):\n return str(self.path.joinpath(resource))", "def _make_path(self) -> str:\r\n path_ = Path(path.join(conf.instance.output_path, self.path_prefix, self.name))\r\n if self.is_identifier_in_paths:\r\n path_ = path_ / self.identifier\r\n return path_", "def get_full_path(self, reg_path: str, reg_id: str) -> str:\n return '{}.{}'.format(reg_path, reg_id)", "def path(self):\n return '/%s' % (self.full_name)", "def path(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"path\")", "def path(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"path\")", "def abspath(path: str) -> str:\n pass", "def __str__(self):\n url = '{}/{}'.format(self.root, self.path)\n return url", "def path_serializer(obj: PurePath, **_: Any) -> str:\n return obj.as_posix()", "def path(self):\n\n if os.path.isabs(self._value):\n return pathlib.Path(self._value)\n raise RuntimeError('RequestString.path not supported.')", "def format_path (in_path):\n return os.path.realpath(os.path.expanduser(in_path))", "def convert_path(path: str, path_type: PathType = PathType.AUTO) -> str:\r\n path_template = get_template_from_path(path)\r\n path = get_path_from_template(path_template, path_type)\r\n return path", "def path_name(self, path):\r\n ind = path.rfind(\"/\") + 1\r\n return (path[:ind], path[ind:])", "def path(self):\n p = self\n\n name = [p.name()]\n offsets = set([p._offset])\n while p.has_parent_key():\n p = p.parent_key()\n if p._offset in offsets:\n name.append(\"[path cycle]\")\n break\n name.append(p.name())\n offsets.add(p._offset)\n return '\\\\'.join(reversed(name))", "def path(self) -> Optional[str]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[str]:\n return pulumi.get(self, \"path\")", "def path(self) -> Optional[str]:\n return pulumi.get(self, \"path\")", "def path(self, toNative=True):\n return self.text(toNative=toNative)", "def composePath(self,splitedPath):\n # 027 It is used anywhere?? Nope!! 
Remove!\n\n self.debug.printHeader()\n return os.sep.join(splitedPath)", "def __str__(self):\n return str(self.path.relative_to(os.getcwd()))", "def pathToFileName(self, path):\n\t\t# Find the path, and strip the leading slash.\n\t\tpath =urlparse.urlparse(self.path)[2].lstrip(\"/\")\n\t\t# Process url escape codes, and normalize the path.\n\t\tpath = os.path.normpath(urllib2.unquote(path))\n\t\t# normpath strips the last slash\n\t\tif os.path.isdir(path):\n\t\t\treturn path + '/'\n\t\telse:\n\t\t\treturn path", "def get_path(self):\n\n if not self.path:\n Settings.err_print(\"missing file path\")\n return \"\"\n return self.path", "def get_path(self):\n return self.path", "def get_path(self, path):\n if path.startswith('/') and not path.startswith('~/'):\n return os.getcwd() + '/' + path\n else:\n return path", "def get_actual_path(self, path):\n if self._params.path_to_dir[-1] != '/':\n if path:\n path = self._params.path_to_dir + '/' + path\n path = path.replace('//', '/')\n return path", "def full_path(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"full_path\")", "def format_path(file: str) -> str:\n return os.path.abspath([file.replace('/', os.path.sep)][0])", "def path(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"path\")", "def getPath(self, date, sep = '/'):\n\n return sep.join( [self.getDirName(date), self.getFileName(date)] )", "def get_path(self):\n return self.path", "def get_path(self):\n node_list = []\n node = self\n while node is not None:\n node_list.append(node)\n node = node.parent\n\n path = \"\"\n for i in range(len(node_list) - 1, -1, -1):\n path += node_list[i].__repr__()\n return path", "def __get_path(self):\n return self.path", "def path(self) -> str:\r\n path = []\r\n path.append(self._item[\"text\"])\r\n current_item: str = self._id\r\n\r\n while (parent := self._tree.parent(current_item)) != \"\":\r\n tree_item = self._tree.item(parent)\r\n path.append(tree_item[\"text\"])\r\n current_item = parent\r\n\r\n return REGISTRY_PATH_SEPARATOR.join(reversed(path))", "def saved_file_path_string(self):\n return self.saved_file_path.as_posix()", "def resolved_path(path):\n path = os.path.abspath(path)\n elements = path_elements(path)\n result = \"\"\n for element in elements:\n segment = element\n segment_path = os.path.join(result, segment)\n if os.path.islink(segment_path):\n segment = os.readlink(segment_path)\n result = os.path.join(result, segment)\n result = os.path.normpath(result)\n return result", "def get_path(self) -> Optional[str]:\n return self.path", "def _ref_name_from_path(self, path: str) -> str:\n prefix = \"%s/\" % self._path\n assert path.startswith(prefix)\n return path[len(prefix) :]", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self._path", "def path(self):\n return self.path", "def path(self) :\n return self.m_path", "def full_path(self):\n fullpath = os.path.join(self.path, self.name)\n if self.path == \"\":\n fullpath = self.name\n return fullpath", "def displaypath():\n\n import pathlib\n pth = pathlib.Path('./')\n pth.is_dir()\n pth.absolute()", "def convertString(path):\n if (\"win\" in sys.platform):\n return 
path.replace(\"/\",\"\\\\\")\n elif (\"linux\" in sys.platform):\n return path.replace(\"\\\\\",\"/\")", "def path_format(self):\n return '{}{}'.format(\n self.config['serve_at'],\n self.sub_base_pod_path)", "def _path_join(self, path):\n return os.path.join(self._path, path)", "def path_value(self, **kwargs):\n s = \"\"\n show_meta = kwargs.get(\"show_meta\", self.SHOW_META)\n show_path = show_meta and kwargs.get(\"show_path\", self.SHOW_PATH)\n if show_path:\n call_info = self.reflect.info\n if call_info:\n s = \"({}:{})\".format(self._get_path(call_info['file']), call_info['line'])\n return s" ]
[ "0.79950243", "0.77772427", "0.76012766", "0.75008774", "0.7465539", "0.73858505", "0.724589", "0.7177473", "0.71564364", "0.70764357", "0.70681226", "0.70649594", "0.70468634", "0.70299315", "0.6943011", "0.69423383", "0.6906489", "0.6906489", "0.69057655", "0.68910974", "0.686698", "0.6855079", "0.68383306", "0.683481", "0.6747693", "0.6722795", "0.6706666", "0.6686415", "0.6673883", "0.66704744", "0.6656784", "0.664926", "0.66281086", "0.65954816", "0.6582077", "0.6582077", "0.6582077", "0.6582077", "0.6558884", "0.65559906", "0.6525508", "0.6496141", "0.6459259", "0.645216", "0.645216", "0.64508295", "0.64505935", "0.6447761", "0.6441342", "0.6441342", "0.6406435", "0.6402241", "0.64007485", "0.64006805", "0.6399603", "0.63751245", "0.63545376", "0.63525766", "0.63395715", "0.63395715", "0.63395715", "0.6329269", "0.63216746", "0.6290353", "0.6270553", "0.6268803", "0.6268549", "0.6266405", "0.6263666", "0.62543905", "0.62478113", "0.6223247", "0.62112945", "0.6210859", "0.6193772", "0.6168795", "0.616583", "0.6156606", "0.6140135", "0.61388373", "0.61359894", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.6134406", "0.61328006", "0.61265504", "0.61261874", "0.611818", "0.6106609", "0.6091622", "0.6091224", "0.608659" ]
0.0
-1
Raises an exception for TypeErrors
def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None):
    self.path_to_item = path_to_item
    self.valid_classes = valid_classes
    self.key_type = key_type
    full_msg = msg
    if path_to_item:
        full_msg = "%s at %s" % (msg, render_path(path_to_item))
    super(ApiTypeError, self).__init__(full_msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_type_error(self):\n self._error_test(TypeError)", "def test__specification_type_to_python_type_unsupported_type(self):\n with self.assertRaises(TypeError):\n _specification_type_to_python_type(\"unsupported_type\")", "def throw(self, type, value=None, traceback=None):\n pass", "def test_value_error_for_computing_missing_type():\n with pytest.raises(ValueError):\n compute_type(\"missing_type\", {})", "def test_raises_type_error(self):\n wrong_type = dict()\n self.assertRaises(\n TypeError, util.convert_protobuf_to_proto_plus, wrong_type\n )", "def test_type(self):\n assert issubclass(Error, Exception)\n assert Error.__name__ == \"Error\"", "def test_unexpectedType(self):\n self.assertRaises(TypeError, nativeString, 1)", "def test_new_invalid(self) -> None:\n with pytest.raises(TypeError) as excinfo:\n RunwayTestDefinition({}) # type: ignore\n assert str(excinfo.value).startswith(\"expected data of type\")", "def test_validation_can_fail():\n\n @type_checked\n def _run_test(something:int): pass\n\n with pytest.raises(TypeError) as error:\n _run_test(\"abc\")\n\n assert \"abc is of type str, expecting int.\" in error.value.args", "def failure(self: _UnwrappableType) -> _SecondType:", "def test_type_errors():\n\n\ttry:\n\t\ttransmissions = compute_transmissions(cal_directory, lines = 3.0)\n\texcept TypeError:\n\t\ttry:\n\t\t\ttransmissions = compute_transmissions(cal_directory, calibrator = 300.0)\n\t\texcept TypeError:\n\t\t\tassert True\n\t\telse:\n\t\t\tassert False\n\telse:\n\t\tassert False", "def test_incorrect_arg_type(self):\n\n with pytest.raises(TypeError) as exc_info:\n upper_incomplete_gamma(a='A', z=0.3)\n\n expected_error_msg = (\n 'type of argument \"a\" must be one of (int, float); got str instead'\n )\n assert str(exc_info.value) == expected_error_msg", "def missing_types():\n\n return ...", "def test_etype__invalid(self):\n\n for etype in (\"SyntaxError\", self):\n self.assertRaises(TypeError, encode_string, \"test\", etype=etype)", "def test_etype__invalid(self):\n\n for etype in (\"SyntaxError\", self):\n self.assertRaises(TypeError, encode_file_path, \"test\", etype)", "def test_encode_errors(self):\n if self._cls == 'MetaschemaType':\n if self._invalid_validate:\n self.assert_raises(NotImplementedError, self.import_cls.encode,\n self._invalid_validate[0], self.typedef)\n else:\n if self._invalid_validate:\n self.assert_raises((ValueError,\n jsonschema.exceptions.ValidationError),\n self.import_cls.encode,\n self._invalid_validate[0], self.typedef)", "def test_should_raise_error_if_type_is_invalid(self):\r\n with self.assertRaises(ValueError):\r\n self.spec_parser.parse_statement({'type': 'sugar'})", "def unexpected_error(self, exception):", "def CheckType(self, *args, **kwargs):\n pass", "def _abort_invalid_data_type(\n self, field: str, field_type: str, value: Any) -> None:\n self.abort(400, msg=(\n f'Bad value for field {field} of type {field_type}: {value}'))", "def try_wrong_types(self, p, name, type_):\n for x in (1, 1.0, \"x\", True, np.ndarray,):\n if type(x) != type_:\n with self.assertRaises(TypeError, msg=f\"{name} {type_} {x}\"):\n setattr(p, name, x)", "def test_validate_on_invalid_data_type(self):\n args = (self.bytes_a, 'invalid')\n self.assertRaises(TypeError, objects.OpaqueObject, *args)", "def test_type_error(self):\n with self.assertRaises(TypeError):\n function_inclusion_filter_builder(5)", "def checkType(self, value):\n pass", "def report_unexpected_exception(self, *args, **kwargs):\n pass", "def test_flow_must_define_type(self):\n flow = 
MockFlowWithoutType()\n pytest.raises(\n ValueError, flow.authentication_flow_document, 'argument'\n )", "def check_type(self):\n return True", "def _TypeMismatch(a, b):\n return 'Types do not match, %s v. %s' % (str(a), str(b))", "def test_proto_plus_to_protobuf_raises_type_error(self):\n wrong_type = dict()\n self.assertRaises(\n TypeError, util.convert_proto_plus_to_protobuf, wrong_type\n )", "def test_prevent_wrong_type(self):\n self.assertRaises(cinv.host.Error, self.wrong_host_type)", "def unexpectedException(self):", "def check(self, value: Any) -> None:\n if not isinstance(value, self.oktype):\n raise TypeError(value)", "def test_from_object_fail(self):\n class InvalidClass(object):\n pass\n Invalid_object = InvalidClass()\n with self.assertRaises(TypeError):\n BaseDataClass.from_object(Invalid_object)", "def raise_error(error_type: str) -> None:\n error: Type[AirVisualError]\n try:\n error = next((v for k, v in ERROR_CODES.items() if k in error_type))\n except StopIteration:\n error = AirVisualError\n raise error(error_type)", "def test_exit_on_wrong_type(self):\n with self.assertRaises(SystemExit):\n pyint = Interpreter()\n pyint.run(file=WRONG_TYPE)", "def test_wrong_type_param():\n from scraper import get_inspection_page\n with pytest.raises(TypeError):\n get_inspection_page(Violation_Points=0, City='Seattle')", "def test_01_float(self):\n with self.assertRaises(TypeError) as x:\n r = Rectangle(float(1.2), float(2.2), 1)\n self.assertEqual(\"width must be an integer\", str(x.exception))", "def throw_method(type, value=None, traceback=None): # pylint: disable=redefined-builtin\n raise type, value, traceback", "def test_missing_generic_args(self):\n import System\n #TODO specify clearly which exception is appropriate here\n self.assertRaises(Exception, System.Collections.Generic.List)", "def test_wrong_type(self):\n msg = 'Widget type is not valid. 
Valid widget types are: ' + \\\n 'basic, default, formula, histogram, category, animation, time-series.'\n\n with pytest.raises(ValueError) as e:\n Widget({'type': 'xxx'}).get_info()\n assert str(e.value) == msg", "def _raise_argument_validation_exception(typedef, value, detail, expected_tokens=None):\n typedef_name = typedef.get('help-name')\n if typedef_name is None:\n typedef_name = typedef.get('name')\n if typedef_name is None:\n typedef_name = typedef.get('field')\n if typedef_name is None:\n typedef_name = '<unknown-type>'\n if detail is None:\n detail = ''\n validation_error_format = typedef.get('validation-error-format',\n 'Invalid %(typedef)s: %(value)s; %(detail)s')\n validation_error = (validation_error_format %\n {'typedef': typedef_name, 'value': str(value), 'detail': detail})\n raise error.ArgumentValidationError(validation_error, expected_tokens)", "def test04(self):\n self.assertRaises(TypeError, robustApply, oneArgument, \"this\", blah=\"that\")", "def check_bad_types(ctx, stmt):\n elemtype = stmt.search_one(\"type\")\n if elemtype is None or elemtype.arg not in BAD_TYPES:\n return\n\n err_add(ctx.errors, stmt.pos, \"OC_BAD_TYPE\",\n (elemtype.arg))", "def test_type_builder_raises_exception_on_invalid_schema_item_type():\n\n class UnknownSchemaItem(SchemaItem):\n pass\n\n schema = [\n SchemaObject(\n name=\"FakeObject\", properties=[UnknownSchemaItem(name=\"objectUnknown\")]\n )\n ]\n\n with pytest.raises(ValueError):\n _ = build_types(schema)", "def test_badsizevaluefloats(self):\n Rectangle.reset_objects()\n with self.assertRaises(TypeError) as e:\n r1 = Square(float(1), 1, 2, 3)\n self.assertEqual(str(e.exception), 'width must be an integer')", "def check_error_raises(type, argument):\n with pytest.raises(type) as error:\n argument()", "def provoke_and_handle_TypeError():\n try:\n print(\"loetungdusohn\" + 3)\n except TypeError as te:\n print(f\"Sorry! 
{te}\")", "def testUnsupportedType(self) -> None:\n fake_node = typing.cast(result_output.NodeType, 'a')\n with self.assertRaises(RuntimeError):\n result_output._RecursiveHtmlToFile(fake_node, self.output_file)", "def test_type_error_raised(self):\n with self.assertRaises(TypeError):\n authenticate(username=\"test\", password=\"test\")", "def test_wrong_input_on_creation(self):\r\n\r\n self.assertRaises(TypeError, TypedListType, None)", "def test_foo(self):\n foo = Foo(value=1)\n with self.assertRaises(TypeError):\n foo.foo()", "def test_not_supported_requirement(self, space_each_type):\n with pytest.raises(TypeError) as exc:\n build_required_space(space_each_type, type_requirement=\"fasdfasf\")\n assert \"Unsupported\" in str(exc.value)", "def negative_test_type(self):\n self.assertRaises(TypeError, MyClass().my_func, \"a\", [], \"a\")\n self.assertRaises(TypeError, MyClass().my_func, 1, 1, \"a\")\n self.assertRaises(TypeError, MyClass().my_func, 1, [], [])\n self.assertRaises(TypeError, MyClass().my_func, a=\"a\", b=[], c=\"a\")\n self.assertRaises(TypeError, MyClass().my_func, a=1, b=1, c=\"a\")\n self.assertRaises(TypeError, MyClass().my_func, a=1, b=[], c=[])", "def _check_type(self, new_value):\n raise NotImplementedError", "def test_should_return_error_if_stmt_contains_no_type(self):\r\n with self.assertRaises(TypeError):\r\n self.spec_parser.parse_statement({'name': 'todd'})", "def _assert_type(type):\n if isinstance(type, str):\n o, v, p, t = type.split('.')\n if not ontologies.is_supported(o, v, p, t):\n rt.throw(\"Type {0}.v{1}.{2} is unsupported.\".format(o, v, p, t))\n elif type not in ontologies.get_types():\n rt.throw(\"Type {0} is unsupported.\".format(type))", "def test_data_type(self):\n self.assertRaises(TypeError, Square, 'hello', 3, 2)\n self.assertRaises(TypeError, Square, 3, True, 2)\n self.assertRaises(TypeError, Square, 3, 2, 3.45)", "def test_get_other_typeerror(self):\n v = versions.Version(name='foo', version='1.2.3')\n self.assertRaises(TypeError, v._get_other, 3.4)", "def test_snmpset_non_existant_type():\n with pytest.raises(SNMPWriteError) as excinfo:\n snmpset(ipaddress=SNMP_SRV_ADDR, community='public',\n oid='SNMPv2-MIB::sysName.0', value_type='z',\n value='Test Description', port=SNMP_SRV_PORT)\n assert str(excinfo.value) == 'The type value you specified does not ' \\\n 'match one of the accepted type codes.\\n' \\\n 'Valid type codes are one of ' \\\n '(i|u|t|a|o|s|x|d|b)'", "def test_no_coercion():\n\n @type_checked(coerce=False)\n def _run_test(something:str): pass\n\n with pytest.raises(TypeError) as error:\n _run_test(1234)\n\n assert \"1234 is of type int, expecting str.\" in error.value.args", "def type_error(func_name, expect_tp, got_tp,\n arg_name=None, ret=False) -> TypeError:\n msg = func_name\n if ret:\n msg += f\"'s return\"\n elif arg_name:\n msg += f\"'s parameter '{arg_name}'\"\n msg += f\" expect type {expect_tp}, got {got_tp}.\"\n return TypeError(msg)", "def test_base_class_expection():\n with pytest.raises(TypeError):\n cardinal.CardinalPoints()", "def test_incorrect_type(self):\n body = json.dumps({\n \"first_name\": 200,\n \"last_name\": \"Holmes\",\n \"email\": \"sherlock@example.com\",\n \"password\": \"ilovek@ndA!\"\n })\n\n errorObject = {\n \"error\": \"Bad request\",\n \"field_errors\": {\n \"first_name\": [\"Invalid field type\"]\n }\n }\n\n result = self.simulate_post('/', body=body, headers=headers)\n\n self.assertEqual(result.status_code, 400)\n self.assertEqual(result.json, errorObject)", "def 
_validate_impropertype(contype):\n if contype is None:\n warnings.warn(\"Non-parametrized Improper detected\")\n elif not isinstance(contype, ImproperType):\n raise GMSOError(\"Supplied non-ImproperType {}\".format(contype))\n return contype", "def raise_error(Err):\n raise Err()", "def test_ensure_valid_model_type(self):\n # Note the \"valid\" type strings for our test\n test_types = [\"bar\", \"foo\", \"Sreeta\", \"Feras\"]\n # Note a set of invalid type strings for the test\n bad_types = [\"Tim\", \"Sam\"]\n\n # Alias the function to be tested\n func = pylogit.pylogit.ensure_valid_model_type\n\n # Make note of part of the error message that should be raised\n partial_error_msg = \"The specified model_type was not valid.\"\n\n # Perform the requisite tests\n for good_example in test_types:\n self.assertIsNone(func(good_example, test_types))\n for bad_example in bad_types:\n self.assertRaisesRegexp(ValueError,\n partial_error_msg,\n func,\n bad_example,\n test_types)\n\n return None", "def test_record_with_invalid_type_should_fail(self):\n zone = Zone('test.example.com')\n with self.assertRaises(InvalidRecordType) as e:\n Record(zone, 'test-record', {'type': 'FOO', 'ttl': 300})\n self.assertEqual('Type FOO is not supported', str(e.exception))", "def test_get_other_typeerror_2(self):\n v = versions.Version(name='foo', version='1.2.3')\n self.assertRaises(TypeError, v._get_other, '1')", "def type_error(var, types):\n\n divisor = None\n if len(types) == 2:\n divisor = \" or \"\n elif len(types) > 2:\n divisor = \", \"\n\n raise TypeError(\n \"'{var_name}' must be {type}, received '{var_type}'\"\n .format(var_name=RaiseIfNot._get_name(var),\n type=divisor.join(map(\n lambda x: \"'\" + x + \"'\",\n types)), var_type=type(var)))", "def test_invalid_argument_type(self):\n t = TruthTable('A or B')\n\n with self.assertRaises(InvalidArgumentTypeError):\n t.equivalent_to(float())\n\n with self.assertRaises(InvalidArgumentTypeError):\n t.equivalent_to(None)", "def test_read_type_error():\n filename = {}\n with pytest.raises(TypeError):\n read_file(filename)", "def _validate_type(data, type, err): # lint-amnesty, pylint: disable=redefined-builtin\n if not isinstance(data, type):\n raise errors.AccountDataBadType(err)", "def test_class_errored(self, cls, exception):", "def test_parse_obj_invalid(self) -> None:\n with pytest.raises(ValidationError):\n RunwayTestDefinition.parse_obj({\"type\": \"invalid\"})", "def test_arg_type(args, arg, arg_type):\n try:\n arg_type(args[arg])\n except Exception:\n raise GaiaException('Required argument {} must be of type {}'\n .format(arg, arg_type))", "def test__validate_arg_type(parameter_name, parameter, expected_type, raises) :\n\n if raises is not None : \n # We expect this to raise an error\n with pytest.raises(raises) :\n _validate_arg_type(parameter_name, parameter, expected_type)\n else :\n _validate_arg_type(parameter_name, parameter, expected_type)", "def __raise_clean_exception(exc_type, exc_value, exc_traceback):\n if exc_type.__name__ not in dir(napalm.exceptions) and \\\n exc_type.__name__ not in __builtins__.keys():\n epilog = (\"NAPALM didn't catch this exception. 
Please, fill a bugfix on \"\n \"https://github.com/napalm-automation/napalm/issues\\n\"\n \"Don't forget to include this traceback.\")\n print(epilog)\n raise exc_type, exc_value, exc_traceback", "def Invalid(\r\n self, s: str = \"\", e: Type[BaseException] = None, fail: bool = False\r\n) -> None:\r\n ...", "def test_exception(self):\n self.assertRaises(TypeError, lambda: self.init_model())", "def test_invalid_data_types(self):\n response=self.check_invalid_data_type()\n result = json.loads(response.data.decode('utf-8'))\n self.assertEqual(result['Error'],\"Require int or float type\")\n self.assertEqual(response.status_code, 200)", "def test_badxvaluewithfloats(self):\n Rectangle.reset_objects()\n with self.assertRaises(TypeError) as e:\n r1 = Square(1, float(1), 2, 3)\n self.assertEqual(str(e.exception), 'x must be an integer')", "def test_unknown_type(testdir: Testdir) -> None:\n schema = '''\n datasource db {{\n provider = \"postgres\"\n url = env(\"POSTGRES_URL\")\n }}\n\n generator db {{\n provider = \"coverage run -m prisma\"\n output = \"{output}\"\n {options}\n }}\n\n model User {{\n id String @id\n meta Json\n }}\n '''\n with pytest.raises(subprocess.CalledProcessError) as exc:\n testdir.generate(schema=schema)\n\n assert 'Unknown scalar type: Json' in str(exc.value.output, 'utf-8')", "def test_types(self):\n self.assertRaises(TypeError, max_integer, None)\n self.assertRaises(TypeError, max_integer, 1234)", "def generate_datatype_error(variable):\n dtype = variable.dtype\n\n if isinstance(dtype, NativeBool):\n precision = ''\n if isinstance(dtype, NativeComplex):\n precision = '{} bit '.format(variable.precision * 2 * 8)\n else:\n precision = '{} bit '.format(variable.precision * 8)\n\n message = '\"Argument must be {precision}{dtype}\"'.format(\n precision = precision,\n dtype = variable.dtype)\n return PyErr_SetString('PyExc_TypeError', message)", "def testSlopeBadType(self):\n def setSlope():\n self.cc.slope = 'ban'\n\n self.assertRaises(\n TypeError,\n setSlope\n )", "def testProtocolSetBadType(self):\n def setProtocol():\n self.mr.protocol = 12345\n\n self.assertRaises(\n TypeError,\n setProtocol\n )", "def test_value_init17(self):\n with self.assertRaises(TypeError) as err:\n r1 = Rectangle(1, 2, 3, \"hi\")\n msg = \"y must be an integer\"\n self.assertEqual(str(err.exception), msg)", "def throw(self):\n pass", "def test_badxvaluewithstring(self):\n Rectangle.reset_objects()\n with self.assertRaises(TypeError) as e:\n r1 = Square(1, \"foo\", 2, 3)\n self.assertEqual(str(e.exception), 'x must be an integer')", "def test_ticket_type_change_error_bad_type(self):\n test_name = sys._getframe().f_code.co_name\n rv, output = self._execute('ticket_type change bad_type changed_type')\n self.assertEqual(2, rv)\n self.assertEqual(self.expected_results[test_name], output)", "def test_invalid_expression_type(self, parse_input_mocked_metadata):\n with pytest.raises(TypeError, match=r\"not of declared type int\"):\n parse_input_mocked_metadata(\"int Beta = -0.231e-6+5.21e-2j\")", "def catch_typeerror(func):\n def f(self, *args, **kwargs):\n try:\n return func(self, *args, **kwargs)\n except TypeError, exn:\n #log.exception('catch_typeerror')\n if hasattr(func, 'api') and func.api in argcounts:\n # Assume that if the argument count was wrong and if the\n # exception was thrown inside this file, then it is due to an\n # invalid call from the client, otherwise it's an internal\n # error (which will be handled further up).\n expected = argcounts[func.api]\n actual = len(args) + len(kwargs)\n 
if expected != actual:\n tb = sys.exc_info()[2]\n try:\n sourcefile = traceback.extract_tb(tb)[-1][0]\n if sourcefile == inspect.getsourcefile(BNVMAPI):\n return xen_api_error(\n ['MESSAGE_PARAMETER_COUNT_MISMATCH',\n func.api, expected, actual])\n finally:\n del tb\n raise\n except BNAPIError, exn:\n return xen_api_error(exn.get_api_error())\n\n return f", "def check_r_type(r):\n if type(r) is str:\n raise TypeError('Get Error message.')", "def test_tolerate_dumb_signature(self, exception_class):\n\n try:\n i_live_but_why = exception_class(616)\n except Exception as exc:\n pytest.fail(str(exc))\n\n assert isinstance(i_live_but_why, exception_class)", "def _check_type_compatibility(self, type_name1, type_name2,\n operation):\n if type_name1 != type_name2:\n raise TypeCompatibilityError(type_name1, type_name2, operation)", "def test_empty_dict_failure():\n\n @type_checked\n def _run_test(thing:{}): pass\n\n with pytest.raises(TypeError):\n _run_test(1)", "def test_badyvaluewithfloats(self):\n Rectangle.reset_objects()\n with self.assertRaises(TypeError) as e:\n r1 = Square(1, 2, float(1), 3)\n self.assertEqual(str(e.exception), 'y must be an integer')", "def test_badyvaluewithstring(self):\n Rectangle.reset_objects()\n with self.assertRaises(TypeError) as e:\n r1 = Square(1, 2, \"foo\", 3)\n self.assertEqual(str(e.exception), 'y must be an integer')", "def ensure_type(var, *expected_types, _up_extra_frames=0):\n for t in expected_types:\n if isinstance(var, t):\n return True\n type_err = f'Type-checking caller: {get_caller_function(_up_extra_frames=_up_extra_frames)}(): For object (value = {var}), ' \\\n f'expected type(s) was {expected_types} but instead found {type(var)}'\n logger.error(type_err)\n raise TypeError(type_err)", "def test_bad_input(alice):\n with pytest.raises(TypeError):\n alice.pack(\"blah\")", "def test_type_check(ExampleComponentClass):\n\n instance = ExampleComponentClass()\n\n configure(instance, {\"a\": 4.5}, name=\"x\")\n\n # Attempting to access the field should now raise a type error.\n with pytest.raises(\n TypeError,\n match=\"Field 'a' of component 'x' is annotated with type '<class 'int'>', which is not satisfied by value 4.5.\",\n ):\n instance.a" ]
[ "0.80643433", "0.7329728", "0.72639966", "0.7262278", "0.7229491", "0.72008634", "0.71786124", "0.6970523", "0.6896439", "0.688244", "0.6782924", "0.6753176", "0.67404634", "0.6738435", "0.67235684", "0.66774726", "0.6669857", "0.6658878", "0.6629325", "0.6617021", "0.6593539", "0.65928453", "0.65463495", "0.6534613", "0.6474196", "0.6463541", "0.645909", "0.64273983", "0.64249355", "0.6423662", "0.64182675", "0.6407249", "0.6365898", "0.63569885", "0.6355569", "0.6313971", "0.63070166", "0.6305967", "0.6297683", "0.6297416", "0.62962866", "0.62893736", "0.62848514", "0.6281904", "0.6280031", "0.62665397", "0.6266437", "0.62661743", "0.6260787", "0.6252566", "0.6248182", "0.6243766", "0.6240076", "0.6223235", "0.6220008", "0.6215615", "0.6211937", "0.6196232", "0.6188302", "0.6187799", "0.61817145", "0.61631143", "0.61618197", "0.61538595", "0.6145359", "0.61383593", "0.61300665", "0.61282146", "0.61234355", "0.6123308", "0.6099262", "0.6096756", "0.609479", "0.60944283", "0.6094169", "0.6093842", "0.6091072", "0.6090621", "0.60855097", "0.6084265", "0.6077815", "0.6077136", "0.6069214", "0.6068849", "0.606634", "0.6052533", "0.6051068", "0.6048765", "0.6044212", "0.60389364", "0.6038584", "0.6033797", "0.6028296", "0.60094225", "0.60006535", "0.5987757", "0.59863025", "0.59785384", "0.5978219", "0.59731257", "0.5969695" ]
0.0
-1
Reloads the Polls file.
def reloadpolls(self, irc, msg, args):
    try:
        self.polls = yaml.load(open(self.pollFile, 'r'), Loader=yamlordereddictloader.Loader)
    except FileNotFoundError as e:
        log.warning("Couldn't open file: %s" % e)
        raise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reloadfile(self, ):\n self.loadfile()", "def refresh(self):\n self.config.read(self.filename)\n self.loadRecentFiles()", "def reload(self):\n puts('Reloading application...')\n local('touch ../reload.txt')", "def reload(self):\n\n pass", "def reload(self):", "def reload(self):", "def handleReload(self, confInfo=None):", "def reload(self):\n with open(self._config) as f:\n self.data = json.load(f)", "def reload(self):\n self.read(self._cfg_path)", "def reload(self):\n if len(self.files) > 0:\n self.load(self.files, regfiles=self.regions)", "def reload(self):\n if self.filename is not None:\n self.channels.clear()\n try:\n self.open(self.filename)\n except EnvironmentError, e:\n log.warning('ChannelsDictionary.reload failed: %s', e)\n else:\n log.warning('ChannelsDictionary.reload without self.filename.')", "def reload(self, filename = None):\r\n if self.config.get('world', 'autosave'):\r\n self.save()\r\n self.load(filename or self.filename)", "def reload_cookies(self):\n\n if os.path.exists(self.location_of_cookies):\n with open(self.location_of_cookies, 'rb') as f:\n cookies = pickle.load(f)\n self.load_cookies(cookies, self.cookie_domain)\n \n f.close()", "async def poll_refresh(self) -> None:\n await self._send_message_get_response(OutgoingMessage(OutgoingMessageType.poll_refresh))", "def refresh(self):\n self.update_from_file()\n self.update_from_env()", "async def reload(ctx, name):\n await unload_extension(name, channel=ctx.channel)\n await load_extension(name, channel=ctx.channel)", "def reload_config(self):\n pass", "def refresh(self) -> None:\n self.data = {}\n self.load_settings_file(self.default_settings_path / \"settings.yaml\", file_key=\"internal\")\n self.load_systems(self.default_settings_path / \"systems\")\n self.load_settings_file(self.personal_dir / \"settings.yaml\", file_key=\"user\")\n self.load_systems(self.personal_dir / \"systems\")", "def load(self, filepath=''):\n sleep(20)\n pass", "def refresh(self, btn=None):\n\n if self.visible and self.filename:\n self.load_file(self.filename, self.title)", "def reload(self):\n self.nextId = 0\n self.users.clear()\n self._nameCache.clear()\n self._hostmaskCache.clear()\n if self.filename is not None:\n try:\n self.open(self.filename)\n except EnvironmentError, e:\n log.warning('UsersDictionary.reload failed: %s', e)\n else:\n log.error('UsersDictionary.reload called with no filename.')", "def reload(self) -> None: # pragma: no cover\n raise NotImplementedError()", "async def giveaway_reload(self, ctx):\n self._load_games()\n await ctx.send(\n f\"Reloaded list of games ({len(self.steam_keys)} games)\")", "def refresh(self, list_of_tables):\n self.dismod_file.refresh(list_of_tables)", "def refresh(self, url, args, cancellationSignal):\n pass", "def receive_reload_request(self, _: EmptyMsg):\n self.update()", "def trigger_reloading(self) -> None:\n self.trigger_signal(\"reloading\")", "def reload(self):\n self.rpc.call(MsfRpcMethod.CoreReloadModules)", "def reload_info(self):\n self.__loop.run_until_complete(self.__reload_info())", "def reload( self ):\n\t\tCORE.info( 'Reloading resources: modules, categories' )\n\t\tmoduleManager.load()\n\t\tcategoryManager.load()\n\t\tRESOURCES.info( 'Reloading UCR variables' )\n\t\tucr.load()", "def reload(self) -> None:\n parsed, combined, fragments = self._stateless_reload(self._updates)\n self._set_state(parsed, combined, fragments, self._updates)", "def refresh(self) -> None:\n pass", "def refresh(self) -> None:\n pass", "def refresh(self) -> None:\n pass", "def 
refresh(self):\n\n self._refreshed_on = time.time() * 1000", "def refresh(self, filename, template_dir, cache_dir):\n\t\tself.update(filename, template_dir, cache_dir)\n\t\tself.tab_dep=1\n\t\tself.page=[]", "def reload(self):\n self.restore()", "def refresh(self):\n pass", "def refresh(self):\n pass", "def refresh(self):\n\t\tself._raven_list = self._findRaven()\n\t\tif self._raven:\n\t\t\tself.close()\n\t\tif len(self._raven_list):\n\t\t\tself._raven = serial.Serial(self._raven_list[0].device,\n\t\t\t\t\t\t\tbaudrate=115200)\n\t\telse:\n\t\t\tself._raven = None", "def refresh(*args, currentView: bool=True, fileExtension: AnyStr=\"\", filename: AnyStr=\"\",\n force: bool=True, suspend: bool=True, **kwargs)->None:\n pass", "def reload(self):\n\n fn = self[\"~filename\"]\n saved = {}\n for key in self:\n if key in MIGRATE: saved[key] = self[key]\n self.clear()\n self[\"~filename\"] = fn\n self.__init__(fn)\n self.update(saved)", "def repoll(self, date, time, files=None, event='store close'):\n # Assuming no action were performed after the window opened\n # The calendar should be set to today's date\n self._pick_calendar_date(date)\n\n # Select the event\n if not mws.select_radio(event.title()):\n self.log.error(f\"Unable to select the target event '{event.title()}'\")\n return False\n\n # Select time period\n if not mws.select(\"Choose a Period of Time\", time):\n self.log.error(f\"Unable to select a target time period {time}\")\n return False\n\n # Check the target files\n if not files:\n # All files are to be repolled\n if not mws.select_radio(\"Repoll All Configured Files\"):\n self.log.error(f\"Unable to select 'Repoll All Configured Files' option.\")\n return False\n else:\n # Some files are to be repolled\n # Convert the list to the dictionary\n files_dic = {}\n for file in files:\n files_dic[file] = True\n\n # Reset the current status of the files in the list and select target files\n if not ( mws.select_radio(\"Repoll All Configured Files\") and mws.select_radio(\"Repoll Selected Files\") ):\n self.log.error(f\"Unable to select 'Repoll Selected Files' option.\")\n return False\n \n if not mws.config_flexgrid(\"XML Document List\", files_dic, 500):\n self.log.error(f\"Unable to set the repoll status for the provided files\")\n return False\n \n # Click Ok and we are done (probably)\n try:\n mws.click_toolbar(\"Save\", main=True, main_wait=3)\n except mws.ConnException:\n # Check for top bar message\n top_bar_message = mws.get_top_bar_text()\n if top_bar_message:\n self.log.error(\"Unable to save \")\n self.log.error(f\"Unexpected top bar message is '{top_bar_message}'\")\n system.takescreenshot()\n mws.click_toolbar(\"Ok\")\n \n return True", "def refresh(self):\n self.__refresh()", "def reload(self):\n try:\n with open(FileStorage.__file_path) as f:\n objs = json.load(f)\n for obj in objs.values():\n name = obj['__class__']\n del obj['__class__']\n self.new(eval(name)(**obj))\n except FileNotFoundError:\n return", "def on_refreshButton_clicked(self):\n self.buttonBox.button(QDialogButtonBox.Close).setEnabled(False)\n \n self.buttonBox.button(QDialogButtonBox.Save).setEnabled(False)\n self.refreshButton.setEnabled(False)\n \n self.start(self.__filename, diffMode=self.__diffMode, refreshable=True)", "def reopen():", "def reload():\n if not _status_apf():\n return __apf_cmd(\"-r\")", "def refresh(self):\n self._refresh_method()", "def Reload(self):\n self._inspector_backend.Navigate(self.url, None, 10)", "def reload_state(self):\n if len(s.item) > 0:\n for item in s.item:\n 
self.items_list.insert(\n \"\", \"end\", values=(item.get(\"item\"), item.get(\"url\"), \" \")\n )\n # Update with saved settings\n # Update the refresh interval\n if s.setting != \"\":\n self.interval_entry.delete(0, \"end\")\n self.interval_entry.insert(0, s.setting)\n if s.LaunchAtStartup == \"True\":\n self.launch_at_start_up.select()\n else:\n self.launch_at_start_up.deselect()\n if s.Minimize == \"True\":\n self.minimize_to_system_tray.select()\n else:\n self.minimize_to_system_tray.deselect()\n # If the email alert is included in the state file\n if \"Email\" in s.alert:\n self.email_alert_box.select()\n # Display the email address\n emaddress = s.email\n self.email_addr_entry.insert(0, emaddress)", "def refresh(self):\n\t\tself.driver.refresh()", "def reload_cache(self):\n self.data = self.read_data_cache()", "async def reload_all(ctx):\n await ext_manager.reload_all()\n await ctx.send(\"Successfully reloaded.\")", "def reloadData(self):\n self.dto.readFromData()\n print(\"Record reloaded.\")", "def onReload(self, event):\n\n\t\tself.wv.Reload()", "def refresh_plugin(self):\n pass", "def refresh(self):\n self.fetch(False)", "def refresh(self):\n self._policies = self._get_policies()", "def refresh_config(self):\n with open(config_name, 'rb') as f:\n self.CONFIG = simplejson.load(f)\n\n return self", "def reload_data(self):\n self._avro_payload.reload_data()", "def refresh(conf):\n try:\n if 'variable' in conf.keys():\n proxy_list = []\n for variable in conf['variable'] :\n proxy_list.extend(parse(conf['url'] % variable, conf))\n else:\n proxy_list = parse(conf)\n\n to_file(proxy_list, conf)\n except Exception , e:\n raise e", "def reload(bot, event, *args):\n\n yield from bot.coro_send_message(event.conv, \"<b>reloading config.json</b>\")\n bot.config.load()\n\n yield from bot.coro_send_message(event.conv, \"<b>reloading memory.json</b>\")\n bot.memory.load()", "async def reload():\n global DF\n DF = load_data()\n return True", "def on_reload_button_cicked_(self):\n self.pause_subscriber = True\n\n self._load_robot_description()\n controllers = self.get_current_controllers()\n\n self.joints = self._create_joints(controllers)\n\n self.synergy = self._create_synergy(controllers)\n\n self.delete_old_sliders_()\n\n # self._widget.sliderReleaseCheckBox.setCheckState(Qt.Unchecked)\n\n self.load_new_synergy_sliders_()\n\n # self.load_new_sliders_()\n\n self._update_synergy_viewer()\n\n self.pause_subscriber = False", "def on_click_reload(self):\n with suppress_errors():\n self.load_imdb()\n self.load_exp()", "def refresh(self):\n f = open(self._filepath, 'r')\n self._raw_sysfs_data = f.read()\n f.close()\n self._process_raw_data()", "def notify_file_transfer_completed(self):\n self.presentation.load() if len(self.presentation.presentation_elements) == 0 else self.presentation.reload()", "async def reload(self, ctx, extension_name: str):\n if await ctx.bot.is_owner(ctx.message.author):\n unload = ctx.bot.get_command('unload')\n load = ctx.bot.get_command('load')\n await ctx.invoke(unload, extension_name=extension_name)\n await ctx.invoke(load, extension_name=extension_name)\n else:\n await ctx.send(dis())", "def reload(self):\n try:\n with open(self.__file_path, mode=\"r\", encoding='UTF-8') as f:\n readit = json.load(f)\n for v in readit.values():\n from ..base_model import BaseModel\n from ..user import User\n from ..state import State\n from ..city import City\n from ..amenity import Amenity\n from ..place import Place\n from ..review import Review\n\n a = 
eval(\"{}(**v)\".format(v[\"__class__\"]))\n self.new(a)\n\n except FileNotFoundError:\n \"\"\"\n No file has been found so pass\n \"\"\"\n pass", "def Refresh(self):\n pass", "async def async_trigger_reloading(self) -> None:\n await self.async_trigger_signal(\"reloading\")", "def rehash(self):\n logging.info(\"Rehashing started\")\n modules = self.cmd_plugins.get_modules()\n CommandBot.pause(self)\n PlugBot.stop(self)\n\n logging.info(\"Reloading config file\")\n self.botconfig = self.load_config(self.config_file)\n for module in modules:\n reload(module)\n CommandBot.reset(self)\n\n PlugBot.start(self)\n CommandBot.resume(self)\n self.join_rooms()", "def refresh_status(self):\n\n pass", "def reloadMode(self): \n\t\tpass", "def syncrepl_refreshdone(self):\n pass", "def reload(self):\n\t\toldlayers = self.layers\n\t\tself.layers = []\n\t\tfor cp, filename, fp in oldlayers:\n\t\t\tcp = cp # pylint\n\t\t\tif fp is None:\n\t\t\t\tself.read(filename)\n\t\t\telse:\n\t\t\t\tself.readfp(fp, filename)", "def reload(bot, event, *args):\n bot.config.load()\n bot.memory.load()", "def reload(self):\n if file_exist(self.__file_path):\n with open(self.__file_path, \"r\", encoding=\"UTF-8\") as file:\n data = read_data(file)\n for key, value in data.items():\n instance = BaseModel(**value)\n FileStorage.__objects[key] = instance", "def ForceReload():\n url = 'http://icfpc2013.cloudapp.net/myproblems?auth=0017eB6c6r7IJcmlTb3v4kJdHXt1re22QaYgz0KjvpsH1H'\n reader = urllib2.urlopen(url)\n problems_string = reader.read()\n reader.close()\n try:\n fout = open(GetModelFilename(), mode='w')\n fout.write(problems_string)\n fout.close()\n except IOError:\n sys.stderr.write('failed to write to model file.')\n return", "def reload(self):\n self.load_config()\n # Seems we need to explicitly refresh this\n if self.main_instance:\n self.main_instance.config = self.config", "def refresh_configuration(self):\n pass", "def refresh(self) :\n if not self.running:\n self.running = True\n self.strip.show()\n self.running = False\n self.refreshTimer.expired = True\n self.refreshTimer.isrunning = False", "def refresh(self):\n hasChanged = self.hasChanged()\n if hasChanged: self.loadIni()\n if len(self.loadFiles) > 255:\n del self.loadFiles[255:]\n self.safeSave()\n return hasChanged", "def reload_storage_policies():\n global _POLICIES\n policy_conf = ConfigParser()\n policy_conf.read(SWIFT_CONF_FILE)\n try:\n _POLICIES = parse_storage_policies(policy_conf)\n except PolicyError as e:\n raise SystemExit('ERROR: Invalid Storage Policy Configuration '\n 'in %s (%s)' % (SWIFT_CONF_FILE, e))", "def reload_ini(self):\n while True:\n\n if round(os.path.getmtime(self.config_file)) > self.config_last_modified:\n print('Config Changes Detected, Reloading .ini File')\n config = configparser.ConfigParser()\n config.read(self.config_file)\n self._set_ini_options(config)\n self.config_last_modified = round(os.path.getmtime(self.config_file))\n\n time.sleep(3)", "def refresh(self):\n self.Refresh()", "def refresh(self):\n raise NotImplementedError", "def refresh(self):\n raise NotImplementedError", "def refresh(self):\n self.dto = self.res.get()\n log.debug(f\"Refreshed {self.url}\")", "def handleReload(self, confInfo):\r\n \r\n # Refresh the configuration (handles disk based updates)\r\n entity.refreshEntities('properties/radius', sessionKey=self.getSessionKey())", "def refresh(self, new_content):\n pass", "def refresh(self):\n raise NotImplementedError(\"To be implemented\")", "def refresh_from_api(self):\n 
self.populate_from_api(self.get_from_api())", "def reload_job(self):\n if self.ui['main_window'].widgets['live_preview'].get_active():\n self._update_preview()", "def reload_config(self):\n if self.faucet is not None:\n self.faucet.reload_config(None)", "def reload_state(self):\n\n log.debug(\"Reload state from file %s\" % self.state_filename)\n if path.isfile(self.state_filename):\n with open(self.state_filename) as sf:\n self.state = yaml.safe_load(sf)\n\n if self.state is None:\n log.debug(\"Statefile returned none\")\n else:\n log.debug(\"Statefile does not exist\")\n self.state = {}", "def reload_data(self):\n super(UpdateMessage, self).reload_data()\n self._previous_avro_payload.reload_data()", "def refresh():\n\tsocketio.emit('refresh')\n\treturn status()", "def _refresh(self):\n resp = self._cb.get_object(self._build_api_request_uri())\n self._info = resp\n self._last_refresh_time = time.time()\n return True" ]
[ "0.74811375", "0.6804164", "0.6666194", "0.66235316", "0.65775543", "0.65775543", "0.65237594", "0.64763457", "0.6454685", "0.6289295", "0.62368816", "0.6080593", "0.6055079", "0.60324496", "0.5997774", "0.5995909", "0.59953433", "0.5965536", "0.5963405", "0.59284127", "0.5891991", "0.5854808", "0.5853909", "0.5841499", "0.5803716", "0.5801305", "0.5791334", "0.57649046", "0.57626754", "0.5754108", "0.5725183", "0.57098997", "0.57098997", "0.57098997", "0.5709128", "0.5700869", "0.56954116", "0.56867886", "0.56867886", "0.5678937", "0.5656663", "0.5648306", "0.5632962", "0.5625964", "0.56169087", "0.56051236", "0.5584726", "0.55846405", "0.55804724", "0.5561954", "0.5550714", "0.5550133", "0.55418396", "0.5527689", "0.5527046", "0.5523535", "0.5508357", "0.55039454", "0.54880655", "0.5487634", "0.54851955", "0.54807454", "0.5460417", "0.5456618", "0.54469645", "0.54411304", "0.5432043", "0.5428059", "0.5427726", "0.54195654", "0.5417484", "0.5399755", "0.53988963", "0.5391076", "0.5379174", "0.5370964", "0.5369476", "0.5366944", "0.5364323", "0.535596", "0.5352235", "0.53519565", "0.53461134", "0.5333737", "0.53325677", "0.532874", "0.5326502", "0.5324853", "0.5324853", "0.53224295", "0.53209853", "0.53188884", "0.5315035", "0.53109044", "0.53107035", "0.5299553", "0.52987534", "0.52986985", "0.52979505", "0.52948964" ]
0.7334716
1
Lists current polls. For a breakdown via statuses, see ' Vote votes'
def listpolls(self, irc, msg, args, channel): if channel and msg.args[0] in irc.state.channels: if self.polls is None: self.polls = [] if self.polls is []: irc.reply("No Polls.") for idx, entry in enumerate(self.polls[channel]): entry_string = [] question = entry['question'] yays = entry['yays'] nays = entry['nays'] added_by = entry['added_by'] # concluded = entry['concluded'] entry_string.append("%d: %s" % (idx, question)) entry_string.append("Yes: %s" % (' '.join(yays) if yays != [] else 'none')) entry_string.append("No: %s" % (' '.join(nays) if nays != [] else 'none')) entry_string.append("Question asked by %s" % added_by) irc.reply(' / '.join(entry_string), notice=True, private=True, prefixNick=False) else: try: if ircdb.checkCapability(msg.prefix, 'admin') or ircdb.checkCapability(msg.prefix, 'owner'): if self.polls is None: self.polls = [] if self.polls is []: irc.reply("No Polls.") for idx, entry in enumerate(self.polls[channel]): entry_string = [] question = entry['question'] yays = entry['yays'] nays = entry['nays'] added_by = entry['added_by'] # concluded = entry['concluded'] entry_string.append("%d: %s" % (idx, question)) entry_string.append("Yays: %s" % (' '.join(yays) if yays != [] else 'none')) entry_string.append("Nays: %s" % (' '.join(nays) if nays != [] else 'none')) entry_string.append("Question asked by %s" % added_by) irc.reply(' / '.join(entry_string), notice=True, private=True, prefixNick=False) else: irc.errorInvalid('argument', channel) except KeyError: return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index(request, count=10, template_name=\"pollit/index.html\"):\n polls = Poll.objects.get_latest_polls(count=count, include_expired=True)\n \n return render_to_response(template_name,\n {'poll_list': polls},\n context_instance=RequestContext(request))", "def polls_list(request):\n\n polls = Poll.objects.all()[:MAX_OBJECTS]\n data = {\n 'result': list(polls.values('question', 'created_by__username', 'pub_date'))\n }\n return JsonResponse(data)", "def get_invited_polls(self):\n\n invited_polls = []\n for poll_user in PollUser.objects.filter(user=self):\n invited_polls.append(poll_user.poll)\n\n return invited_polls", "def index(request):\n latest_question_list = Question.objects.order_by('-pub_date')[:]\n template = loader.get_template('polls/index.html')\n context = {'latest_question_list': latest_question_list, }\n return HttpResponse(template.render(context, request))", "def get_queryset(self):\n return Poll.objects.order_by('-pub_date')[:5]", "def offers_list(update, context):\n chat = Chat.get(update.message.chat_id)\n\n if not chat.cart.subscriptions:\n text = 'ℹ️ Du får ingen tilbud, hvis du ikke har nogen søgninger.'\n update.message.reply_text(text)\n\n for sub in chat.cart:\n if not sub.offers:\n text = f'ℹ️ Søgningen efter \"{sub.query}\" har ingen tilbud.'\n update.message.reply_text(text)\n continue\n\n lines = [\n f'ℹ️ Søgningen efter \"{sub.query}\" har {len(sub.offers)} tilbud:',\n ''\n ]\n for offer in sub.offers:\n print(offer, offer.timeleft(), offer.run_till)\n lines.append(offer_text(offer))\n\n update.message.reply_text('\\n'.join(lines))", "def polls_details(request, pk):\n poll = get_object_or_404(Poll, pk=pk)\n data = {\n 'result': {\n 'question': poll.question,\n 'created_by': poll.created_by.username,\n 'pub_date': poll.pub_date\n }\n }\n return JsonResponse(data)", "def ballot_list(request, election):\n limit = after = None\n if 'limit' in request.GET:\n limit = int(request.GET['limit'])\n if 'after' in request.GET:\n after = datetime.datetime.strptime(request.GET['after'], '%Y-%m-%d %H:%M:%S')\n \n voters = Voter.get_by_election(election, cast=True, order_by='cast_at', limit=limit, after=after)\n\n # we explicitly cast this to a short cast vote\n return [v.last_cast_vote().ld_object.short.toDict(complete=True) for v in voters]", "def get_available_polls(game_type_id):\n\n poll_response = requests.get(\n url=f'{settings.GAME_SETUP_URL}/all-polls/{game_type_id}/',\n timeout=5 # in sec\n )\n if poll_response.status_code == 200:\n return poll_response.json()\n return {}", "def ls():\n if not g.userpl:\n g.message = F('no playlists')\n g.content = g.content or generate_songlist_display(zeromsg=g.message)\n\n else:\n g.content = playlists_display()\n g.message = F('pl help')", "def get_choice(cls, polls):\n\n cl = cls()\n items = []\n for poll in polls:\n items.append((poll.id, poll.question))\n\n setattr(cl.poll, 'items', items)\n return cl", "def on_connect(self):\n if current_user.is_authenticated:\n polls = Poll.query \\\n .filter(User.rooms.any(User.id == current_user.id)) \\\n .filter(or_(Poll.visible.is_(True), Room.owner_id == current_user.id)).all()\n else:\n if session.get(\"rooms\") is not None:\n polls = Poll.query \\\n .filter(Room.id.in_(session.get(\"rooms\"))) \\\n .filter(Poll.visible.is_(True)).all()\n else:\n polls = []\n\n for poll in polls:\n join_room(poll.id)", "def results(request, year, month, day, slug, template_name=\"pollit/results.html\"):\n params = {\n 'pub_date__year': year,\n 'pub_date__month': datetime.datetime.strptime(month, 
'%b').month,\n 'slug': slug,\n }\n \n if MULTIPLE_SITES:\n params['sites__pk'] = settings.SITE_ID\n if day is not None:\n params['pub_date__day'] = day\n \n try:\n poll = Poll.objects.get(**params)\n except:\n raise Http404\n \n ip = get_client_ip(request)\n poll_choice_id = get_poll_choice_id_from_cookies(poll, request.COOKIES)\n poll_choice = poll.get_poll_choice(request.user, poll_choice_id, ip)\n \n return render_to_response(template_name,\n {'poll': poll,\n 'has_voted': poll_choice is not None,\n 'user_choice': poll_choice},\n context_instance=RequestContext(request))", "def get_voters():", "def get_voters():", "def watchlist(request):\n \n # query for listings that are in current user's watchlist\n auctions = Auction_listing.objects.filter(watchlist__user=request.user)\n current_bid = Auction_listing.objects.annotate(max_bid=Max('bid__bid'))\n \n return render(request, 'auctions/watchlist.html', {\n 'auctions': auctions,\n \"current_bid\": current_bid\n })", "def _get_poll_info(self, poll_id):\n url = 'https://strawpoll.me/api/v2/polls/{}'.format(poll_id)\n for attempt in range(5):\n try:\n r = requests.get(url)\n poll_options = r.json()['options']\n poll_votes = r.json()['votes']\n except ValueError:\n continue\n except TypeError:\n continue\n else:\n return poll_options, poll_votes\n else:\n self._add_to_chat_queue(\n \"Sorry, there was a problem talking to the strawpoll api. Maybe wait a bit and retry your command?\")", "def detail(request, year, month, day, slug, template_name=\"pollit/detail.html\"):\n bot_detection = False\n \n # These are bogus fields to try to detect a bot trying to vote. These\n # fields must be added to the form. Look in templates/pollit/detail.html \n # for example\n if request.POST and CHECK_FOR_BOTS:\n bogus_email = request.POST.get('email', None)\n bogus_username = request.POST.get('username', None)\n if bogus_email != 'valid_email' or bogus_username:\n bot_detection = True\n\n params = {\n 'pub_date__year': year,\n 'pub_date__month': datetime.datetime.strptime(month, '%b').month,\n 'slug': slug,\n }\n \n if MULTIPLE_SITES:\n params['sites__pk'] = settings.SITE_ID\n if day is not None:\n params['pub_date__day'] = day\n \n try:\n poll = Poll.objects.get(**params)\n except Poll.DoesNotExist, Poll.MultipleItemsReturned:\n raise Http404\n ip = get_client_ip(request)\n poll_choice_data_id = get_poll_choice_id_from_cookies(poll, request.COOKIES)\n \n errors = []\n poll_choice = None\n cookies_enabled = test_cookies_enabled(request.COOKIES)\n \n if 'choice' in request.POST and not bot_detection:\n if cookies_enabled and poll.user_can_vote(request.user, poll_choice_data_id, ip):\n try:\n poll_choice = poll.vote(request.POST['choice'], request.user, \n poll_choice_data_id, ip)\n # This is the same render to response as results. Cannot redirect because\n # we need to set a cookie on the users browser. 
Plus we already have \n # everything queried might as well use it\n response = render_to_response('pollit/results.html',\n {'poll': poll,\n 'has_voted': poll_choice is not None,\n 'user_choice': poll_choice},\n context_instance=RequestContext(request))\n response.set_cookie(get_poll_key(poll), poll_choice.id, \n max_age=POLL_CHOICE_DATA_COOKIE_MAX_AGE,\n domain=COOKIE_DOMAIN)\n return response\n except PollExpired:\n errors.append('The poll has expired.')\n elif not cookies_enabled:\n errors.append('Cookies must be enabled to vote.')\n\n poll_choice = poll.get_poll_choice(request.user, poll_choice_data_id, ip)\n \n response = render_to_response(template_name,\n {'poll': poll,\n 'has_voted': (poll_choice is not None),\n 'user_choice': poll_choice,\n 'errors': errors,\n 'must_login_to_vote':AUTHENTICATION_REQUIRED and \\\n not request.user.is_authenticated()},\n context_instance=RequestContext(request))\n add_cookies_enabled_test(response, request.COOKIES)\n return response", "async def votechannel_list(self, ctx):\n channels = await self.bot.db.execute(\n \"\"\"\n SELECT channel_id, voting_type FROM voting_channel WHERE guild_id = %s\n \"\"\",\n ctx.guild.id,\n )\n if not channels:\n raise exceptions.Info(\"There are no voting channels on this server yet!\")\n\n rows = []\n for channel_id, voting_type in channels:\n rows.append(f\"<#{channel_id}> - `{voting_type}`\")\n\n content = discord.Embed(\n title=f\":1234: Voting channels in {ctx.guild.name}\", color=int(\"3b88c3\", 16)\n )\n await util.send_as_pages(ctx, content, rows)", "def vote_for_poll(request, question_id):\n question = Question.objects.get(pk=question_id)\n if not(q.can_vote()):\n messages.error(request, \"poll expires\")\n return redirect('polls:index')\n return render(request, \"polls/details.html\", {\"question\": question})", "async def handle_list(message: Message):\n await list_subscriptions(message[\"chat\"][\"id\"])", "def index(request):\n\n # Fetches the latest 3 listings published\n listings = Listing.objects.order_by('-list_date').filter(is_published=True)[:3]\n context = {\n 'listings': listings,\n 'state_choices': state_choices,\n 'bedroom_choices': bedroom_choices,\n 'price_choices': price_choices\n }\n return render(request, \"pages/index.html\", context)", "async def list(self, ctx):\n cyphon = discord.utils.get(ctx.message.server.members, id=\"186835826699665409\")\n\n if self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n message = []\n message.append(\"```\\n\")\n if self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n if len(self.twitch_streams) > 0:\n for stream in self.twitch_streams:\n message.append(stream[\"NAME\"] + \"\\n\")\n else:\n message.append(\"No streams found!\")\n message.append(\"```\")\n output = ''.join(message)\n await self.bot.say(output)\n else:\n await self.bot.send_message(ctx.message.author, \"You don't have permission to execute that command.\")", "def voters_list_pretty(request, election):\n\n # for django pagination support\n page = int(request.GET.get('page', 1))\n limit = int(request.GET.get('limit', 50))\n q = request.GET.get('q','')\n \n order_by = 'user__user_id'\n\n # unless it's by alias, in which case we better go by UUID\n if election.use_voter_aliases:\n order_by = 'alias'\n\n user = get_user(request)\n admin_p = user_can_admin_election(user, election)\n\n categories = None\n eligibility_category_id = None\n\n try:\n if admin_p and can_list_categories(user.user_type):\n categories = 
AUTH_SYSTEMS[user.user_type].list_categories(user)\n eligibility_category_id = election.eligibility_category_id(user.user_type)\n except AuthenticationExpired:\n return user_reauth(request, user)\n \n # files being processed\n voter_files = election.voterfile_set.all().order_by('-uploaded_at')\n\n # load a bunch of voters\n # voters = Voter.get_by_election(election, order_by=order_by)\n voters = Voter.objects.filter(election = election).order_by(order_by).defer('vote')\n\n if q != '':\n if election.use_voter_aliases:\n voters = voters.filter(alias__icontains = q)\n else:\n voters = voters.filter(voter_name__icontains = q)\n\n voter_paginator = Paginator(voters, limit)\n voters_page = voter_paginator.page(page)\n\n total_voters = voter_paginator.count\n \n return render_template(request, 'voters_list', \n {'election': election, 'voters_page': voters_page,\n 'voters': voters_page.object_list, 'admin_p': admin_p, \n 'email_voters': VOTERS_EMAIL,\n 'limit': limit, 'total_voters': total_voters,\n 'upload_p': VOTERS_UPLOAD, 'q' : q,\n 'voter_files': voter_files,\n 'categories': categories,\n 'eligibility_category_id' : eligibility_category_id})", "def get_queryset(self):\n user_requested = self.kwargs['user']\n self.check_object_permissions(self.request, user_requested)\n return Poll.objects.filter(created_by__username=user_requested)", "def index(request):\n\n # todo implement\n # create a watchlist for a user if Logged in and the watchlist doesn't yet exist\n if request.user.is_authenticated and \"watchlist\" not in request.session:\n request.session[\"watchlist\"] = []\n \n return render(request, \"auctions/index.html\", {\"listings\": Listing.objects.filter(isActive=True)})", "async def poll(self) -> List[Message]:\n if not self._session:\n await self._create_session()\n \n res = await self._session.get(self._network.SERVER_ADDR + '/api/poll')\n obj = await res.json()\n self._network.connected_robots = obj['robots']\n ret = []\n for m in obj['messages']:\n ret.append(Message.from_dict(m))\n return ret", "def _check_ongoing_poll(view):\n meeting_path = resource_path(view.request.meeting)\n ongoing = view.catalog_search(type_name = 'Poll',\n path = meeting_path,\n workflow_state = 'ongoing')\n if ongoing:\n raise HTTPForbidden(_(u\"access_during_ongoing_not_allowed\",\n default = u\"During ongoing polls, this action isn't allowed. 
\"\n \"Try again when polls have closed.\"))", "def get(self, request):\n listings = self.get_queryset().all().order_by(\"start_date\")\n username = None\n auth = request.user.is_authenticated\n if auth:\n username = request.user.username\n return render(request, 'listings/list.html', {'listings': listings,\n 'username': username,\n 'auth': auth})", "def _list(self, req):\n list_type = None\n status_prefix = 'STATUS LIST '\n if req:\n list_type = req.pop(0)\n if list_type and list_type == SPECTATE:\n games = self.server.get_unfinished_games()\n status_prefix += SPECTATE + ' '\n else:\n games = self.server.get_open_games()\n self.send_line(status_prefix + ' '.join(\n [str(g.id) for g in games if not self.game or self.game is not g]))", "def _list(self, irc, msg, args):\n # TODO: write _list; use local.punny modules print/list if avail\n pass", "def show_listings(offset):\n items = Item.query.filter(Item.status == \"listed\").order_by(desc(Item.date_listed)).offset(offset).limit(LIMIT).all()\n return jsonify(data=[item.serialize for item in items])\n #return render_template('primary_user_interface.html', items=items)", "def index(request):\n return redirect('polls:index')", "def list(self):\n return self.request(\"GET\")", "async def list(self, ctx):\n\n query = {\"resolved\": False, \"user_id\": ctx.author.id}\n count = await self.bot.mongo.db.reminder.count_documents(query)\n\n async def get_reminders():\n async for x in self.bot.mongo.db.reminder.find(query).sort(\"expires_at\", 1):\n yield Reminder.build_from_mongo(self.bot, x)\n\n def format_item(i, x):\n name = f\"{x._id}. {discord.utils.format_dt(x.expires_at, 'R')}\"\n return {\"name\": name, \"value\": textwrap.shorten(x.event, 512), \"inline\": False}\n\n pages = ViewMenuPages(\n source=AsyncEmbedFieldsPageSource(\n get_reminders(),\n title=\"Reminders\",\n format_item=format_item,\n count=count,\n )\n )\n\n try:\n await pages.start(ctx)\n except IndexError:\n await ctx.send(\"No reminders found.\")", "def get_prefetched_queryset(self, *args, **kwargs):\n return (\n super()\n .get_prefetched_queryset(*args, **kwargs)\n .select_related(\"user\", \"poll\")\n .prefetch_related(\"votes\")\n )", "def get_queryset(self):\n #Old get_queryset() method.\n #Return last 5 published polls\n #return Poll.objects.order_by('-pub_date')[:5]\n\n #New get_queryset() method.\n #return Poll.objects.filter(pub_date__lte=timezone.now()).order_by('-pub_date')[:5]\n return Poll.objects.annotate(num_choices=Count('choice')).filter(pub_date__lte=timezone.now(), num_choices__gte=2).order_by('-pub_date')[:5]", "def get_voted_players():\n\n context = {}\n\n # Init context\n context[\"url\"] = flask.request.path\n\n # Database\n db = quiplash.model.get_db()\n\n cur = db.execute(\"SELECT * FROM votes\",)\n votes = cur.fetchall()\n\n players = {}\n for vote in votes:\n players[vote['name']] = True\n\n context[\"voters\"] = list(players.keys())\n\n return flask.jsonify(**context)", "def get_all_votes(self) -> Response:\n response = self.client.get(\n path=self.specific_question_url,\n format='json'\n )\n return response", "def chatlist(request):\n\n chats = get_chat_list()\n chat_list = pagination(request, chats, CHATS_PER_PAGE)\n\n dic = {'chatlist': chat_list}\n return render_to_response('whatsapp/chatlist.html', dic, context_instance=RequestContext(request))", "def list_talk(request, template=\"core/list_talk.html\"):\n response = {\n 'morning': Talk.objects.at_morning(),\n 'afternoon': Talk.objects.at_afternoon(),\n }\n return direct_to_template(request, 
template, response)", "def list(cls, **kwargs):\n response = Yola().list_subscriptions(**kwargs)\n return [cls(**sub) for sub in response['results']]", "def get_votes():\n\n context = {}\n\n # url\n context[\"url\"] = flask.request.path\n\n # Database\n db = quiplash.model.get_db()\n\n cur = db.execute(\"SELECT * FROM votes\",)\n output = cur.fetchall()\n\n context[\"votes\"] = output\n\n return flask.jsonify(**context)", "def list(cls, limit=None, starting_after=None):\n response = Requester.get(cls.endpoint, params={'limit': limit,\n 'starting_after': starting_after})\n return List(response, WebhookEndpoint, limit)", "def polling_call(self) -> global___Snippet.ClientCall:", "async def poll(self, ctx, choice=None):\n\n if choice is None or choice.lower() in (\"online\", \"voice\"):\n suggestions = get_suggestions(get_users(ctx, choice))\n\n if suggestions:\n poll_id = create_strawpoll(\"What to play?\", suggestions)\n\n if poll_id:\n await self.bot.say(\"Here's your strawpoll link: https://www.strawpoll.me/{}\".format(poll_id))\n else:\n await self.bot.say(\"Phew! You have way too many games to create a poll. You should try `{}game suggest` instead.\".format(ctx.prefix))\n else:\n await self.bot.say(\"You have exactly **zero** games in common, go buy a 4-pack!\")\n else:\n await self.bot.say(\"Please enter a valid filter -> either use `online` (default) for all online users or `voice` for all users in a voice channel\")", "def list(self, request):\n\n # Compute the time series information\n return Response([\n {'date': d.strftime(\"%Y-%m-%d\"), 'count': c}\n for (d,c) in Vote.objects.response_timeseries()\n ])", "def get_all_votes(self, poll_key):\n poll_data = self.get_poll(poll_key)\n part_keys = poll_data['participants']\n results = []\n for part_key in part_keys:\n part_choice = self.get_participant(part_key)['choice']\n results.append(part_choice)\n return results", "async def listlaunches(self, ctx, *args):\n if not can_answer(ctx):\n return\n num = 5\n for arg in args:\n if arg.isdigit():\n num = int(arg)\n launches = launchlibrary.Launch.fetch(api, status=(1,2))[:num]\n if launches[0].agency != None:\n embedcolor = discord.Colour(await get_color(launches[0].agency.id))\n else:\n embedcolor = discord.Colour(5592405)\n msg = discord.Embed(title=\"Listing next launches: \", colour=embedcolor)\n IDs = []\n for launch in launches:\n launchtime = launch.net\n utc = datetime.now(timezone.utc)\n T = chop_microseconds(launchtime - utc)\n if launch.status == 1:\n value = \"T-: {0}\".format(T)\n else:\n value = \"T-: {0}; {1}\".format(T, launch.get_status().name)\n msg.add_field(name=launch.name, value=value, inline=False)\n IDs.append(launch.id)\n footer = 'IDs: ' + ', '.join(str(x) for x in IDs)\n msg.set_footer(text=footer)\n await ctx.send(embed=msg)", "def list_speaker(request, template=\"core/list_speaker.html\"):\n speakers = Speaker.objects.all()\n response = { 'speakers': speakers, 'show_all_info': False }\n return direct_to_template(request, template, response)", "def getPollListRaw(self, jsonResponsePayload):\n logger.info('parsing:\\n%s' % jsonResponsePayload)\n obj = json.loads(jsonResponsePayload)\n respList = obj['pdResponse']['demands']['demand'][0]['polls']['poll']\n logger.info('Returning:\\n%s' % str(respList))\n return respList", "def list():\n data = getInstaData()\n return render_template(\"list.html\", data=data)", "def list(request, template='events/list.html'):\n return render(request, template, {\n 'events': Event.objects.get_upcoming().order_by('start_date'),\n 
})", "def query_list(self):\r\n self.plot_list.clear()\r\n self.settings.send_to_databox_header(self.plot_list)\r\n \r\n self.label_list_status.set_text('Getting frequencies and powers.')\r\n self.window.process_events()\r\n \r\n fs = self.api.get_list_frequencies()\r\n ps = self.api.get_list_powers()\r\n if fs == None or ps == None: return\r\n \r\n if not len(fs) == len(ps):\r\n print(\"ERROR query_list(): List lengths do not match. len(fs)=\"+str(len(fs))+' len(ps)='+str(len(ps)) )\r\n \r\n N = len(fs)\r\n self.plot_list['n'] = _n.linspace(0, N-1, N)\r\n self.plot_list['f_Hz'] = fs\r\n self.plot_list['P_dBm'] = ps\r\n \r\n self.label_list_status.set_text(str(N) + ' points in list memory')\r\n self.plot_list.plot()\r\n self.button_send_list.disable()\r\n self.window.process_events()", "def get_voted_offices(self):\n con = self.db.init_db()\n cur = con.cursor()\n query = \"SELECT distinct office_id from vote;\"\n cur.execute(query)\n data = cur.fetchall()\n office_list = []\n for id in data:\n office_id = id[0]\n office = dict(\n office_id=office_id\n\n )\n office_list.append(office)\n return dict(status=200, data=office_list)", "async def list(self, ctx: MyContext):\n if ctx.subcommand_passed is None:\n await ctx.send_help(\"wormhole list\")", "def get_all_votes(self) -> List[dict]:", "def all_plans(request):\n\n plans = Plan.objects.all()\n\n context = {\n 'plans': plans,\n }\n\n return render(request, 'plans/plans.html', context)", "def slides():\n return Slide.objects.filter(live=True).order_by('order')", "def list_talks(request, page=1, lib=None):\n count = lib.count_talks()\n return render_to_response('talks/list_talks.html', {\n 'talks': lib.list_talks(page, items_per_page), \n 'count': count, \n 'page': int(page),\n 'listpages': compute_nb_pages(count, items_per_page),\n 'is_talk': True,\n }, context_instance=RequestContext(request))", "def show_list():\n\n response = []\n docs = SUPERHEROES.stream()\n for doc in docs:\n response.append(doc.to_dict())\n return jsonify(response), 201", "async def replist_command(self, ctx):\n rep_model = (\n await ReputationPoints.filter(guild_id=ctx.guild.id)\n .order_by(\"-points\")\n .limit(10)\n )\n leaderboard = \"\\n\".join(\n [\n f\"**{i+1}.** {model.member_name} - {model.points}\"\n for (i, model) in enumerate(rep_model)\n ]\n )\n # print(leaderboard)\n embed = Embed(\n description=leaderboard if len(rep_model) else \"No data found\",\n color=Color.blurple(),\n timestamp=datetime.utcnow(),\n )\n embed.set_footer(text=f\"Requested by {ctx.author.name}\")\n embed.set_author(\n name=f\"{ctx.guild.name} Reputation Leaderboard\", icon_url=ctx.guild.icon_url\n )\n await ctx.send(embed=embed)", "def curr_list(request):\n if request.method == 'GET':\n all_rates = Currencies.objects.all()\n serializer = CurrenciesSerializer(all_rates, many=True)\n return Response(serializer.data)", "def laptops_list(request):\n laptops = Laptop.objects.filter(retired=False)\n return render(request, 'laptops/laptops_list.html', {\"laptops\": laptops})", "def get_online_list(self) -> list:\n return self._get_json(self._URLS['GetOnlineList'])[1:]", "def get_all_poll_data():\n\trcp_poll_race_dict = get_rcp_poll_data('http://www.realclearpolitics.com/epolls/latest_polls/') # realclearpolotics poll data\n\treturn rcp_poll_race_dict", "def listVotes(bot, trigger):\n\n plugins_info = bot.db.get_plugin_value(PLUGIN_INFO, \"list\", [])\n if (plugins_info != []):\n plugins_info = json.loads(plugins_info)\n\n id = 0\n for voteName in plugins_info:\n id += 1\n vote_info = 
json.loads(bot.db.get_plugin_value(PLUGIN_NAME, voteName))\n\n id_str = \"%d\" % (id)\n\n if (vote_info[\"type\"] == \"single\"):\n positiveVotes_str = \"%d\" % (vote_info[\"positiveVotes\"])\n negativeVotes_str = \"%d\" % (vote_info[\"negativeVotes\"])\n\n bot.say(VOTE_FORMAT_SINGLE % {\n \"id\" : id_str,\n \"voteName\" : voteName,\n \"proposer\" : vote_info[\"proposer\"],\n \"created\" : vote_info[\"created\"],\n \"positiveVotes\" : positiveVotes_str,\n \"negativeVotes\" : negativeVotes_str,\n \"note\" : vote_info[\"note\"]\n })\n else:\n options = []\n\n for key_id, option_dict in vote_info[\"multi_value\"].items():\n positiveVotes_str = \"%d\" % (option_dict[\"positiveVotes\"])\n negativeVotes_str = \"%d\" % (option_dict[\"negativeVotes\"])\n\n options.append(MULTI_TEMPLATE % {\n \"key_id\" : key_id,\n \"name\" : option_dict[\"name\"],\n \"positiveVotes\" : positiveVotes_str,\n \"negativeVotes\" : negativeVotes_str\n })\n options_str = \", \".join(options)\n\n bot.say(VOTE_FORMAT_MULTI % {\n \"id\" : id_str,\n \"voteName\" : voteName,\n \"proposer\" : vote_info[\"proposer\"],\n \"created\" : vote_info[\"created\"],\n \"note\" : vote_info[\"note\"],\n \"options\" : options_str\n })\n\n if (id == 0):\n bot.reply(\"Lo siento, pero no hay votaciones.\")", "def test_list_past_meeting_polls(self):\n pass", "def newsList(request):\n\n news_count = New.objects.count() # Pocet vsech zaznamu novinek\n news_list = New.objects.all().order_by(\"date\") # Sort by date ... and only part of list\n # misto vsech zaznamu ziskat jen ty v intervalu start - stop -> API\n\n pictureOfWeek = PhotoOfWeek.objects.last()\n context = {'news_list': news_list, 'news_count': news_count, 'pictureOfWeek': pictureOfWeek}\n return render(request, 'news/newsList.html', context)", "def overview():\r\n # Update the list of languages allowed on the site, \r\n # except for the language used by your users at that time.\r\n if request.method == 'POST':\r\n lan_object = Languages()\r\n data = lan_object.update()\r\n message = lan_object.message\r\n status = lan_object.status\r\n \r\n # Gets documents from the collections of all languages \r\n languages_list = g.languages_object.get_languages(1)\r\n language_chosen = g.languages_object.get_languages(2)\r\n return render_template( '{}/index.html'.format(MODULE_DIR), **locals())", "def do_list(self, args):\n if args.option == 'config':\n print(list_config())\n if args.option == 'queries':\n for k,v in list_queries().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'jobs':\n update_jobs(CLI_GLOBALS.ENGAGEMENT)\n for k,v in list_jobs().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'results':\n for i in list_results():\n print(i)\n if args.option == 'key':\n for k,v in list_key().items():\n print(k, \":\", json.dumps(v, indent=4))\n if args.option == 'engagement':\n print(list_engagement())", "def view_status(request, pk):\n\n status = Status.objects.get(pk=pk)\n user_profile = get_users_profile(request.user.id)\n\n context = {\n 'news_feed': [status],\n 'user_profile': user_profile,\n }\n\n return render(request, 'status/view_status.html', context)", "def get_queryset(self):\n return Question.objects.all().order_by(\"-allVote\") #แสดงคำถาม", "async def pickemsvotes(self, ctx: commands.Context):\n if str(ctx.guild.id) not in self.all_pickems:\n await ctx.send(_(\"This server does not have any pickems setup.\"))\n return\n msg = _(\"You have voted on the following games:\\n\")\n timezone = await 
self.pickems_config.guild(ctx.guild).pickems_timezone()\n for game_id, pickem in self.all_pickems[str(ctx.guild.id)].items():\n if str(ctx.author.id) in pickem.votes:\n vote = pickem.votes[str(ctx.author.id)]\n game_start = utc_to_local(pickem.game_start, timezone)\n time_str = game_start.strftime(\"%B %d, %Y at %I:%M %p %Z\")\n msg += f\"{pickem.away_team} @ {pickem.home_team} {time_str} - {vote}\\n\"\n msgs = []\n for page in pagify(msg):\n if ctx.channel.permissions_for(ctx.me).embed_links:\n em = discord.Embed(\n title=_(\"Pickems votes in {guild}\").format(guild=ctx.guild.name),\n description=page,\n )\n msgs.append(em)\n else:\n msgs.append(page)\n await BaseMenu(source=SimplePages(msgs)).start(ctx=ctx)", "def get_political_handles(list_file=['files/inc_handles.txt', 'files/bjp_handles.txt'], get_online=True):\n ls_fin = []\n for i in range(1, 23):\n url = \"https://www.socialbakers.com/statistics/twitter/profiles/india/society/politics/page-\" + \\\n str(i) + \"/?showMoreList-from=1&do=platformList-renderAjax&json\"\n html = urllib.request.urlopen(url)\n soup = BeautifulSoup(html, 'html.parser')\n intm = soup.find_all('h2')\n for y in intm:\n for x in y.find_all('span'):\n ls_fin.append(x.text.split(\n '(')[-1].replace(')', '').replace('@', ''))\n for file in list_file:\n with open(file) as f:\n for i in f:\n if i:\n ls_fin.append(i.strip())\n logging.info(str(len(ls_fin)) + \" IDs crawled\")\n return (ls_fin)", "def Poll(request, document_id):\n data = {'android': _IsAndroid(request), 'document_id': document_id}\n context = template.RequestContext(request, data)\n return shortcuts.render_to_response('poll.html', context)", "def ls(self, count = 200):\n return self._manager.ls_notes(self['id'], count)", "def get(self):\n return self.render_template('index.html', quote=PollHandler.quote)", "def cli_list(ctx):\n\n _list_spiders(ctx)", "async def list(self, ctx):\n\n cursor = await db.execute(\"Select MessageID, TimeEnding, Members, ChannelID from Giveaway \"\n \"where GuildID = ? 
and Ended = ?\", (ctx.guild.id, False))\n result = await cursor.fetchall()\n\n for i, tup in enumerate(result):\n try:\n msg = await ctx.guild.get_channel(tup[3]).fetch_message(tup[0])\n tup = list(tup)\n tup[0] = msg\n result[i] = tup\n except:\n result.remove(tup)\n await db.execute(\"Delete from Giveaway where MessageID = ?\", (tup[0],))\n await db.commit()\n\n if not result:\n return await send_embed(ctx, \"No active giveaways on this server.\", negative=True)\n\n embeds = []\n fields = []\n\n for i, tup in enumerate(result, start=1):\n fields.append((str(tup[0].id),\n f\"Prize: {tup[0].embeds[0].author.name}\\n\"\n f\"{tup[2]} possible winners\\n\"\n f\"Ends at {datetime.utcfromtimestamp(tup[1]).strftime('%Y-%m-%d %H:%M:%S')}\"))\n\n if i % 10 == 0 or i == len(result):\n embed = discord.Embed(\n colour=discord.Colour.blue(),\n title=\"Active Giveaways\"\n )\n\n for field in fields:\n embed.add_field(name=field[0], value=field[1], inline=False)\n\n embeds.append(embed)\n fields = []\n\n await self.bot.paginate(ctx, embeds)", "def disp_watchlist(caller):\n watchlist = caller.db.watching or []\n if not watchlist:\n caller.msg(\"Not watching anyone.\")\n return\n table = []\n for ob in sorted(watchlist, key=lambda x: x.key):\n name = ob.key.capitalize()\n if ob.player_ob.is_connected and not ob.player_ob.db.hide_from_watch:\n name = \"{c*%s{n\" % name\n table.append(name)\n caller.msg(\n \"Currently watching (online players are highlighted):\\n%s\"\n % \", \".join(table),\n options={\"box\": True},\n )\n if caller.db.hide_from_watch:\n caller.msg(\"You are currently in hidden mode.\")\n return", "def list(self, status: Optional[str] = None) -> SessionList:\n filter = {\"status\": status} if status else None\n return self._list(list_cls=SessionList, resource_cls=Session, method=\"GET\", filter=filter)", "def getPollsJSON(self, parentID='0', start='0', end='0'):\n jsonPayload = pdReq.GET_POLLS_JSON % (self.UID, self.userCode, start, end, parentID)\n logger.info('calling fetchInfo with %s' % jsonPayload)\n return self.fetchInfo(jsonPayload)", "def vp():\n if g.active.is_empty:\n txt = F('advise search') if g.model.is_empty else F('advise add')\n g.message = F('pl empty') + \" \" + txt\n\n else:\n g.browse_mode = \"normal\"\n g.model.songs = g.active.songs\n g.message = F('current pl')\n\n g.content = generate_songlist_display(zeromsg=g.message)", "def get_tv_listings():\n\n #get user email from session\n email = session.get(\"current_user\")\n\n if email: \n #get user_id to get access to favorites table and users table\n user = User.get_user_with_email(email)\n\n #use the backref relationship to find the titles of the user's favorite shows and save in a list\n favorite_titles = []\n for favorite in user.favorites:\n favorite_titles.append(favorite.show.title)\n\n #create list that will contain dictionaries with show title and a list of dictionaries regarding tv listings\n listings = []\n\n for title in favorite_titles:\n show = {}\n #convert title from unicode to string to run API call\n title_str = str(title)\n series_id = onconnect_search_series_id(title_str)\n print \"\\n\\n\", series_id, \"\\n\\n\"\n airings = onconnect_search_airings(series_id)\n #add show title to dictionary, add airings object to dictionary\n show[\"title\"] = title_str\n if airings:\n show[\"listings\"] = airings\n else:\n show[\"listings\"] = [\"empty\"]\n\n #add dictionary to the listings list\n listings.append(show)\n time.sleep(1)\n\n \n listings = jsonify(listings)\n\n return listings\n\n else:\n flash(\"Please 
login first!\")\n return redirect('/login')", "def list(self, request, *args, **kwargs):\n question_list = self.get_queryset().values_list('question', flat=True)\n queryset = Question.objects.filter(id__in=question_list).order_by('key')\n\n page = self.paginate_queryset(queryset)\n if page is not None:\n serializer = self.get_serializer_class()(page, many=True)\n return self.get_paginated_response(serializer.data)\n\n serializer = self.get_serializer_class()(queryset, many=True)\n return Response(serializer.data)", "def get(self, request):\n return Response(\"Dashboard Listing Page\", status=status.HTTP_200_OK)", "def get_queryset(self):\n #.1 below code was showing future poll/questions\n #.1 return Question.objects.order_by('-pub_date')[:5]\n\n #re-defining\n \"\"\"\n Return the last five published questions (not including those set to be\n published in the future).\n \"\"\" \n #imported timezone\n \n return Question.objects.filter(pub_date__lte=timezone.now()).order_by('-pub_date')[:5]", "def watchlists(request):\n \"\"\"\n Alternative to @login_required decorator: manually test with:\n request.user.is_authenticated\n \"\"\"\n assert isinstance(request, HttpRequest)\n\n # Get all of the user's watchlists\n watchlists = WatchList.objects.filter(user=request.user).all()\n \n # Store the stocks in each watchlist in a dictionary\n # Each key is the watchList_name from the user's watchlists\n # Each value is a list of Stocks (as StockList model objects) \n # present in the watchlist\n stocks = []\n counter = 0\n\n for w in watchlists:\n stocks.append([])\n for stock in w.stockResults.all():\n # No need to check if key is in the dict, since \n # it is added above\n stocks[counter].append(stock)\n counter += 1\n\n print(f'Watchlists:{watchlists}\\tStocks:{stocks}')\n\n if watchlists.count() != 0 and len(stocks) != 0:\n watchlist_stocks = zip(watchlists, stocks)\n else:\n watchlist_stocks = None\n\n context = {\n 'title':'Watchlists',\n 'message':'Your Watchlist page.',\n 'year':datetime.now().year,\n 'user': request.user,\n 'data': watchlist_stocks,\n }\n\n return render(\n request,\n 'app/watchlists.html',\n context\n )", "def list_webhooks(self):\n response = requests.get(\n '%spreferences/notifications' % self._url,\n **self._auth\n )\n\n if response.status_code == 401:\n raise MoipAuthorizationException(response.json())\n else:\n pretty_print(response.json())\n return response.json()", "def _create_api_ulr_list(self) -> List[str]:\n api = Setup.pickpoint_api\n return [\n f\"https://api.pickpoint.io/v1/reverse/?key={api}&lat={lat}&lon={lon}\"\n f\"&accept-language=en-US\"\n for lat, lon in self.locations\n ]", "def privileged_polling(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"privileged_polling\"), kwargs)", "def current_user_playlists(self, limit=50, offset=0, **kwargs):\n return self._get(API.MY_PLAYLISTS.value, limit=limit, offset=offset, **kwargs)", "def get_queryset(self):\n\n userteammates = TeamMate.objects.filter(user=self.request.user)\n teams = []\n for teammateobject in userteammates:\n teams.append(teammateobject.team)\n\n\n #return Vote.objects.filter(team__in=teams)\n return Vote.objects.filter(choice__poll=1)", "def __init__(self, poll, question, options, total_votes, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.poll = poll\n self.question = question\n self.options = options\n self.total_votes = total_votes", "def list(default_view):\n ListCommandExecutor(default_view).list()", "def watch_policy_list(self, **kwargs):\n\n 
all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_policy_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/policies'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "async def list(\n self,\n ctx\n ):\n\n # Skipping non-eventer users\n if not self._have_permission(ctx.author, ctx.guild):\n await ctx.send(embed=decoration.embeds.ERRORS[\"NO_PERM\"])\n return\n\n # Getting events\n event_types = connector.getAllEventTypes(guild_id=ctx.guild.id)\n if len(event_types[::]) == 0:\n await ctx.send(content=\"```md\\nВ данный момент не создано никаких видов ивентов```\")\n return\n\n await ctx.send(content=f\"Типы ивентов ({len(event_types)}):\")\n\n for event_type in event_types:\n url = f'https://discord.com/channels/{event_type.guild_id}/{event_type.channel_id}/{event_type.message_id}'\n _message = Embed(\n title=f\"**{event_type.title}**ㅤ{event_type.emoji}\",\n description=f\"Описание: {event_type.description}\\nСообщение: [click]({url})\\nID Вида ивента: {str(event_type.type_id)}\\nАктивирован: {'да' if event_type.enabled else 'нет'}\",\n color=0x58b9ff\n )\n await ctx.channel.send(embed=_message)", "def current_listing(request, auction_id):\n \n # if user is not logged in, display an error message\n if not request.user.is_authenticated:\n return render(request, 'auctions/apology.html', {\n 'message': \"You must be logged in to see this listing.\"\n })\n \n else:\n # query for watchlist status of the selected listing\n watchlist_item = Watchlist.objects.filter(user = request.user, auction_listing_id = auction_id)\n # query for the selected listing's data in the database\n listing = Auction_listing.objects.get(pk = auction_id)\n # if data is submitted\n if request.method == 'POST':\n # if user submits form via the watchlist button\n if request.POST.get('Watchlist_delete') or request.POST.get('Watchlist_add'):\n # check whether listing is on watchlist, if not add it, if yes remove it from watchlist\n if watchlist_item:\n 
watchlist_item.delete()\n else:\n watchlist = Watchlist(user = request.user, auction_listing_id = auction_id)\n watchlist.save()\n # if user submits form via the place bid button\n elif request.POST.get('min_bid') or request.POST.get('min_price'):\n # if previous bids were already made\n if request.POST.get('min_bid'):\n # if user provided amount is greater than the current highest bid\n if Decimal(request.POST.get('min_bid')) > Bid.objects.filter(auction_listing_id = auction_id).aggregate(Max('bid')).get('bid__max'):\n bid = Bid(user = request.user, auction_listing_id = auction_id, bid = request.POST.get('min_bid'))\n bid.save()\n # return an error message if user tries to bypass HTML verification\n else:\n return render(request, 'auctions/apology.html', {\n 'message': \"Looks you tried to bypass the HTML verification. Unfortunately, your hacker level is too low to break this site.\"\n })\n # if no bids were made yet \n elif request.POST.get('min_price'):\n # if user provided amount is greater than or equal to the starting price\n if Decimal(request.POST.get('min_price')) >= listing.price:\n bid = Bid(user = request.user, auction_listing_id = auction_id, bid = request.POST.get('min_price'))\n bid.save()\n # return an error message if user tries to bypass HTML verification\n else:\n return render(request, 'auctions/apology.html', {\n 'message': \"Looks you tried to bypass the HTML verification. Unfortunately, your hacker level is too low to break this site.\"\n })\n # if user submits form via the post comment button \n elif request.POST.get('post'):\n form = CommentForm(request.POST)\n # verify form is valid\n if form.is_valid():\n instance = form.save(commit=False)\n instance.user = request.user\n instance.auction_listing_id = auction_id\n instance.save()\n # else return an error message\n else:\n return render(request, 'auctions/apology.html', {\n 'message': \"Form is invalid.\"\n })\n # if user submits form via the close auction button\n elif request.POST.get('close'):\n listing.active = False\n listing.save()\n \n return HttpResponseRedirect(reverse(\"current_listing\", kwargs={'auction_id': auction_id }))\n \n # if reached via URL\n else:\n form = CommentForm()\n # check if bid exists for current auction listing\n if Bid.objects.filter(auction_listing_id = auction_id).aggregate(Max('bid')).get('bid__max'):\n # query for the current bid in current listing\n current_bid = round((Bid.objects.filter(auction_listing_id = auction_id).aggregate(Max('bid')).get('bid__max')), 2)\n # find the user who made the current bid\n max_price = Bid.objects.get(auction_listing_id = auction_id, bid = Bid.objects.filter(auction_listing_id = auction_id).aggregate(Max('bid')).get('bid__max'))\n winner = max_price.user\n # if not bids were made, initiliaze both variables to 0 \n else:\n current_bid = 0\n winner = 0\n return render(request, 'auctions/current_listing.html', {\n 'listing': listing,\n 'price': listing.price,\n 'watchlist': watchlist_item,\n \"bid_count\": Bid.objects.filter(auction_listing_id = auction_id).count(),\n \"min_bid\": current_bid + Decimal(0.01),\n \"current_bid\": current_bid,\n \"winner\": winner,\n \"form\": form,\n \"comments\": Comment.objects.filter(auction_listing_id = auction_id),\n \"user\": request.user\n })", "def list(cls):\n return cls().requests.get('plan')" ]
[ "0.68773824", "0.67399997", "0.63377434", "0.6063946", "0.5996496", "0.5898129", "0.5813151", "0.5798766", "0.56912804", "0.5689598", "0.5687956", "0.56795925", "0.5662718", "0.5646899", "0.5646899", "0.5614585", "0.55864877", "0.5577538", "0.55622435", "0.55242187", "0.54779744", "0.5453258", "0.5446621", "0.5431568", "0.54238", "0.5411924", "0.54039454", "0.5382069", "0.53796685", "0.53411424", "0.5316617", "0.5312747", "0.529521", "0.52934134", "0.5288308", "0.5288099", "0.52842414", "0.5256932", "0.52456445", "0.5243724", "0.5237112", "0.52300525", "0.5227887", "0.521906", "0.52113664", "0.51997674", "0.51928246", "0.5185567", "0.5171897", "0.5156777", "0.5152957", "0.51519", "0.51476705", "0.51456743", "0.5117764", "0.51039296", "0.50997645", "0.5099395", "0.5097714", "0.5090299", "0.5079791", "0.5078326", "0.50743955", "0.5068712", "0.5064661", "0.506256", "0.50578743", "0.5057474", "0.5050353", "0.5046944", "0.50408596", "0.5037775", "0.5033434", "0.5031397", "0.5023348", "0.50179154", "0.50107646", "0.5010031", "0.500856", "0.4999726", "0.49918932", "0.49828565", "0.49827108", "0.4969499", "0.49680683", "0.49632424", "0.4958144", "0.49559826", "0.49513307", "0.49506366", "0.494801", "0.49389666", "0.49348417", "0.4931229", "0.49300975", "0.4927394", "0.4923633", "0.49232864", "0.48961574", "0.4894041" ]
0.66753453
2
[channel] Vote on a poll. Channel is only needed if used in a PM.
def vote(self, irc, msg, args, channel, pid, yaynay): if yaynay not in ['yay', 'nay']: irc.error("Valid Answers are 'yay' or 'nay'.") return if channel in self.polls.keys(): if self.polls[channel][pid]['concluded']: irc.reply("Poll #%s is finished, it does not accept updates." % pid) return if self._vote(irc, channel, msg.nick, pid, yaynay): irc.reply("Successfully voted on %s" % self.polls[channel][pid]['question']) else: log.debug('Not dumping due to no change.') else: irc.error("'%s' has no polls." % channel)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def votes(self, irc, msg, args, channel, pid):\n if channel and msg.args[0] in irc.state.channels:\n if msg.args[0] != channel:\n if ircdb.checkCapability(msg.prefix, 'admin') or ircdb.checkCapability(msg.prefix, 'owner'):\n irc.error(\"Not Implemented\")\n else:\n irc.errorInvalid('argument', channel)\n elif msg.args[0] == channel:\n irc.error(\"Not Implemented\")", "async def _vote_count(\n self, ctx: Context, *, channel: discord.TextChannel = None\n ):\n\n guild: discord.Guild = ctx.guild\n\n if not channel:\n channel = await self.get_vote_channel(guild)\n if isinstance(channel, str):\n return await ctx.send(channel)\n\n history = await channel.history(oldest_first=True).flatten()\n if len(history) > 100:\n return await ctx.send(_(\n \"I couldn't identify a voting channel. Please specify one explicitly.\"\n ))\n else:\n history = await channel.history(oldest_first=True).flatten()\n if len(history) > 100:\n return await ctx.send(_(\n \"That channel has too many messages!\"\n \" Please ask a host for manual vote count.\"\n ))\n\n if len(history) < 1:\n return await ctx.send(_(\"{} is empty.\").format(channel.mention))\n\n user_votes = {}\n player_role = guild.get_role(\n await self.config.guild(guild).player_id()\n )\n\n for message in history:\n author = message.author\n if player_role not in author.roles:\n continue\n vote = self.get_vote_from_message(message)\n if not vote:\n continue\n user_votes[f\"{author.name}#{author.discriminator}\"] = vote\n\n user_votes = await self.get_non_voters(guild, user_votes)\n\n votes = {}\n for user in user_votes:\n val = user_votes[user].capitalize()\n try:\n votes[val].append(user)\n except KeyError:\n votes[val] = [user]\n\n # max votes first\n votes = dict(sorted(\n votes.items(), key=lambda item: len(item[1]), reverse=True\n ))\n\n # Pop and add stuff back to dict for ordering purpose.\n try:\n votes[\"VTNL\"] = votes.pop(\"Vtnl\")\n except KeyError:\n pass\n try:\n votes[\"No vote\"] = votes.pop(\"No vote\")\n except KeyError:\n pass\n\n txt = \"\"\n\n for i, vote in enumerate(votes, start=1):\n voters = votes[vote]\n\n if vote == \"VTNL\":\n txt += _(\"\\n\\n**{}** - {} ({})\").format(vote, len(voters), \", \".join(voters))\n elif vote == \"No vote\":\n txt += _(\"\\n\\n**Not voting** - {} ({})\").format(len(voters), \", \".join(voters))\n else:\n txt += _(\"\\n{}. 
**{}** - {} ({})\").format(i, vote, len(voters), \", \".join(voters))\n\n title = _(\"Vote Count\")\n\n embed = discord.Embed(\n color=0x00CDFF, title=title,\n description=_(\"__Counting from {} channel.__\\n\\n{}\").format(\n channel.mention, txt.strip()\n )\n )\n\n try:\n await ctx.send(embed=embed)\n except discord.Forbidden:\n await ctx.send(\n f\"**{title}**\\n\\n__Counting from {channel.mention}\"\n f\" channel.__\\n\\n{txt.strip()}\"\n )", "def receive_poll_answer(self, update, context):\n answer = update.poll_answer\n poll_id = answer.poll_id\n selected_options = answer.option_ids\n\n timeout = self.DEFAULT_DELETE_TIMEOUT\n vote_count = self.DEFAULT_VOTE_COUNT\n chat_data = get_chat(context.bot_data[poll_id][\"chat\"])\n if chat_data:\n timeout = chat_data.delete_timeout or self.DEFAULT_DELETE_TIMEOUT\n vote_count = chat_data.vote_count or self.DEFAULT_VOTE_COUNT\n\n if len(selected_options) < 1:\n context.bot_data[poll_id][\"count\"][\n context.bot_data[poll_id][\"voters\"][update.effective_user.id]\n ] -= 1\n return\n\n if selected_options[0] == 0:\n context.bot_data[poll_id][\"count\"][\"yes\"] += 1\n context.bot_data[poll_id][\"voters\"][update.effective_user.id] = \"yes\"\n elif selected_options[0] == 1:\n context.bot_data[poll_id][\"count\"][\"no\"] += 1\n context.bot_data[poll_id][\"voters\"][update.effective_user.id] = \"no\"\n\n # Close poll after three participants voted\n if (\n context.bot_data[poll_id][\"count\"][\"yes\"] == vote_count\n or context.bot_data[poll_id][\"count\"][\"no\"] == vote_count\n ):\n context.bot.stop_poll(\n context.bot_data[poll_id][\"chat\"],\n context.bot_data[poll_id][\"message_id\"],\n )\n if timeout == -2:\n return\n context.job_queue.run_once(\n self.sched_delete,\n timeout,\n context=(\n context.bot_data[poll_id][\"chat\"],\n context.bot_data[poll_id][\"message_id\"],\n ),\n )", "async def _msgvote_on(self, ctx):\n\n channel = ctx.message.channel\n if channel.id in self.settings[\"channels_enabled\"]:\n await self.bot.say(\"Msgvote mode is already on in this channel.\")\n else:\n self.settings[\"channels_enabled\"].append(channel.id)\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Msgvote mode is now on in this channel.\")", "async def vote(self, ctx):\n embed = discord.Embed(title = \"Here are some bot lists that you can vote for me on, voters may soon™ recieve perks\", color = discord.Color.blurple())\n embed.add_field(name = \"Bots For Discord\", value = \"[Click Here](https://botsfordiscord.com/bot/592811241756688405/vote)\")\n embed.add_field(name = \"Discord Boats\", value = \"[Click Here](https://discord.boats/bot/592811241756688405/vote)\")\n embed.add_field(name = \"Divine Discord Bots\", value = \"[Click Here](https://divinediscordbots.com/bot/592811241756688405/vote)\") \n embed.add_field(name = \"Botlist.space\", value = \"[Click Here](https://botlist.space/bot/592811241756688405/upvote)\") \n embed.set_thumbnail(url = self.bot.user.avatar_url)\n await ctx.send(embed = embed)", "async def get_vote_channel(self, guild: discord.Guild):\n\n vote_channels = [\n ch for ch in guild.channels\n if \"voting\" in ch.name\n or \"vote\" in ch.name\n ]\n\n if len(vote_channels) < 1:\n return _(\n \"I couldn't identify a voting channel.\"\n \" Please specify one explicitly.\"\n )\n\n if len(vote_channels) > 1:\n # get channel with the largest suffixed number\n return max(\n vote_channels, key=lambda obj: int(obj.name.split(\"-\")[1])\n )\n\n else:\n return vote_channels[0]", "def receive_poll_answer(update: 
Update, context: CallbackContext) -> None:\n answer = update.poll_answer\n poll_id = answer.poll_id\n try:\n questions = context.bot_data[poll_id][\"questions\"]\n # this means this poll answer update is from an old poll, we can't do our answering then\n except KeyError:\n return\n selected_options = answer.option_ids\n answer_string = \"\"\n for question_id in selected_options:\n if question_id != selected_options[-1]:\n answer_string += questions[question_id] + \" and \"\n else:\n answer_string += questions[question_id]\n context.bot.send_message(\n context.bot_data[poll_id][\"chat_id\"],\n f\"{update.effective_user.mention_html()} feels {answer_string}!\",\n parse_mode=ParseMode.HTML,\n )\n context.bot_data[poll_id][\"answers\"] += 1\n # Close poll after three participants voted\n if context.bot_data[poll_id][\"answers\"] == 3:\n context.bot.stop_poll(\n context.bot_data[poll_id][\"chat_id\"], context.bot_data[poll_id][\"message_id\"]\n )", "def poll(update: Update, context: CallbackContext) -> None:\n questions = [\"Good\", \"Really good\", \"Fantastic\", \"Great\"]\n message = context.bot.send_poll(\n update.effective_chat.id,\n \"How are you?\",\n questions,\n is_anonymous=False,\n allows_multiple_answers=True,\n )\n # Save some info about the poll the bot_data for later use in receive_poll_answer\n payload = {\n message.poll.id: {\n \"questions\": questions,\n \"message_id\": message.message_id,\n \"chat_id\": update.effective_chat.id,\n \"answers\": 0,\n }\n }\n context.bot_data.update(payload)", "def receive_poll(self, update: Update, context: CallbackContext) -> None:\r\n actual_poll = update.effective_message.poll\r\n # Only need to set the question and options, since all other parameters don't matter for\r\n # a closed poll\r\n update.effective_message.reply_poll(\r\n question=actual_poll.question,\r\n options=[o.text for o in actual_poll.options],\r\n # with is_closed true, the poll/quiz is immediately closed\r\n is_closed=True,\r\n reply_markup=ReplyKeyboardRemove(),\r\n )", "def receive_poll(update: Update, context: CallbackContext) -> None:\n actual_poll = update.effective_message.poll\n # Only need to set the question and options, since all other parameters don't matter for\n # a closed poll\n update.effective_message.reply_poll(\n question=actual_poll.question,\n options=[o.text for o in actual_poll.options],\n # with is_closed true, the poll/quiz is immediately closed\n is_closed=True,\n reply_markup=ReplyKeyboardRemove(),\n )", "def vote(self, part_key, choice):\n part_data = self.get_participant(part_key)\n poll_key = part_data['poll']\n poll_data = self.get_poll(poll_key)\n num_choices = len(poll_data['choices'])\n if(choice not in range(num_choices)):\n raise Exception('Invalid choice value ' + choice +\n ' provided to model.vote()')\n part_data['choice'] = choice\n part_data['voted'] = True\n self.set_participant(part_key, part_data)\n # TODO: Remove the following log notification\n print ('Participant ' + part_data['email'] + ' voted for ' +\n poll_data['choices'][part_data['choice']] + '.')\n return part_data", "def vote_for_poll(request, question_id):\n question = Question.objects.get(pk=question_id)\n if not(q.can_vote()):\n messages.error(request, \"poll expires\")\n return redirect('polls:index')\n return render(request, \"polls/details.html\", {\"question\": question})", "async def vote_setup(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is not 
None:\n await ctx.send('This channel is already setup.')\n return\n channel = Channel(server_id=ctx.guild.id, channel_id=ctx.channel.id)\n session.add(channel)\n session.commit()\n await ctx.send(f'{ctx.channel} set up for voting!')", "def __init__(self, poll, question, options, total_votes, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.poll = poll\n self.question = question\n self.options = options\n self.total_votes = total_votes", "async def vps(self, ctx):\n await ctx.send(\"https://discordapp.com/channels/566451504332931073/566451504903618561/662484243808780309\")", "def cmd_comment_vote(client, args):\n comment_vote = client.comment_vote(args.comment_id, args.vote)\n generate_output({'comment_vote': comment_vote})", "async def vote(ctx: commands.Context):\n await ctx.send(\"this isn't a command\")", "async def letVote(self, ctx):\n timeLeft = 300\n msgStart = await self.textChannel.send(\n self.roleForPlayer.mention + \" \\nDès maintenant les votes sont pris en compte. Votez parmis :```\" +\n \"``````\".join(\n self.getMembersName()) + \"```en écrivant un des pseudos ci-dessus en **_message privé_**.\\nÉvitez\"\n \" de trop spammer si vous ne voulez pas que le décompte soit trop \"\n \"long.\\nN'oubliez pas que vous ne pouvez pas voter pour vous même.\\n\"\n \"Vous avez \" + str(timeLeft) + \" secondes pour voter.\")\n for player in self.playersAndRoles:\n await player.user.send(\"Votez ici parmis :```\" + \"``````\".join(player.getMembersName()) +\n \"```Seul le dernier pseudo valide sera pris en compte.\")\n\n await asyncio.sleep(timeLeft - 30)\n await self.textChannel.send(\"Plus que 30s.\")\n await asyncio.sleep(30)\n msgEnd = await self.textChannel.send(\"Le décompte est terminé, obtention des votes ...\")\n votes = await self.getVote(msgStart=msgStart, msgEnd=msgEnd)\n await self.applyVote(votes=votes)\n await self.displayCourseOfTheGame()\n\n await self.textChannel.send(\"Fin de la partie. 
Suppression du channel dans 2 minutes.\")\n await asyncio.sleep(120)\n await self.endGame(ctx=ctx)", "async def votechannel_list(self, ctx):\n channels = await self.bot.db.execute(\n \"\"\"\n SELECT channel_id, voting_type FROM voting_channel WHERE guild_id = %s\n \"\"\",\n ctx.guild.id,\n )\n if not channels:\n raise exceptions.Info(\"There are no voting channels on this server yet!\")\n\n rows = []\n for channel_id, voting_type in channels:\n rows.append(f\"<#{channel_id}> - `{voting_type}`\")\n\n content = discord.Embed(\n title=f\":1234: Voting channels in {ctx.guild.name}\", color=int(\"3b88c3\", 16)\n )\n await util.send_as_pages(ctx, content, rows)", "def poll(self, poll_input):", "def upvote_comment():\n x = upvoteBot.main(number=1, submissionid='7xbrcw')\n return x", "async def vote_clear(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is None:\n await ctx.send('This channel was never setup for votes.')\n return\n old_votes = session.query(Vote).filter_by(channel_id=ctx.channel.id).all()\n for old_vote in old_votes:\n session.delete(old_vote)\n session.commit()\n await ctx.send(f'Votes for {ctx.channel} cleared!')", "async def channel(self, ctx):\n pass", "async def _msgvote_off(self, ctx):\n\n channel = ctx.message.channel\n if channel.id not in self.settings[\"channels_enabled\"]:\n await self.bot.say(\"Msgvote mode is already off in this channel.\")\n else:\n self.settings[\"channels_enabled\"].remove(channel.id)\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Msgvote mode is now off in this channel.\")", "def abstimmungen(self, irc, msg, args):\n if self._is_voting_enabled(irc, msg, reply=True):\n channel = msg.args[0]\n users = irc.state.channels[channel].users\n voting_timeout = int(self.registryValue(\"voting_timeout\"))\n\n votes = []\n for voting_id in self.running_votes:\n voting = self.running_votes[voting_id]\n votes.append(\"[ Abstimmung gegen %s (%d von %d Stimmen) noch %d Sekunden ]\" % (\n voting.target,\n voting.count_votes(users),\n voting.threshold,\n voting.remaining_time(voting_timeout)))\n if votes:\n irc.reply(\", \".join(votes))\n else:\n irc.reply(\"Momentan laufen keine Abstimmungen.\")", "async def vote_unsetup(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is None:\n await ctx.send('This channel was never setup for votes.')\n return\n session.delete(old_channel)\n session.commit()\n await vote_clear(ctx)\n await ctx.send(f'{ctx.channel} no longer open for voting.')", "def update_vote(self, vote):\n enemy = Enemy(vote.target, history={}).update_hostility(hostility=4, message=vote)\n self.update_enemy(enemy)", "def slot_poll(self, _sender, _data):\r\n if self.client.secret and self.client.secret.know_secret():\r\n # poll recent own trades\r\n # fixme: how do i do this, whats the api for this?\r\n pass", "def up_vote(cls, user, message):\r\n pass", "def do_initiate(bot, msg, **kwargs):\n channel = kwargs.get('event').get('channel')\n instructions = textwrap.dedent(\n '''\n :cop:I am *{name}*, your election police.\n\n \n :grey_question:*How to Vote:*\n Voting in here is simple. Each candidate's profile is listed with a white-on-green checkmark beneath their profile. All you have to do is *click the checkmark once* for your preferred candidate.\n\n\n :warning:*Rules*:\n 1. *Only your first vote counts*. 
Regardless of the count on checkmark, only your first vote is valid and recorded. Subsequent votes or attemps to remove already cast ballots would be ignored.\n\n 2. *Do not try to post any messages in this channel* as such messages would be deleted immediately.\n\n Now...\n > _Be Nice, Be Respectful, Be Civil_ :simple_smile:\n\n\n I will now list the candidates. Happy Voting :simple_smile:\n > One more thing: _You can vote for yourself._\n\n '''.format(name=bot.username)\n )\n\n # Clear channel\n bot.clear_channel(channel)\n \n print 'Begin Inviting...'\n if 'DEBUG' in dir(bot.config) or 'TESTING' in dir(bot.config):\n print 'test invites'\n # for userid in bot.masters.values():\n # bot.invite_user_to_channel(channel, userid)\n else:\n for member in bot.team_members:\n bot.invite_user_to_channel(channel, member.get('id'))\n print 'End Inviting...'\n\n # Set channel topic\n bot.set_channel_topic(bot.stats.get(channel).get('topic'), channel)\n # Show instructions\n instruction_response = bot.post_msg(text=instructions, channel_name_or_id=channel)\n # Set channel purpose\n bot.set_channel_purpose(bot.stats.get(channel).get('purpose'), channel)\n # Pin message to channel\n bot.pin_msg_to_channel(channel, instruction_response.get('ts'))\n\n help_response = do_help(bot, **kwargs)\n bot.pin_msg_to_channel(channel, help_response.get('ts'))\n\n # Add candidates for this office\n for userid, data in bot.stats.get(channel).get('candidates').iteritems():\n bot.add_candidate(userid, channel)\n bot.vote_for(userid, channel)\n #bot.update_live_stats(channel)\n\n live_stats = bot.get_stats(channel)\n if live_stats is not None:\n response = bot.post_msg(\n text=live_stats,\n channel_name_or_id=channel\n )\n bot.stats.get(channel)['live_ts'] = response.get('ts')\n bot.db.session.query(bot.db.Office).filter_by(channel=channel).first().live_ts=response.get('ts')\n\n response = bot.post_msg(\n text='*NO ONGOING ELECTIONS IN THIS CHANNEL*',\n channel_name_or_id=channel\n )\n bot.stats.get(channel)['election_status_ts'] = response.get('ts')\n bot.db.session.query(bot.db.Office).filter_by(channel=channel).first().election_status_ts=response.get('ts')\n bot.stats.get(channel)['election_status'] = False\n bot.db.session.query(bot.db.Office).filter_by(channel=channel).first().election_status= False\n bot.db.session.commit()\n\n bot.log_msg('Channel{} prepared for voting.'.format(channel), channel)\n \n return True\n #return Response(bot.about)", "def receive_quiz_answer(update: Update, context: CallbackContext) -> None:\n # the bot can receive closed poll updates we don't care about\n if update.poll.is_closed:\n return\n if update.poll.total_voter_count == 3:\n try:\n quiz_data = context.bot_data[update.poll.id]\n # this means this poll answer update is from an old poll, we can't stop it then\n except KeyError:\n return\n context.bot.stop_poll(quiz_data[\"chat_id\"], quiz_data[\"message_id\"])", "def tpc_vote(self, transaction):\n raise NotImplementedError", "async def change_promotion_channel(self, **kwargs):\n\n facebook = kwargs.get('facebook', None)\n twitter = kwargs.get('twitter', None)\n youtube = kwargs.get('youtube', None)\n twitch = kwargs.get('twitch', None)\n privacy = kwargs.get('privacy', None)\n data = {\n \"facebook\": facebook,\n \"twitter\": twitter,\n \"youtube\": youtube,\n \"twitch\": twitch,\n \"promotionChannelsVisibilityPrivacy\": privacy\n }\n\n e = await self.request.request(url='https://accountinformation.roblox.com/v1/phone/promotion-channels',\n method='post',\n data=data,\n )\n return 
e", "def process_VOTED(self, msg):\n\n result = parseYesOrNo(' '.join(msg[1:]))\n if result is not None:\n assert self._vote is not None\n self._vote.set(result)", "def read_channel(self, channel: int, /) -> int:", "def update_vote(self):\n if not self.answer_id:\n return False\n try:\n con = psycopg2.connect(**self.config)\n cur = con.cursor(cursor_factory=RealDictCursor)\n query = \"UPDATE votes SET vote=%s WHERE answer_id=%s AND user_id=%s\"\n cur.execute(query, (self.vote_value, self.answer_id, self.user_id))\n con.commit()\n except Exception as e:\n print(e)\n con.close()\n return False\n return True", "def process_vote(self, comment_id, username, value):\n raise NotImplementedError()", "def up_vote(cls, user, message):\n pass", "async def legsessionvoting(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_update\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are \" \\\n f\"a {self.bot.mk.LEGISLATURE_LEGISLATOR_NAME} \" \\\n f\"and voting starts for a Legislative Session.\"\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are \" \\\n f\"a {self.bot.mk.LEGISLATURE_LEGISLATOR_NAME} \" \\\n f\"and voting starts for a Legislative Session.\"\n\n await ctx.send(message)", "def on_channel_change(self, new_channel):\n pass", "async def vote_comment(*, comment: models.Comment = Depends(resolve_comment), vote: int = Path(..., ge=-1, le=1),\n current_user: models.User = Depends(resolve_current_user), db: Session = Depends(get_db)):\n return crud.vote_comment(db, comment_id=comment.id, author_id=current_user.id, vote=vote)", "def negotiate_time(self, update, context):\n chat_id = update.effective_chat.id\n response_code = update.callback_query[\"data\"] # eta_later, eta_never, eta_20:45, etc.\n log.info(\"Offer @%s raw: @%s\", update.effective_chat.id, response_code)\n\n if response_code == \"eta_never\":\n # the user pressed the button to say they're cancelling their offer\n self.send_message(chat_id, c.MSG_THANKS_NOTHANKS)\n context.user_data[\"reviewed_request\"] = None\n context.user_data[\"state\"] = c.State.AVAILABLE\n\n elif response_code == \"eta_later\":\n # Show them more options in the interactive menu\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=\"Alege timpul\",\n reply_markup=InlineKeyboardMarkup(k.build_dynamic_keyboard()),\n )\n else:\n # This is an actual offer, ot looks like `eta_20:40`, extract the actual timestamp in UTC\n offer = response_code.split(\"_\")[-1]\n log.info(\n \"Relaying offer @%s UTC (%s %s)\", offer, utc_short_to_user_short(offer), c.TIMEZONE\n )\n\n # tell the backend about it\n request_id = context.user_data[\"reviewed_request\"]\n self.backend.relay_offer(request_id, chat_id, offer)\n\n # tell the user that this is now processed by the server\n self.send_message(\n chat_id, (c.MSG_ACK_TIME % utc_short_to_user_short(offer)) + c.MSG_COORDINATING\n )", "async def votechannel_remove(self, ctx, *, channel: discord.TextChannel):\n await self.bot.db.execute(\n \"DELETE FROM voting_channel WHERE guild_id = %s and channel_id = %s\",\n ctx.guild.id,\n channel.id,\n )\n self.bot.cache.votechannels.discard(channel.id)\n await util.send_success(ctx, f\"{channel.mention} is no longer a voting channel.\")", "def vote(request, question_id):\n question = get_object_or_404(Question, pk=question_id)\n try:\n selected_choice = question.choice_set.get(pk=request.POST['choice'])\n except (KeyError, Choice.DoesNotExist):\n # Redisplay the question voting 
form.\n return render(request, 'polls/detail.html', {\n 'question': question,\n 'error_message': \"You didn't select a choice.\",\n })\n else:\n user = request.user\n Vote.objects.update_or_create(user=user, question=question, defaults={'choice': selected_choice})\n for choice in question.choice_set.all():\n choice.votes = Vote.objects.filter(question=question).filter(choice=choice).count()\n choice.save()\n # Always return an HttpResponseRedirect after successfully dealing\n # with POST data. This prevents data from being posted twice if a\n # user hits the Back button.\n date = datetime.now()\n log.info(\"User: %s, Poll's ID: %d, Date: %s.\", user, question_id, str(date))\n return HttpResponseRedirect(reverse('polls:results', args=(question.id,)))", "def new_vote(request, ballot_url):\n\tdisplay_ballot = get_object_or_404(BallotPaper, ballot_url=ballot_url)\n\tqueryset = Category.objects.filter(ballot_paper=display_ballot)\n\tcaty = get_list_or_404(queryset)\n\n\tif request.session.get('has_voted', False):\n\t\tcontext = {'base_template': 'polls/base.html', 'error_message': 'You have voted already, thus you cannot vote again.'}\n\t\treturn render(request, 'polls/not_available.html', context)\n\telse:\n\t\tfor cat in caty:\n\t\t\ttry:\n\t\t\t\tselected_choice = cat.choice_set.get(pk=request.POST[cat.category_name])\n\t\t\t#except (KeyError, Choice.DoesNotExist):\n\t\t\t#\treturn render(request, 'polls/display_ballot.html', {\n\t\t\t#\t\t'display_ballot': display_ballot,\n\t\t\t#\t\t'error_message': 'Please select a valid choice.'\n\t\t\t#\t})\n\t\t\texcept (MultiValueDictKeyError):\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tselected_choice.votes += 1\n\t\t\t\tselected_choice.save()\n\n\t\trequest.session['has_voted'] = True\n\t\trequest.session.set_expiry(display_ballot.close_date)\n\t\treturn HttpResponseRedirect(reverse('polls:vote_success'))", "async def votechannel_add(self, ctx, channel: discord.TextChannel, reaction_type=None):\n if reaction_type is None:\n channel_type = \"voting\"\n elif reaction_type.lower() in [\"rate\", \"rating\"]:\n channel_type = \"rating\"\n elif reaction_type.lower() in [\"vote\", \"voting\"]:\n channel_type = \"voting\"\n else:\n raise exceptions.Warning(f\"Unknown reaction type `{reaction_type}`\", help_footer=True)\n\n await self.bot.db.execute(\n \"\"\"\n INSERT INTO voting_channel (guild_id, channel_id, voting_type)\n VALUES (%s, %s, %s)\n ON DUPLICATE KEY UPDATE\n voting_type = VALUES(voting_type)\n \"\"\",\n ctx.guild.id,\n channel.id,\n channel_type,\n )\n self.bot.cache.votechannels.add(channel.id)\n await util.send_success(\n ctx, f\"{channel.mention} is now a voting channel of type `{channel_type}`\"\n )", "def poll(self):\n self.poll_function(self.connection)", "def quiz(update: Update, context: CallbackContext) -> None:\n questions = [\"1\", \"2\", \"4\", \"20\"]\n message = update.effective_message.reply_poll(\n \"How many eggs do you need for a cake?\", questions, type=Poll.QUIZ, correct_option_id=2\n )\n # Save some info about the poll the bot_data for later use in receive_quiz_answer\n payload = {\n message.poll.id: {\"chat_id\": update.effective_chat.id, \"message_id\": message.message_id}\n }\n context.bot_data.update(payload)", "def opinion_vote(mode, verbose, revision):\n judge = VotingJudge(mode, revision)\n flags = judge.vote()\n if verbose is True:\n click.echo(\"Vote resulted in %i flags:\" % len(flags))\n for f in flags:\n format_flag(f)", "def _transit_to_voting(self, **kwargs):\n\n handler = kwargs['handler']\n\n plus_id = 
kwargs['plus_id']\n card_id = kwargs['card_id']\n game = models.Hangout.get_by_id(self.hangout_id).current_game.get()\n if not game:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': \"Game for hangout %s not found\" % (self.hangout_id,)})\n return False\n if not game.state == self.state_name:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': (\n \"Can't vote now, wrong game state %s.\" % (game.state,))})\n return False \n # try to get the id of the voted-for player based on their selected card\n # via memcache first.\n selections = memcache.get(\n self._selections_key(game.key.id(), game.current_round))\n if selections: # if cache hit\n logging.info(\"got selections cache hit: %s\", selections)\n pvid = selections.get(card_id)\n if not pvid:\n # cache list was present, but not info for that card\n pvid = self._get_pid_from_selcard(card_id)\n else: # cache miss on selections list\n logging.info(\"did not get selections cache hit\")\n pvid = self._get_pid_from_selcard(card_id)\n logging.debug(\"in _transit_to_voting, with plus id %s and pvid %s\",\n plus_id, pvid)\n if not plus_id or not pvid:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': 'Voting information not properly specified'})\n return False\n if plus_id == pvid:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': 'Participants cannot vote for themselves.'})\n return False\n\n participant_key = model.Key(models.Participant, plus_id, parent=game.key)\n participant = participant_key.get()\n if not participant:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': \"Could not retrieve indicated participant\"})\n return False\n # TODO: also check that entity exists for given participant key\n vpkey = model.Key(models.Participant, pvid, parent=game.key)\n participant.vote = vpkey\n participant.put()\n return True", "def add_channel(self, channel):\n self.task.ai_channels.add_ai_voltage_chan(channel)", "async def set_channel(self, ctx, *, channel: discord.Channel=None):\n\n server = ctx.message.server\n\n temp = self.bot.dota_ticker_settings.get(server.id)\n\n if temp is None or not temp['enabled']:\n await self.bot.say('The match ticker has not been enabled on this server.')\n return\n\n if channel is None:\n await self.bot.say('No channel name or mention received.')\n return\n\n settings = {'enabled': True, 'channel_id': channel.id}\n\n await self.bot.dota_ticker_settings.put(server.id, settings)\n await self.bot.say('The match ticker has been enabled on {0.mention}.'.format(channel))", "def get_vote_from_message(self, message: discord.Message):\n\n content = message.clean_content\n message\n\n res = re.match(VOTE_RE, content)\n if res:\n return res.group(1)\n else:\n res = re.match(NO_VOTE_RE, content)\n if res:\n return \"VTNL\"\n else:\n res = re.match(UN_VOTE_RE, content)\n if res:\n return \"No vote\"", "def do_vote(self, question_id, vote):\n\n try:\n if not question_id:\n raise Exception('Invalid parameter')\n if vote != 1 and vote != 2 and vote != 5 and vote != 6:\n raise Exception('Invalid vote')\n\n vote_url = BASE_URL + 'vote'\n response = self.request('POST', vote_url, params={'type': int(vote), 'postId': int(question_id)})\n response = response.json()\n if response['success'] == 0:\n if response['message']:\n raise Exception(response['message'])\n else:\n raise Exception('Something went wrong. 
Please try again')\n else:\n return response\n except Exception as e:\n Utils.log(traceback.format_exc())\n Utils.error(e.args[0])", "async def applyVote(self, votes):\n voteCount = {vote: 0 for vote in self.getMembersName()}\n voteCount[None] = 0\n for vote in votes.values():\n voteCount[vote] += 1\n\n if voteCount[None] != 0:\n await self.textChannel.send(\n \"Attention, des joueurs n'ont pas voté / ont mal écrit, les votes peuvent être faussés.\")\n del voteCount[None]\n\n playerOrder = sorted(voteCount.items(), key=lambda x: x[1], reverse=True)\n print(\"playerOrder\", playerOrder)\n if playerOrder[0][1] == 0: # Nobody vote\n await self.textChannel.send(\"`Partie non valide`, personne n'a voté.\")\n\n elif playerOrder[0][1] == 1: # People think nobody is a werewolf\n await self.textChannel.send(\"Le village pense qu'il n'y a pas de loups-garou ? Vérification ...\")\n werewolves = self.getWolves()\n if len(werewolves) == 0:\n await self.textChannel.send(\"Le village a raison, il n'y a pas de loups-garous parmis eux.\")\n await self.textChannel.send(\"```css\\nLES VILLAGEOIS ONT GAGNÉ```\")\n else:\n await self.textChannel.send(\"Malheuresement, il y avait```\" + \", \".join(werewolves) + \"```\")\n await self.textChannel.send(\"```diff\\n-LES LOUPS-GAROUS ONT GAGNÉ-```\")\n\n else: # Classic vote\n werewolves = self.getWolves()\n deaths = []\n for i in range(len(playerOrder)):\n player = self.getMemberFromName(name=playerOrder[i][0])\n isDead = await player.isDead(channel=self.textChannel)\n if isDead:\n deaths += await player.death(channel=self.textChannel, members=self.players)\n print(\"voteCount :\", voteCount)\n\n # Get player name with same number of vote against them\n playerEqualVote = []\n for p in playerOrder:\n if p[1] == playerOrder[i][1] and p[0] != playerOrder[i][0]:\n playerEqualVote.append(self.getMemberFromName(name=p[0]))\n print(\"Other players with equals number of vote :\", playerEqualVote)\n for otherPlayer in playerEqualVote:\n isDead = await otherPlayer.isDead(channel=self.textChannel)\n if isDead:\n deaths += await otherPlayer.death(channel=self.textChannel, members=self.players)\n break\n\n for i in range(len(deaths)):\n if deaths[i] is None:\n del deaths[i]\n\n if len(deaths) == 0: # No one die\n if len(werewolves) == 0: # No Werewolves\n await self.textChannel.send(\"Il n'ya pas eu de mort et il n'y a aucun Loup-Garou !\")\n await self.textChannel.send(\"```css\\nLES VILLAGEOIS ONT GAGNÉ```\")\n else: # Werewolves among players\n await self.textChannel.send(\n \"Il n'y a pas eu de mort mais```\" + \", \".join(werewolves) + \"```\")\n await self.textChannel.send(\"```diff\\n-LES LOUPS-GAROUS ONT GAGNÉ-```\")\n\n elif len(deaths) == 1:\n if deaths[0].lastRole in [\"Loup-Garou\", \"Loup Alpha\", \"Loup Shamane\", \"Loup rêveur\"]: # Werewolf die\n await self.textChannel.send(\"```css\\nLES VILLAGEOIS ONT GAGNÉ```\")\n elif deaths[0].lastRole in [\"Tanneur\"]: # Tanner died\n await self.textChannel.send(\"```Fix\\n#LE TANNEUR A GAGNÉ#```\")\n if len(werewolves) > 0: # Wolves in game\n await self.textChannel.send(\"```diff\\n-LES LOUPS-GAROUS ONT ÉGALEMENT GAGNÉ```\")\n else: # Villager died\n await self.textChannel.send(\"```diff\\n-LES LOUPS-GAROUS ONT GAGNÉ-```\")\n\n else: # more than 2 deaths\n rolesDead = []\n for dead in deaths:\n if dead.lastRole in [\"Loup-Garou\", \"Loup Alpha\", \"Loup Shamane\", \"Loup rêveur\"]:\n rolesDead.append(\"Loup-Garou\")\n elif dead.lastRole in [\"Tanneur\"]:\n await self.textChannel.send(\"```Fix\\n#LE TANNEUR A 
GAGNÉ#```\")\n else:\n rolesDead.append(\"Villageois\")\n print(\"rolesDead :\", rolesDead)\n rolesDead = list(dict.fromkeys(rolesDead))\n print(\"rolesDead unique :\", rolesDead)\n if \"Loup-Garou\" in rolesDead:\n await self.textChannel.send(\"```css\\nLES VILLAGEOIS ONT GAGNÉ```\")\n else:\n await self.textChannel.send(\"```diff\\n-LES LOUPS-GAROUS ONT GAGNÉ-```\")", "async def _msgvote_interval(self, ctx, interval: int):\n\n if 1 <= interval <= 60:\n self.settings[\"interval\"] = interval\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"I will check each message's votes every \"\n \"{} seconds.\".format(interval))\n else:\n await self.bot.say(\"Invalid interval. Must be an integer \"\n \"between 1-60.\")", "def vote(request, question_id):\n question = get_object_or_404(Question, pk=question_id)\n try:\n configure()\n selected_choice = question.choice_set.get(pk=request.POST['choice'])\n except (KeyError, Choice.DoesNotExist):\n configure()\n # Redisplay the question voting form.\n return render(request, 'polls/detail.html', {\n 'question': question,\n 'error_message': \"You didn't select a choice.\",\n })\n else:\n if Vote.objects.filter(pk=question_id, user_id=request.user.id).exists():\n configure()\n user_vote = question.vote_set.get(user=request.user)\n user_vote.choice = selected_choice\n user_vote.choice.votes += 1\n user_vote.choice.save()\n user_vote.save()\n else:\n configure()\n selected_choice.vote_set.create(user=request.user, question=question)\n\n return HttpResponseRedirect(reverse('polls:results', args=(question_id,)))", "def channel(self, channel: int, /) -> \"TimerChannel\" | None:", "def cmd_gallery_item_vote(client, args):\n gallery_item_vote = client.gallery_item_vote(args.item_id, args.vote)\n generate_output({'gallery_item_vote': gallery_item_vote})", "def channel_secret(self, channel_secret):\n \n self._channel_secret = channel_secret", "async def vconcmd(self, message):\n self._db.set(__name__, 'voicy', True)\n self._db.set(__name__, \"ratelimit\", [])\n await message.edit(self.strings['on'])", "async def pickemsvotes(self, ctx: commands.Context):\n if str(ctx.guild.id) not in self.all_pickems:\n await ctx.send(_(\"This server does not have any pickems setup.\"))\n return\n msg = _(\"You have voted on the following games:\\n\")\n timezone = await self.pickems_config.guild(ctx.guild).pickems_timezone()\n for game_id, pickem in self.all_pickems[str(ctx.guild.id)].items():\n if str(ctx.author.id) in pickem.votes:\n vote = pickem.votes[str(ctx.author.id)]\n game_start = utc_to_local(pickem.game_start, timezone)\n time_str = game_start.strftime(\"%B %d, %Y at %I:%M %p %Z\")\n msg += f\"{pickem.away_team} @ {pickem.home_team} {time_str} - {vote}\\n\"\n msgs = []\n for page in pagify(msg):\n if ctx.channel.permissions_for(ctx.me).embed_links:\n em = discord.Embed(\n title=_(\"Pickems votes in {guild}\").format(guild=ctx.guild.name),\n description=page,\n )\n msgs.append(em)\n else:\n msgs.append(page)\n await BaseMenu(source=SimplePages(msgs)).start(ctx=ctx)", "def update_forum_votes(sender, **kwargs):\r\n vote = kwargs['instance']\r\n if vote.content_type.app_label != \"fretboard\":\r\n return\r\n if vote.content_type.model == \"topic\":\r\n t = get_model('fretboard', 'Topic').objects.get(id=vote.object.id)\r\n t.votes = t.score()\r\n t.save(update_fields=['votes'])\r\n elif vote.content_type.model == \"post\":\r\n p = get_model('fretboard', 'Post').objects.get(id=vote.object.id)\r\n p.votes = p.score()\r\n 
p.save(update_fields=['votes'])", "def vote(request, ballot_url):\n\tdisplay_ballot = get_object_or_404(BallotPaper, ballot_url=ballot_url)\n\tqueryset = Category.objects.filter(ballot_paper=display_ballot)\n\tcaty = get_list_or_404(queryset)\n\tuser = request.user\n\n\tfor cat in caty:\n\t\ttry:\n\t\t\tselected_choice = cat.choice_set.get(pk=request.POST[cat.category_name])\n\t\texcept (KeyError, Choice.DoesNotExist, MultiValueDictKeyError):\n\t\t\treturn render(request, 'polls/display_ballot.html', {\n\t\t\t\t'display_ballot': display_ballot,\n\t\t\t\t'error_message': 'Please select a choice across all categories.'\n\t\t\t})\n\t\telse:\n\t\t\ttry:\n\t\t\t\tToken.objects.get(user=user)\n\t\t\texcept (Token.DoesNotExist):\n\t\t\t\tif ballot.created_by == user:\n\t\t\t\t\treturn render(request, 'polls/display_ballot.html', {\n\t\t\t\t\t\t'display_ballot': display_ballot,\n\t\t\t\t\t\t'error_message': 'Sorry, you do not have authorization to vote.'\n\t\t\t\t\t\t})\n\t\t\t\telse:\n\t\t\t\t\tlogout(request)\n\t\t\t\t\tmessages.error(request, 'Sorry, you do not have authorization to vote.')\n\t\t\t\t\tHttpResponseRedirect(reverse('users:token_login'))\n\t\t\telse:\n\t\t\t\tselected_choice.votes += 1\n\t\t\t\tselected_choice.save()\n\t\t\t\tuser.token.is_used = True\n\t\t\t\tuser.token.save()\n\t\t\t\tlogout(request)\n\n\t\n\treturn HttpResponseRedirect(reverse('polls:vote_success'))", "def _set_channel_(self, channel):\n self._channel = channel", "async def spoilerchannel(self, ctx):\n pass", "def set_channel(cls, channel):\n cls.channel = channel", "async def mutechannel(self, ctx, channel: str):\n self.data_check(ctx)\n server = ctx.message.server\n\n self.riceCog2[server.id][\"mutechannel\"] = channel\n dataIO.save_json(self.warning_settings,\n self.riceCog2)\n await self.bot.say(\"Mute channel is now: **{}**\".format(channel))", "def onCallvoteFinish(self, event):\n if not self.callvote:\n self.debug('intercepted %s but there is no active callvote', event.type.__str__())\n return\n\n # check again to see if it's the callvote we are actually holding\n r = re.compile(r'''^(?P<type>\\w+)\\s?(?P<args>.*)$''')\n m = r.match(event.data['what'])\n if not m:\n self.warning('could not parse %s data: %s', event.data, event.type.__str__())\n self.veto()\n return\n\n if self.callvote['type'] != m.group('type') or self.callvote['args'] != m.group('args'):\n self.warning('intercepted %s but data don\\'t match the currently stored callvote')\n self.veto()\n return\n\n # replace 'max_num' with the number of players connected,\n # no matter the team they belong to since they may have joined\n # RED or BLUE team to partecipate to the callvote\n self.callvote['num_no'] = event.data['no']\n self.callvote['num_yes'] = event.data['yes']\n self.callvote['max_num'] = len(self.console.clients.getList())\n\n # save the callvote in the storage\n self.console.storage.query(self.sql['q1'] % (self.callvote['client'].id, self.callvote['type'],\n self.callvote['args'] if self.callvote['args'] else None,\n self.callvote['max_num'], self.callvote['num_yes'],\n self.callvote['num_no'], self.callvote['time']))", "def pinger(var, wrapper, message):\n wrapper.reply(messages[\"ping\"].format(nick=wrapper.source, bot_nick=users.Bot))", "async def _set(self, ctx, channel : discord.Channel, count : int):\r\n \r\n server = ctx.message.server\r\n if server.id not in self.set:\r\n await self.bot.say(\":x: Uninitialized server!\")\r\n return\r\n if channel.id not in self.set[server.id][\"channels\"]:\r\n await self.bot.say(\":x: This 
is not a counting channel!\")\r\n return\r\n self.set[server.id][\"channels\"][channel.id][\"count\"] = count\r\n self.set[server.id][\"channels\"][channel.id][\"last\"] = None\r\n self.save()\r\n goal = self.set[server.id][\"channels\"][channel.id][\"goal\"]\r\n if goal > 0:\r\n await self.bot.edit_channel(channel,topic = \"Next message must start with {} | Reach {} to complete.\".format(count+1,goal))\r\n else:\r\n await self.bot.edit_channel(channel,topic = \"Next message must start with {}\".format(count+1))\r\n await self.bot.say(\"Channel count set to {}!\".format(count))", "async def setcoachchannel(self, ctx, channel: int):\r\n if ctx.guild.id == 445092370006933505:\r\n await self.config.guild(ctx.guild).coachchannel.set(int(channel))\r\n await ctx.send(\"You set {} as the coaching channel\".format(channel))\r\n else:\r\n await ctx.send(\"This command only works in the Legend eSports server, join us at: https://discord.gg/GGuCXDn\")", "def vol_push_callback(channel):\n \n global volume, mute\n \n if mute:\n subprocess.run([\"mpc\", \"volume\", str(volume)],stdout=subprocess.DEVNULL)\n else:\n print(\"mute\")\n subprocess.run([\"mpc\", \"volume\", \"0\"],stdout=subprocess.DEVNULL)\n mute = not mute", "def modify_channel(self, channel):\n self._poller.modify(channel.fileno, channel._events)", "def test_vote_nopermission(self):\r\n mock_module = CHModuleFactory.create(user_voted=True)\r\n json_in = {'answer': '24.0', 'hint': 1, 'pk_list': json.dumps([['24.0', 1], ['24.0', 3]])}\r\n old_hints = copy.deepcopy(mock_module.hints)\r\n mock_module.tally_vote(json_in)\r\n self.assertTrue(mock_module.hints == old_hints)", "def handle_current_setting(event):\n info = forex_notifier.get_notify_currency_info(event.source.user_id)\n line_bot.replyMessage(event.reply_token, info)", "def cheer(self, user_id, channel_id, message, cookie):\n\t\tcheer_amount = sum(map(int, [match.group(1) for match in re.finditer(r'cheer(\\d+)', message)]))\n\t\tresp = self._request('POST', 'https://api.twitch.tv/bits/events', version=4, oauth=False, data={\n\t\t\t'user_id': user_id,\n\t\t\t'channel_id': channel_id,\n\t\t\t'amount': cheer_amount,\n\t\t\t'event_id': str(uuid4()),\n\t\t\t'message': message,\n\t\t}, headers={'cookie': cookie})\n\t\treturn resp['balance']", "def set_channel(self, channel_name, value):\n try:\n cm = self.__core.get_service(\"channel_manager\")\n cdb = cm.channel_database_get()\n channel = cdb.channel_get(channel_name)\n try:\n typing_value = channel.type()(value)\n except Exception:\n traceback.print_exc()\n return\n channel.consumer_set(Sample(time.time(), typing_value))\n except Exception:\n traceback.print_exc()", "def toggle_vote(self):\n\n self.vote = 1 - self.vote", "def __init__(__self__, *,\n channel: Optional[pulumi.Input['ReleaseChannelChannel']] = None):\n if channel is not None:\n pulumi.set(__self__, \"channel\", channel)", "def quiz(self, update: Update, context: CallbackContext) -> None:\r\n #questions = [\"1\", \"2\", \"4\", \"20\"]\r\n questions = qa.Question().options\r\n message = update.effective_message.reply_poll(\r\n \"How many eggs do you need for a cake?\", questions, type=Poll.QUIZ, correct_option_id=2\r\n )\r\n # Save some info about the poll the bot_data for later use in receive_quiz_answer\r\n payload = {\r\n message.poll.id: {\r\n \"questions\": questions,\r\n \"chat_id\": update.effective_chat.id, \r\n \"message_id\": message.message_id}\r\n }\r\n context.bot_data.update(payload)", "def vote(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'votes')\r\n 
request = http.Request('POST', url, {'to': '1'})\r\n\r\n return request, parsers.parse_json", "def vote(self):\n if self.vote_exists():\n return self.update_vote()\n return self.create_vote()", "async def defchannel(self, ctx, channel: str):\n self.data_check(ctx)\n server = ctx.message.server\n\n self.riceCog2[server.id][\"defchannel\"] = channel\n dataIO.save_json(self.warning_settings,\n self.riceCog2)\n await self.bot.say(\"Log channel is now: **{}**\".format(channel))", "async def get_promotion_channel(self) -> PromotionChannel:\n e = await self.request.request(url='https://accountinformation.roblox.com/v1/promotion-channels', method='get')\n return PromotionChannel(iteam=e)", "def execute(cls, slack_wrapper, args, channel_id, user_id, user_is_admin):\n slack_wrapper.post_message(channel_id, \"Pong!\")", "def set_channel(self, channel_name, value):\n try:\n cm = self.__core.get_service(\"channel_manager\")\n cdb = cm.channel_database_get()\n channel = cdb.channel_get(channel_name)\n try:\n print \"in set_channel\" #if this is not displayed => this function is not called => it must be deleted...\n typing_value = channel.type()(value)#what is going on here?! I don't know...\n except:\n traceback.print_exc()\n return\n channel.consumer_set(Sample(time.time(), typing_value))\n except Exception:\n traceback.print_exc()", "def listpolls(self, irc, msg, args, channel):\n if channel and msg.args[0] in irc.state.channels:\n if self.polls is None:\n self.polls = []\n if self.polls is []:\n irc.reply(\"No Polls.\")\n for idx, entry in enumerate(self.polls[channel]):\n entry_string = []\n question = entry['question']\n yays = entry['yays']\n nays = entry['nays']\n added_by = entry['added_by']\n # concluded = entry['concluded']\n entry_string.append(\"%d: %s\" % (idx, question))\n entry_string.append(\"Yes: %s\" % (' '.join(yays) if yays != [] else 'none'))\n entry_string.append(\"No: %s\" % (' '.join(nays) if nays != [] else 'none'))\n entry_string.append(\"Question asked by %s\" % added_by)\n irc.reply(' / '.join(entry_string), notice=True, private=True, prefixNick=False)\n\n else:\n try:\n if ircdb.checkCapability(msg.prefix, 'admin') or ircdb.checkCapability(msg.prefix, 'owner'):\n if self.polls is None:\n self.polls = []\n if self.polls is []:\n irc.reply(\"No Polls.\")\n for idx, entry in enumerate(self.polls[channel]):\n entry_string = []\n question = entry['question']\n yays = entry['yays']\n nays = entry['nays']\n added_by = entry['added_by']\n # concluded = entry['concluded']\n entry_string.append(\"%d: %s\" % (idx, question))\n entry_string.append(\"Yays: %s\" % (' '.join(yays) if yays != [] else 'none'))\n entry_string.append(\"Nays: %s\" % (' '.join(nays) if nays != [] else 'none'))\n entry_string.append(\"Question asked by %s\" % added_by)\n irc.reply(' / '.join(entry_string), notice=True, private=True, prefixNick=False)\n else:\n irc.errorInvalid('argument', channel)\n\n except KeyError:\n return", "async def set_channel(self, ctx: commands.Context, channel: discord.TextChannel = None):\n if channel is not None:\n await self.config.guild(ctx.guild).autopostchannel.set(channel.id)\n await ctx.send(\"Auto-post channel has been set to {}\".format(channel.mention))\n else:\n await self.config.guild(ctx.guild).autopostchannel.set(None)\n await ctx.send(\"Auto-post channel has been cleared\")", "def addCoachesPollVote(votes, userID):\n\n # votes should be in the form [\"team|3\", \"team|4\", \"team|1\", etc...]\n db = getDB()\n time = getTimeframe()\n db.coaches_polls.update(\n 
{\"user_id\": userID, \"week\": time[\"week\"], \"season\": time[\"season\"]},\n {\n \"user_id\": userID,\n \"week\": time[\"week\"],\n \"season\": time[\"season\"],\n \"rankings\": votes,\n },\n upsert=True,\n )", "def vote(request, comment_id, vote):\n rating = {'up': 1, 'down': -1}.get(vote, False)\n if not rating:\n raise Http404, \"Invalid vote\"\n if request.user.is_anonymous():\n raise Http404, \"Anonymous users cannot vote\"\n try:\n comment = comments.get_object(pk=comment_id)\n except comments.CommentDoesNotExist:\n raise Http404, \"Invalid comment ID\"\n if comment.user_id == request.user.id:\n raise Http404, \"No voting for yourself\"\n karma.vote(request.user.id, comment_id, rating)\n # Reload comment to ensure we have up to date karma count\n comment = comments.get_object(pk=comment_id)\n return render_to_response('comments/karma_vote_accepted', {'comment': comment}, context_instance=DjangoContext(request))", "def default_channel(self) -> int:\r\n ...", "def channel(self):\n raise NotImplementedError", "async def monitor(self, ctx, channel):\n author = ctx.message.author\n author_channel = ctx.message.channel\n\n def check(m):\n try:\n return channels[int(m.content)]\n except:\n return False\n\n channels = self.bot.get_all_channels()\n channels = [c for c in channels\n if c.name.lower() == channel or c.id == channel]\n channels = [c for c in channels if c.type == discord.ChannelType.text]\n\n\n if not channels:\n await self.bot.say(\"No channels found. Remember to type just \"\n \"the channel name, no `#`.\")\n return\n\n if len(channels) > 1:\n msg = \"Multiple results found.\\nChoose a server:\\n\"\n for i, channel in enumerate(channels):\n msg += \"{} - {} ({})\\n\".format(i, channel.server, channel.id)\n for page in pagify(msg):\n await self.bot.say(page)\n choice = await self.bot.wait_for_message(author=author,\n timeout=30,\n check=check,\n channel=author_channel)\n if choice is None:\n await self.bot.say(\"You haven't chosen anything.\")\n return\n channel = channels[int(choice.content)]\n else:\n channel = channels[0]\n\n rift = OpenRift(source=author_channel, destination=channel)\n msgfilter = ['$', 'pp', 'paypal', 'moneypak', 'giftcard', 'gift card', 'PM me', 'DM', 'cash']\n\n self.open_rifts[author] = rift\n await self.bot.say(\"Monitor started\")\n msg = \"\"\n while msg == \"\" or msg is not None:\n msg = await self.bot.wait_for_message(author=author,\n channel=author_channel)\n if msg is not None and msg.content.lower() != \"exit\":\n try:\n blankvar = \"blankvar\"\n except:\n await self.bot.say(\"Script error #1\")\n elif msg.content.lower() in msgfilter:\n try:\n await self.bot.say(\"Your message may contain words referring to RMT. 
Your message has been logged and will be reviewed by Discord staff.\")\n except:\n await self.bot.say(\"Script error #2\")\n else:\n break\n del self.open_rifts[author]\n await self.bot.say(\"Stopping monitor.\")", "def PYDSO010SETCHAN(self):\n ctx = self.item_start() # always first line of test\n\n chan = ctx.item.chan\n if not (0 < chan < 5):\n self.logger.error(\"Invalid channel number: {} (1-4 accepted)\".format(chan))\n self.item_end(ResultAPI.RECORD_RESULT_INTERNAL_ERROR)\n return\n\n self.shared_lock(self.DSO).acquire()\n\n # reset the scope to a known state\n self.dso.write('*RST')\n if chan != 1: # after reset, chan 1 is already on\n self.dso.write(':CHANnel1:DISPlay OFF') # turn off channel 1\n self.dso.write(':CHANnel{}:DISPlay ON'.format(chan)) # turn off channel 1\n\n self.dso.write(':CHANnel{}:SCALe 100mV'.format(chan))\n\n vpp = self.dso.query(':MEASure:VPP? CHANnel{}'.format(chan))\n value = float(vpp)\n _result, _bullet = ctx.record.measurement(\"VPP{}\".format(chan), value, ResultAPI.UNIT_VOLTS)\n\n self.log_bullet(\"Switched to channel {}\".format(chan))\n self.log_bullet(_bullet)\n time.sleep(0.1) # give it some time to sit here, else its too fast\n self.shared_lock(self.DSO).release()\n self.item_end() # always last line of test", "def chat_voteskip(self, msg, *args):\n msg.to = msg.username\n self.emit('voteskip', {})", "def veto(self):\n self.console.write('veto')\n self.callvote = None", "def test_update_preference_communication_channel_id(self):\n # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.\n pass", "def channel_pressure(self, channel, pressure):\n log.debug(\"channel aftertouch: %d\", pressure)\n return", "def upvote_question(self):\n self.post_question()\n return self.client.patch('api/v2/questions/1/upvote',\n headers={\"Authorization\": \"{}\".format(self.token())})" ]
[ "0.6393847", "0.6147241", "0.59954494", "0.5947859", "0.5874632", "0.5840181", "0.5776968", "0.5758922", "0.5722969", "0.57202333", "0.5576358", "0.5565325", "0.5534625", "0.548925", "0.54667646", "0.5447865", "0.539419", "0.53278154", "0.53092563", "0.53039765", "0.528682", "0.5217865", "0.5210522", "0.51699525", "0.5149859", "0.51340574", "0.5121524", "0.5089862", "0.50854814", "0.5067571", "0.50372195", "0.50208616", "0.4998481", "0.49887964", "0.49835667", "0.4980362", "0.49647012", "0.49591675", "0.49555165", "0.4914191", "0.4910692", "0.48931557", "0.48701376", "0.48621157", "0.4861364", "0.4853501", "0.48471212", "0.48453197", "0.4837394", "0.4830702", "0.48294726", "0.4819881", "0.47973186", "0.4795108", "0.4791016", "0.47852337", "0.47684386", "0.47610304", "0.476088", "0.47584093", "0.47503167", "0.47487944", "0.474469", "0.47364414", "0.47334397", "0.47307572", "0.4727745", "0.4726798", "0.47220793", "0.47079095", "0.47078377", "0.46991667", "0.46800086", "0.4677242", "0.4674602", "0.4668383", "0.46663094", "0.4663285", "0.46583658", "0.4653366", "0.46450543", "0.46401134", "0.46305412", "0.46241793", "0.46161732", "0.46021903", "0.45966214", "0.45935506", "0.4590613", "0.45895207", "0.4588293", "0.4588162", "0.458647", "0.45761424", "0.45675707", "0.45457923", "0.4542984", "0.45364395", "0.45356405", "0.45197383" ]
0.6149552
1
[channel] Retrieves the vote count for a poll.
def votes(self, irc, msg, args, channel, pid): if channel and msg.args[0] in irc.state.channels: if msg.args[0] != channel: if ircdb.checkCapability(msg.prefix, 'admin') or ircdb.checkCapability(msg.prefix, 'owner'): irc.error("Not Implemented") else: irc.errorInvalid('argument', channel) elif msg.args[0] == channel: irc.error("Not Implemented")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_vote_count(self, post):\n return post.vote_set.count()", "async def _vote_count(\n self, ctx: Context, *, channel: discord.TextChannel = None\n ):\n\n guild: discord.Guild = ctx.guild\n\n if not channel:\n channel = await self.get_vote_channel(guild)\n if isinstance(channel, str):\n return await ctx.send(channel)\n\n history = await channel.history(oldest_first=True).flatten()\n if len(history) > 100:\n return await ctx.send(_(\n \"I couldn't identify a voting channel. Please specify one explicitly.\"\n ))\n else:\n history = await channel.history(oldest_first=True).flatten()\n if len(history) > 100:\n return await ctx.send(_(\n \"That channel has too many messages!\"\n \" Please ask a host for manual vote count.\"\n ))\n\n if len(history) < 1:\n return await ctx.send(_(\"{} is empty.\").format(channel.mention))\n\n user_votes = {}\n player_role = guild.get_role(\n await self.config.guild(guild).player_id()\n )\n\n for message in history:\n author = message.author\n if player_role not in author.roles:\n continue\n vote = self.get_vote_from_message(message)\n if not vote:\n continue\n user_votes[f\"{author.name}#{author.discriminator}\"] = vote\n\n user_votes = await self.get_non_voters(guild, user_votes)\n\n votes = {}\n for user in user_votes:\n val = user_votes[user].capitalize()\n try:\n votes[val].append(user)\n except KeyError:\n votes[val] = [user]\n\n # max votes first\n votes = dict(sorted(\n votes.items(), key=lambda item: len(item[1]), reverse=True\n ))\n\n # Pop and add stuff back to dict for ordering purpose.\n try:\n votes[\"VTNL\"] = votes.pop(\"Vtnl\")\n except KeyError:\n pass\n try:\n votes[\"No vote\"] = votes.pop(\"No vote\")\n except KeyError:\n pass\n\n txt = \"\"\n\n for i, vote in enumerate(votes, start=1):\n voters = votes[vote]\n\n if vote == \"VTNL\":\n txt += _(\"\\n\\n**{}** - {} ({})\").format(vote, len(voters), \", \".join(voters))\n elif vote == \"No vote\":\n txt += _(\"\\n\\n**Not voting** - {} ({})\").format(len(voters), \", \".join(voters))\n else:\n txt += _(\"\\n{}. 
**{}** - {} ({})\").format(i, vote, len(voters), \", \".join(voters))\n\n title = _(\"Vote Count\")\n\n embed = discord.Embed(\n color=0x00CDFF, title=title,\n description=_(\"__Counting from {} channel.__\\n\\n{}\").format(\n channel.mention, txt.strip()\n )\n )\n\n try:\n await ctx.send(embed=embed)\n except discord.Forbidden:\n await ctx.send(\n f\"**{title}**\\n\\n__Counting from {channel.mention}\"\n f\" channel.__\\n\\n{txt.strip()}\"\n )", "def vote_count(self):\n return QuestionVotes.objects.filter(question=self).count()", "def get_poll_progress(self, poll_key):\n poll_data = self.get_poll(poll_key)\n part_keys = poll_data['participants'].keys()\n num_participants = len(part_keys)\n num_voted = 0\n for part_key in part_keys:\n part_data = self.get_participant(part_key)\n if part_data['voted']:\n num_voted += 1\n return (num_voted, num_participants)", "def num_votes(self):\n return sum(self.votes_per_count)", "def count(self) -> int:\n return pulumi.get(self, \"count\")", "def count(self) -> float:\n return pulumi.get(self, \"count\")", "def snmpqosqos_sch_poll_countrate(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_sch_poll_countrate\n\t\texcept Exception as e:\n\t\t\traise e", "def count_votes(self):\n return self.annotate(sum=Sum('value'))", "def get_count(self):\n return self.count", "def get_count(self):\n return self.count", "def get_count(self):\r\n return self.count", "def snmpqosqos_sch_poll_count(self) :\n\t\ttry :\n\t\t\treturn self._snmpqosqos_sch_poll_count\n\t\texcept Exception as e:\n\t\t\traise e", "def GetCount(self):\n return self._server.get_count()", "def get_total_view_count(self):\n done = self.cur.execute(\"SELECT CAST(SUM(view_count) AS DECIMAL(10, 0)) FROM videos\")\n count = self.cur.fetchone()[0]\n return count", "def count(self):\n return self.get_count()", "def get_vote_count(php, vote_id):\n page = requests.get(php)\n soup = BeautifulSoup(page.text, \"html.parser\")\n total = list(soup.find_all(\"td\"))\n for i in range(len(total)):\n if vote_id in str(total[i].text):\n return int(total[i + 1].text[1:])\n return 0", "def get_track_count(self):\n self.app.curs.execute('select count(*) c from track')\n if self.app.curs.rowcount == 1:\n row = self.app.curs.fetchone()\n return row['c']\n else: # pragma: no cover\n return 0", "def number_of_subscribers(subreddit):\n response = requests.get('https://www.reddit.com/r/{}/about.json'\n .format(subreddit),\n headers={'User-Agent': 'Camilo@holberton'},\n allow_redirects=False)\n if response.status_code == 200:\n response = response.json()\n data = response.get('data')\n subs_count = data.get('subscribers')\n if data and subs_count:\n return subs_count\n return 0", "def get_count(self):\n return self._count", "def nay_voter_cnt(self):\n\n return len(self._nay_voters())", "def getCount(self):\n return self.count", "def count(self):\n with self._block:\n counter = re.search(r'count=(\\d+) ', repr(self))\n return int(counter.group(1))", "def get_count(self):\n\n\t\treturn self.__count", "def count(self):\n\n return self._get(\"count\", rtype=UInt)", "def count(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'count')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def count(self):\n return self.vcount", "async def on_count(ctx):\n count = get_count()\n await ctx.send(f'current count {count}')", "def GetCount(self):\n return(self.count)", "def count(self):\n return self._lift(\"count\")", "def number_of_subscribers(subreddit):\n url = requests.get(\"https://www.reddit.com/r/{}/about.json\"\n 
.format(subreddit), headers={\"User-Agent\": \"kalkidan\"})\n if url.status_code == 200:\n return url.json().get(\"data\").get(\"subscribers\")\n else:\n return 0", "def number_of_subscribers(subreddit):\n header = {\"User-agent\": \"darth\"}\n url = \"https://www.reddit.com/r/{}/about.json\".format(subreddit)\n response = (requests.get(url, headers=header))\n if response.status_code != 200:\n return 0\n return response.json().get('data').get('subscribers')", "def op_count(cls, crawler, stage=None):\n if stage:\n total_ops = cls.conn.get(make_key(crawler, stage))\n else:\n total_ops = cls.conn.get(make_key(crawler, \"total_ops\"))\n return unpack_int(total_ops)", "def number_of_subscribers(subreddit):\n url_rsubs = \"https://api.reddit.com/r/{}/about\".format(subreddit)\n headers = {'User-Agent': 'Python3'}\n response = requests.get(url_rsubs, headers=headers,\n allow_redirects=False)\n if str(response) != \"<Response [200]>\":\n return 0\n r_json = response.json()\n subs_count = r_json.get('data').get('subscribers')\n return subs_count", "def count(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"count\")", "def number_of_subscribers(subreddit):\n url = \"https://www.reddit.com/r/{}.json\".format(subreddit)\n r = requests.get(url, headers={'User-agent': 'shoji'},\n allow_redirects=False)\n data = r.json()\n if not r.status_code == 200:\n return 0\n try:\n sub = data.get(\"data\")\n children = sub.get(\"children\")\n subreddit = children[0].get(\"data\")\n subscriber_count = subreddit.get(\"subreddit_subscribers\")\n except Exception as e:\n print(\"Something went wrong\\n {}\".format(e))\n return 0\n\n return subscriber_count", "def VplsCount(self):\n return self._get_attribute('vplsCount')", "def get_tweet_count(self):\n return self.tweet_count.text", "def count(self) -> Optional[int]:\n return pulumi.get(self, \"count\")", "def count(self) -> Optional[int]:\n return pulumi.get(self, \"count\")", "def number_of_subscribers(subreddit):\n\n url = \"https://www.reddit.com/r/{}/about.json\".format(subreddit)\n headers = {\"User-Agent\": \"my-integration/1.2.3\"}\n\n response = get(url=url, headers=headers)\n\n if response.status_code == 200:\n # print(response.json())\n\n response_json = response.json()\n data = response_json.get('data')\n subscribers = data.get(\"subscribers\")\n\n return subscribers\n\n return 0", "def count_upvotes(self):\n return self.filter(value=1).count()", "def number_of_subscribers(subreddit):\n\n import requests\n\n resInf = requests.get(\"https://www.reddit.com/r/{}/about.json\"\n .format(subreddit),\n headers={\"User-Agent\": \"My-User-Agent\"},\n allow_redirects=False)\n if resInf.status_code >= 300:\n return 0\n\n return resInf.json().get(\"data\").get(\"subscribers\")", "def count(self) -> Optional[float]:\n return pulumi.get(self, \"count\")", "def get_count(owner, repo_slug, auth_tokens, endpoint):\n count_url = make_req_url(owner, repo_slug, endpoint, 0)\n response = send_bitbucket_request(count_url, auth_tokens)\n if response and 'count' in response:\n return response['count']-1\n return 0", "def number_of_subscribers(subreddit):\n URL = 'https://api.reddit.com/r/{}/about'.format(subreddit)\n header = {'User-Agent': 'Custom-User'}\n\n resp = requests.get(URL, headers=header).json()\n try:\n return resp['data']['subscribers']\n except Exception:\n return 0", "def _get_count(self, msg, subtype=\"all\"):\n try:\n counts = self.get_local(msg, \"counts\")\n return counts.get(subtype, 0)\n except KeyError:\n return 0", "def get_subscriber_count(self, 
response):\n return response.css('.yt-subscriber-count')\\\n .extract_first(default='')", "def number_of_subscribers(subreddit):\n url = \"https://api.reddit.com/r/{}/about\".format(subreddit)\n header = {'User-Agent': 'CustomClient/1.0'}\n request = requests.get(url, headers=header, allow_redirects=False)\n\n if request.status_code != 200:\n return 0\n jreq = request.json()\n\n if 'data' in jreq:\n return jreq.get(\"data\").get(\"subscribers\")\n else:\n return 0", "def number_of_subscribers(subreddit):\n if subreddit is None or type(subreddit) is not str:\n return 0\n BASE_URL = 'http://www.reddit.com/r/{}/about.json'\n head = {'User-Agent': 'Mozilla/5.0'}\n r = requests.get(BASE_URL.format(subreddit), headers=head)\n return r.json().get('data', {}).get('subscribers', 0)", "def get_count(self, entry):\n return entry.count", "def count(self):\n \n return self._count", "def count(self):\n return self.properties.get('count')", "def response_count(self) -> int:\n return pulumi.get(self, \"response_count\")", "def getCount(self):\n return self.base.get(\"count\", [])", "def number_of_subscribers(subreddit):\n\n url = \"https://www.reddit.com/r/{}/about.json\".format(subreddit)\n headers = {'user-agent': 'request'}\n response = requests.get(url, headers=headers, allow_redirects=False)\n if str(response) != '<Response [200]>':\n return 0\n response_json = response.json()\n subs = response_json.get('data').get('subscribers')\n return subs", "def get_winning_votes(self):\n try:\n votes = self.get_winner().votes\n except ValueError:\n votes = -1\n return votes", "def number_of_subscribers(subreddit):\n r = requests.get('https://api.reddit.com/r/{}/about.json'\n .format(subreddit),\n headers={'user-agent': 'ianscustomthing'},\n allow_redirects=False)\n rj = r.json()\n if rj.get('message') == 'Not Found':\n return 0\n s = rj.get('data').get('subscribers')\n return s", "def get_views(self, video_ID): # WORKS\n self.cur.execute(\"SELECT view_count FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n return self.cur.fetchone()[0]", "def consumer_count(self, obj):\n return obj.get_or_set_consumer_count()", "def number_of_subscribers(subreddit):\n url = \"https://www.reddit.com/r/{}/about.json\".format(subreddit)\n header = {\"Content-Type\": \"application/json\",\n \"User-Agent\": \"Mozilla/5.0\"}\n request = requests.get(\n url,\n headers=header,\n allow_redirects=False)\n if request.status_code >= 300:\n return 0\n return json.loads(request.content.decode(\"utf-8\"))[\"data\"][\"subscribers\"]", "def get_count(username):\n return get_contributor(username)[\"count\"]", "def number_of_subscribers(subreddit):\n import requests\n headers = {'User-Agent': 'Godfather'}\n about = requests.get(\n 'https://www.reddit.com/r/{}/about.json'.format(\n subreddit), headers=headers).json()\n try:\n subscribers = about.get('data').get('subscribers')\n if subscribers is None:\n raise TypeError\n return subscribers\n except:\n return 0", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def count(self):\n return self._count", "def number_of_subscribers(subreddit):\n\n url = 'https://www.reddit.com/r/{}/about.json'.format(subreddit)\n\n headers = {'User-Agent': 'My User Agent 1.0'}\n\n request = 
requests.get(url, headers=headers)\n req = request.json()\n\n if request.status_code == 404:\n return 0\n\n subs = req.get('data').get('subscribers')\n return subs", "def Count(self, limit=None):\n if limit is None:\n count = 0\n for i in self.Run():\n count += 1\n return count\n else:\n return len(self.Get(limit))", "def yay_voter_cnt(self):\n\n return len(self._yay_voters())", "def number_of_subscribers(subreddit):\n link = 'http://www.reddit.com/r/{}/about.json'.format(subreddit)\n red = requests.get(link, headers={'User-Agent': 'tope628'}).json()\n try:\n subs = red.get('data').get('subscribers')\n except:\n return 0\n if red is None:\n return 0\n return subs", "def number_of_subscribers(subreddit):\n url = 'https://www.reddit.com/r/{}/about.json'\n headers = {'user-agent': 'X-Modhash'}\n url_format = requests.get(url.format(subreddit), headers=headers).json()\n try:\n name = url_format['data']['subscribers']\n return name\n except:\n return 0", "def rtt_read_channel_count(self):\n down_channel_number = ctypes.c_uint32()\n up_channel_number = ctypes.c_uint32()\n\n result = self._lib.NRFJPROG_rtt_read_channel_count(ctypes.byref(down_channel_number), ctypes.byref(up_channel_number))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return down_channel_number.value, up_channel_number.value", "def get_toprated_with_count(self):\n\t\tconnection = self.connect_to_db()\n\t\tcursor = connection.cursor()\n\t\tcursor.execute('''select count(*) from movies;''')\n\t\tpage_count = cursor.fetchone()[0]\n\t\tconnection.close()\n\t\tpage_count = int(ceil(page_count))\n\t\treturn page_count", "def cmd_gallery_comment_count(client, args):\n gallery_comment_count = client.gallery_comment_count(args.item_id)\n generate_output({'gallery_comment_count': gallery_comment_count})", "def get_likes_count(self, instance):\n return instance.likes.count()", "def get_likes_count(self, instance):\n return instance.likes.count()", "def get_likes_count(self, instance):\n return instance.likes.count()", "def count(self) -> pulumi.Input[float]:\n return pulumi.get(self, \"count\")", "def count(self) -> pulumi.Input[float]:\n return pulumi.get(self, \"count\")", "def count_downvotes(self):\n return self.filter(value=-1).count()", "def TunnelCount(self):\n if self.force_auto_sync:\n self.get('TunnelCount')\n return self._TunnelCount", "def _get_count(visibility: str) -> int:\n if visibility == 'private':\n queue = gql(count_private)\n else:\n queue = gql(count_public)\n result = DB.client.execute(queue)\n count: int = int(result[f'flickr_{visibility}_aggregate']['aggregate']['count'])\n DB.count[visibility] = count\n return count", "def number_of_subscribers(subreddit):\n h = {'user-agent': 'GEEK1050'}\n link = \"https://www.reddit.com/r/{}/about.json\".format(subreddit)\n req = requests.get(link, headers=h)\n\n req_data = req.json().get(\"data\").get(\"subscribers\")\n for element in req_data['children']:\n print(element['children']['title'])", "def get_related_list_count(self, heading):\n locator = lex_locators[\"record\"][\"related\"][\"count\"].format(heading)\n count = self.selenium.get_webelement(locator).text\n count = count.replace(\"(\", \"\").replace(\")\", \"\")\n return int(count)", "async def count(self, **kw):\n\n pass", "def number_of_subscribers(subreddit):\n header = {'User-Agent': 'Chrome/90.0.4430.212 Safari/537.36'}\n req = requests.get('https://www.reddit.com/r/{}/about.json'\n .format(subreddit), allow_redirects=False,\n headers=header)\n if req.status_code == 200:\n subscribers = 
req.json().get('data').get('subscribers')\n return subscribers\n else:\n return 0", "def Count(self):\n return self._get_attribute('count')", "def Count(self):\r\n\t\treturn self._get_attribute('count')", "def Count(self):\r\n\t\treturn self._get_attribute('count')", "def count(self) -> int:\n return self._count", "def count(self) -> int:\n return self._count", "def count(self) -> int:\n return self._count", "def _get_conversation_counts(business_id, conversation_tab, survey_id, category, all_conversation_types):\n params = _get_secure_message_threads_params(\n survey_id, business_id, conversation_tab, category, all_conversation_types\n )\n url = f'{current_app.config[\"SECURE_MESSAGE_URL\"]}/messages/count'\n response = requests.get(url, headers={\"Authorization\": _get_jwt()}, params=params)\n return response", "def comments_count(self) -> int:\n return pulumi.get(self, \"comments_count\")" ]
[ "0.6773994", "0.6712171", "0.66255003", "0.64222467", "0.64080846", "0.6403009", "0.6307542", "0.627525", "0.62508446", "0.6187831", "0.6187831", "0.61792505", "0.6178627", "0.61383927", "0.61028653", "0.6074583", "0.60575265", "0.60141224", "0.6011861", "0.60046905", "0.6000088", "0.5976957", "0.5962545", "0.596217", "0.59541976", "0.594887", "0.59440905", "0.59365857", "0.5923276", "0.5910513", "0.5906501", "0.5905327", "0.5898566", "0.5882738", "0.5878404", "0.58675784", "0.5855856", "0.5855459", "0.58459395", "0.58459395", "0.5845567", "0.58441", "0.58269763", "0.581663", "0.5810808", "0.57987356", "0.5786094", "0.5780461", "0.57659245", "0.57611114", "0.5753507", "0.5739175", "0.57372034", "0.57358915", "0.57150143", "0.5712062", "0.5711803", "0.5705236", "0.57018864", "0.5700292", "0.5690942", "0.5688967", "0.56859916", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.5683976", "0.56825036", "0.5678411", "0.5678033", "0.56687", "0.56567216", "0.5655409", "0.5652856", "0.5651997", "0.565168", "0.565168", "0.565168", "0.56457514", "0.56457514", "0.5636421", "0.5631721", "0.5628996", "0.5628924", "0.56284714", "0.56273633", "0.56225115", "0.56121117", "0.56104976", "0.56104976", "0.5608401", "0.5608401", "0.5608401", "0.55936396", "0.5591424" ]
0.0
-1
Marks a poll as finished. This is limited to channel ops.
def conclude(self, irc, msg, args, channel, pid):
    if msg.nick in irc.state.channels[channel].ops:
        if channel in self.polls.keys():
            try:
                self.polls[channel][pid]['concluded'] = True
                self._dump(self.polls)
                irc.reply("Marked poll #%s (%s) as concluded." % (pid, self.polls[channel][pid]['question']))
            except IndexError:
                irc.error("'%s' does not have a poll with that index.")
            except KeyError:
                irc.error("This may be a bug with the bot or poll file, please submit an issue at\
                <https://github.com/IotaSpencer/supyplugins> with all pertinent information.")
    else:
        irc.error("Access Denied.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def finish(self):\n self.cond.acquire()\n try:\n self.closed = True\n self.cond.notify()\n finally:\n self.cond.release()", "def finish(self):\n with self._lock: # just to be tidy; lock not really needed to set a boolean\n self._done = True", "def set_status_update_waiter_finished(self):\n self.set_state(CHANNEL_MOVE_STATE_FINISHED)\n self.set_status_update_waiter()", "def finish(self):\n sev = 'info'\n desc = 'successful'\n col = 0\n if not self.derive_values and not self.gauge_values:\n sev = 'error'\n desc = 'unsuccessful'\n else:\n col = len(self.derive_values) + len(self.gauge_values)\n\n dur = time.time() - self.poll_start_time\n getattr(self.logger, sev)('%s poll %s' % (self.__class__.__name__, desc),\n extra={\"duration\": \"%.3f\" % dur, \"collected\": col})", "def done(self):\n self._ready.clear()\n self._done.set()", "def _done(self):\n self._doneFlag = True\n self._executionCompletedNotifier.notify(self)", "def task_done(self):\n self._queue.task_done()", "def finish(self):\r\n\r\n self._is_finished = True", "def done(self):\n self.__queue.task_done()", "def jobComplete(self):\n self._Finished = True\n return", "def set_task_finished(self):\n self.busy = False", "def mark_as_done(self):\n self.status = \"DONE\"", "def finish(self):\n self.finished = self.finished + 1\n if self._finishedDeferreds is not None:\n observers = self._finishedDeferreds\n self._finishedDeferreds = None\n for obs in observers:\n obs.callback(None)", "def complete(self, update):\n self.reactor.callFromThread(self._complete, update)", "def Finish(self):\n\t\n self.queue.join()", "def finished(self, reply):\n pass", "def send_finish_event(self):\n self.status['type'] = '__end__'\n self._send()", "def mark_chunk_completed(self, chunk):\n self._chunk_done[chunk] = True\n if self.is_completed:\n self.completed.trigger()\n self.close()\n self.chunk_completed.trigger(chunk)", "def wait_complete(self):\n self.join()", "def tellIfEnded(self):\n self.congratulate()", "def notify_finish_event(self):\n self.notify(self._finish_event_type())", "def thread_finished(self):\n # self.worker.join()\n self.worker = None\n self.want_to_abort = False", "def finished(self):\n pass", "def mark_as_done(self):\n grade_event = {'value': self.points, 'max_value': self.points}\n self.runtime.publish(self, 'grade', grade_event)", "def done(self) -> None:\n\n self.event_.set()\n for cb in self.callbacks:\n cb()", "def end(self):\n self.my_print(\"\\t[DONE]\", msg_types.INFO)\n self.in_progress = False", "def finished(self, result):\n raise NotImplementedError(\"Subclasses mut override finished()\")", "def task_done(self):\n self.__data[\"status\"] = TASK.DONE # Set status done for task\n self.__data[\"eor\"] = time.time() # Update last end of run\n self.task_changed([\"status\", \"eor\"]) # Send changed event", "def finished(self):\n\t\telog(\"finished\")", "def finished(self):\n raise NotImplementedError()", "def task_done(self) -> None:\n pass", "def finish(self):\r\n self.start_finish()\r\n self.wait_finish()", "def finished(self):\r\n raise NotImplementedError", "def complete(self, results):\n self.api.debug(f\"{self}.complete({repr(results)[:DEBUG_TRUNCATE_RESULTS]})\")\n self.elapsed = time.time() - self.started\n if not self.done:\n ret = self.format_results(results)\n # TODO: fix sendString test\n if self.callable.callback.__name__ == 'sendString':\n ret = '%s.%s: %s' % (self.api.channel, self.label, ret)\n self.callable.callback(ret)\n self.callable = None\n self.done = True\n else:\n self.api.error_handler(\n self.id, 
'%s completed after timeout: callback=%s elapsed=%.2f' % (self.label, f\"{self}\", self.elapsed)\n )\n self.api.debug(f\"{self} results={repr(results)[:DEBUG_TRUNCATE_RESULTS]}\")\n self.api.record_callback_metrics(self.label, int(self.elapsed * 1000), self.expired)", "def wait_finish(self):\r\n self.proc.join()", "def finished():\n radio.finished()", "def acq_done(self, pvname=None, **kws):\n if kws['value'] == 0:\n self.eventq.put('finish')", "def _finish(self):\n if self.consumer:\n self.consumer.unregisterProducer()\n self.consumer = None\n\n if not self.deferred.called:\n self.deferred.callback(None)", "def thread_complete(self):\n print(\"TREAD COMPLETE (good or bad)!\")", "def job_done(self, success):\n run_usage = self._attempt.get_usage()\n self._usage.append(run_usage)\n\n log.debug(\"job_done job_id=%s success=%s (last attempt %s\", self.job_id, success, self._attempt_ids[-1])\n self._attempt = None", "def complete(self):\n self._is_complete = True", "def complete(self):\n self.chunk_percentage = [1.0] * self.total_steps\n self.progress_updated.emit(self.percentage)\n self.initialized = False", "def succeeded(self):\n self.did_end = True", "def finalize(self):\r\n if self._cut_off:\r\n return\r\n self._cut_off = True\r\n self.queue.put((None,))\r\n if self.progressbar_overall and not self.determinate:\r\n self.progressbar_overall.set_max(True, self._total)", "def done_action(self) -> None:\n self.end = datetime.now()", "def shutdown(self):\r\n self.done = True", "def _do_done(self, event):\n self._done(event.result)", "def _do_done(self, event):\n self._done(event.result)", "def _finished(self):\n try:\n self._close()\n except Exception, e:\n log.warn(\"Failed to close device %s: error %s\" %\n (self._devId, str(e)))", "def task_done(self):\n if hasattr(self._input, \"task_done\"):\n self._input.task_done()", "def done(self):\n self._flush()", "def finish(self, result=CallResult.no_result):\n assert not self.state is CallState.finished\n\n self.state = CallState.finished\n self.finish_time = time()\n self.result = result", "def _onEnd(self, name, completed):\n logging.debug(\"onEnd...\")", "async def wait_until_done(self) -> None:\n ...", "def __finish(self):\n self.finished.emit()", "def finish(self, block=True):\n # Notify the thread to finish\n with self.c:\n self.alive = False\n self.c.notify()\n\n if block:\n self.join()", "def finish(self, id, result=NO_RESULT):\n def _finish(pipe):\n if pipe.zrank(self.feed_claimed, id) is None:\n return # raise exception?\n pipe.multi()\n pipe.zrem(self.feed_claimed, id)\n pipe.hdel(self.feed_cancelled, id)\n pipe.zrem(self.feed_published, id)\n pipe.incr(self.feed_finishes)\n if result is not self.NO_RESULT:\n self.thoonk._publish(self.job_finish, (id, result), pipe)\n pipe.hdel(self.feed_items, id)\n \n self.redis.transaction(_finish, self.feed_claimed)", "def finished(self):", "def sync_end(self):", "def notify_end(self, status, objective):\n pass # pragma: no cover", "def mark_as_done(self, task):\n raise NotImplementedError('')", "def _end(self):\n\n self.logger.msg1(\"Done\")", "def jobFinished(self):\n if not self._job.result and not self._job.aborted:\n self.showErrorMessage()\n del self._job\n self.deleteLater()", "def _notify_listeners_end_operation(self):\n op = self.operation\n for lstnr in self.listeners:\n lstnr.end_operation(self, op)\n self.operation = None", "def close_poll(self, poll_key):\n poll_data = self.get_poll(poll_key)\n poll_data['ongoing'] = False\n self.client.set(poll_key, dumps(poll_data))", "def 
submit_complete( self ):\n cfunc.submit_complete( self )", "def complete(self):\n pass", "def task_done(self):\r\n if self._unfinished_tasks <= 0:\r\n raise ValueError('task_done() called too many times')\r\n self._unfinished_tasks -= 1\r\n if self._unfinished_tasks == 0:\r\n self._finished.set()", "def _finished(self) -> None:", "def exit(self) -> None:\n\n self.result = self.handle_success('finished-task')", "def end(t=None):\n if not task.done():\n # cancel timeout if needed\n if t is False:\n timeout.cancel()\n # detach events\n context.detach_events(*events_)\n # clean refs\n events_[:] = []\n # set results\n task.set_result(process_results(results=results, timeout=bool(t)))", "def report_finish(self):\n # TODO: remove changing the pended time descriptive, as the signal does this already!\n self.status = Status.FINISHED\n self.pended_time_descriptive = self.pending_time_descriptive\n self.save()", "def syncDone (self) :\r\n self.ongoing_sync_count -= 1", "def finish_subsystem(self):\n self.__channel.close()", "def task_done(self):\n if self.message is None:\n raise Exception('no message to acknowledge')\n self.handle.delete_message(self.message)\n self.message = None", "def notify_complete(self, status=\"Done\") -> None:\n\n self.progress = 100\n self.update(status)", "def setToFinish(self):\n self.finish = True", "def onDone(self):\n pass", "async def finalize(self, timed_out: bool) -> None:\n\n async with self.condition:\n self.condition.notify()\n\n if timed_out:\n await self.stop()", "def finishWait(self):\r\n self.scheduler.finishWait()", "def endWrite(self):\n self.writing = False\n if len(self.okToRead._waiters) > 0:\n self.okToRead.notify()\n else:\n self.okToWrite.notify()\n self.okToRead.release()\n self.okToWrite.release()", "def finished(self):\n return self._state == FINISHED_STATE", "async def end(self, roles, dialogs):\n self.ended = True", "def task_done(self) -> None:\n if self._unfinished_tasks <= 0:\n raise ValueError(\"task_done() called too many times\")\n self._unfinished_tasks -= 1\n if self._unfinished_tasks == 0:\n self._finished.set()", "def is_poll_complete(self, poll_resp):\n success_list = ['UpdatesComplete', True, 'COMPLETE']\n status = poll_resp.get('Status', poll_resp.get('status'))\n if not status:\n raise RuntimeError('Unable to get poll response status.')\n return status in success_list", "def completed(self, completed):\n\n self._completed = completed", "def completed(self, completed):\n\n self._completed = completed", "def completed(self, completed):\n\n self._completed = completed", "def tellJobDone(self, clipboard=None):\n origid = self.jobclient.getOriginatorId()\n if clipboard:\n if clipboard.has_key(\"originatorId\"):\n origid = clipboard.get(\"originatorId\")\n else:\n self.log.log(Log.WARN, \"OriginatorId not found on clipboard\")\n print \"DEBUG: clipboard keys:\", str(clipboard.keys())\n if len(self.dataclients) > 0:\n self.log.log(Log.INFO-5, \"reporting the completed files\")\n self.tellDataReady(clipboard)\n self.jobclient.tellDone(self.jobsuccess, origid)", "def end(self):\n self._watch_file = False\n self.experiment.end()\n if self.thread:\n self.thread.join(timeout=self._monitor_thread_timeout)", "def finished(self):\n self.update(self._total)", "def end(self):\n self._log.debug('%s: doing ..', __class__.__name__)\n self._log.debug('%s: done.', __class__.__name__)", "def finalize(self):\n self.busy = False\n self.pipe_start.send((\"FINISH\",None))\n self.process.join()\n if self.process.is_alive():\n self.process.terminate()", "def 
on_finish(self):\n return self._on_finish", "def handle_done(future):\n results[in_progress[future]] = future.result()\n del in_progress[future]", "def tellDone(self, success, originatorId):\n self.jobSender.send(self.jobSender.createJobDoneEvent(self.name,\n success,\n originatorId))", "def finishThread(self):\n logging.info(\"Fin Thread\")\n self.buildCreatedDict()\n self.cleanThread()\n self.accept()", "def complete(self):\n if self._completion_status is None:\n raise RuntimeError(\"No collection in progress\")\n \n while self.motor.moving:\n time.sleep(self.poll_delay_s)\n \n return self._completion_status", "def on_message_callback_complete(self, basic_deliver, future):\n self._connection.ioloop.add_callback_threadsafe(\n partial(self.finish_message, basic_deliver, future)\n )", "def done(self):\n if self.pbar is not None:\n self.pbar.close()\n self.pbar = None\n self.counter = 0", "def finalize(self):\n sys.stderr.write(f\"{self._message} finished after {(time.time()-self._startTime):.1f}s \"\n \"at \"+time.strftime(\"%H:%M:%S\", time.localtime())+\" \\n\")" ]
[ "0.66311806", "0.6554593", "0.65156335", "0.6456887", "0.6304259", "0.62279236", "0.6223889", "0.6176054", "0.61735076", "0.61373097", "0.6120679", "0.60965186", "0.60947", "0.6072286", "0.5981773", "0.5979095", "0.5972629", "0.59477633", "0.5941002", "0.59198123", "0.5911216", "0.5908918", "0.5887443", "0.5855468", "0.5855097", "0.5855058", "0.5815562", "0.5790749", "0.57672393", "0.5767007", "0.5763369", "0.5757481", "0.575658", "0.5732167", "0.5728328", "0.57189184", "0.5711091", "0.57097113", "0.57062083", "0.5705702", "0.56813264", "0.56807405", "0.56582147", "0.5632656", "0.561876", "0.5614949", "0.56111455", "0.56111455", "0.56002533", "0.55911577", "0.5585485", "0.5583063", "0.5573861", "0.5564157", "0.5559544", "0.55555475", "0.5554155", "0.5526592", "0.55257094", "0.55197173", "0.5501481", "0.5500032", "0.5489843", "0.5476144", "0.5473061", "0.54665554", "0.5464104", "0.54588765", "0.545435", "0.54529095", "0.5448337", "0.5441369", "0.54351896", "0.5428899", "0.54253024", "0.5423931", "0.54135656", "0.5392532", "0.5392002", "0.53910446", "0.5386831", "0.53823453", "0.5366167", "0.53646404", "0.53583306", "0.5350086", "0.5350086", "0.5350086", "0.53443176", "0.5343209", "0.53416526", "0.5339863", "0.5338745", "0.5335297", "0.5333738", "0.5322988", "0.5317891", "0.53158355", "0.53125304", "0.53089607", "0.5308732" ]
0.0
-1
Removes a poll entirely.
def rempoll(self, irc, msg, args, channel, pid):
    if msg.nick in irc.state.channels[channel].ops:
        del self.polls[channel][pid]
        self._dump(self.polls)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove(self):\n if self.removed:\n return\n self._remove()\n self.removed = True", "async def will_remove_from_hass(self) -> None:\n if self.unsub_update:\n self.unsub_update()\n self.unsub_update = None", "async def async_will_remove_from_hass(self):\n self._unsub_dispatcher()", "def unwatch(self, tid):\n link = self._refs.pop(tid, None)\n current = greenlet.getcurrent()\n if hasattr(current, 'unlink'):\n # This is a Gevent enhanced Greenlet. Remove the SpawnedLink we\n # linked to it.\n current.unlink(link)", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def remove(self):\n self._delete()", "def unwatch(self):\n pass", "def clear(self):\n self.queue.clear()", "def delete_last(self):\n self.deque.pop()", "def delete(self) -> None:\n self.pop()", "def _rem_offer(self, matchid, reason):\n logging.debug('Removing offer %s' % matchid)\n o = self.offers.pop(matchid, None)\n if o:\n self.comm.rescind_offer(o, reason)", "def pop(self):\n self.list.pop()", "def remove(self, resource, _no_min=False):\n\t\tif isinstance(resource, ResourceWrapper):\n\t\t\twrapper, resource = resource, resource._resource\n\t\t\twrapper._pool = None\n\t\tif resource not in self.members:\n\t\t\traise ValueError(\"Given resource is not owned by this pool\")\n\t\tfor collection in (self.to_clean, self.used, self.members):\n\t\t\tif resource in collection:\n\t\t\t\tcollection.remove(resource)\n\t\tif not _no_min:\n\t\t\t# create back up to min if needed\n\t\t\tself._ensure_min()", "def _async_untrack_subscription(self, subscription: Subscription) -> None:\n topic = subscription.topic\n try:\n if _is_simple_match(topic):\n simple_subscriptions = self._simple_subscriptions\n simple_subscriptions[topic].remove(subscription)\n if not simple_subscriptions[topic]:\n del simple_subscriptions[topic]\n else:\n self._wildcard_subscriptions.remove(subscription)\n except (KeyError, ValueError) as ex:\n raise HomeAssistantError(\"Can't remove subscription twice\") from ex", "def remove(self):\r\n if self.first() is not None:\r\n self.dec_size()\r\n self.set_first(self.first().next())\r\n if self.size() == 0: # when there are no more elements in the list,\r\n self.__last = None # remove the pointer to the last element\r", "def remove(self):\r\n\t\tself._delete()", "def unsubscribe(self):\n\n # Unsubscribe\n self.pyrps.redis.srem(self.pyrps._ns_subscriptions(self.queue), self.consumer_id) \n\n # Remove message queue\n self.pyrps.redis.delete(self.pyrps._ns_queue(self.queue, self.consumer_id))", "def remove(self, pid):\n self._pids.discard(pid)", "def _remove_timer(self, timer):\n if timer.end is None:\n try:\n self._callbacks.remove(timer)\n except ValueError:\n pass # Callback not present.\n else:\n try:\n self._deferreds.remove(timer)\n except ValueError:\n pass # Callback not present.", "def remove_to_deletes(self):\n go = True\n while go:\n go = False\n for op in self.queue:\n if op.delete:\n self.queue.remove(op)\n go = True\n break", "def clear(self):\n self.pointscontroller.pop(self.currentlyadded)", "def remove_measurement(self):\n idx = self.measurementsListWidget.currentRow()\n 
if len(self.mgr.obj.measurements) > 0:\n key = list(self.mgr.obj.measurements)[idx]\n del self.mgr.obj.measurements[key]\n\n # Flag the Survey as changed\n self.mgr.changed = True\n\n # Refresh lists/tables\n self.load_measurements()\n nmeas = len(self.mgr.obj.measurements)\n if nmeas > 0:\n self.measurementsListWidget.setCurrentRow(min(idx, nmeas-1))", "def remove(self) -> T:\n if not self.is_empty():\n return self._queue.pop()", "def remove_slide_timeout(self, widget):\n try:\n gobject.source_remove(self.timeouts.pop(widget)[0])\n except KeyError:\n pass", "def delete_first(self):\n self.deque.pop(0)", "def clear_queue(self):\n while not self.queue.empty():\n self.queue.get()", "def remove(self):\n return self.queue.popleft()", "def clear(self):\n self.queue = Queue()", "def discard(self):\r\n self.pushes.pop()", "def clear(self):\r\n try:\r\n while True:\r\n self.pop()\r\n except KeyError:\r\n pass", "def pop(self):\n temp = self.tail.prev\n self.tail.prev = None\n self.tail = temp\n self.tail.next = None\n self.len -= 1", "def clear_socket(self):\n if hasattr(self, \"_socket\"):\n if isinstance(self.poller.sockets, dict):\n sockets = list(self.poller.sockets.keys())\n for socket in sockets:\n log.trace(\"Unregistering socket: %s\", socket)\n self.poller.unregister(socket)\n else:\n for socket in self.poller.sockets:\n log.trace(\"Unregistering socket: %s\", socket)\n self.poller.unregister(socket[0])\n del self._socket", "def remove_update_listener(self, listener: WorklistUpdateListener):\n self.__worklist_update_listeners.remove(listener)", "def clear_queue(self):\n self.queue = deque()", "def _remove_sub(sub):\n # counting publisher instance per topic name\n TopicBack.sub_instance_count[sub.name] -= 1\n\n # Be aware of https://github.com/ros/ros_comm/issues/111\n return sub.unregister()", "def remove_streaming(self):\n self.streaming = None", "def remove_song(self):\n self.stop()\n self.listbox.delete(\"anchor\")\n pygame.mixer.music.stop()", "def remove_piece(self) -> None:\r\n if self.has_piece():\r\n self.piece.square = None\r\n self.piece = None", "def clean(self):\r\n # Note that we do not close the connection here -- somebody\r\n # may still be reading from it.\r\n while len(self.queue) > 0 and self._pair_stale(self.queue[0]):\r\n self.queue.pop(0)", "def unsubscribe(self, id):\n self._signal_pool_uids.pop(id)\n self._signal_pool.unsubscribe(id)", "def remove(self):\n self._switch.odlclient._request(self._path, method=\"delete\")", "def remove(self):\n if self._parent:\n self._parent.removeChild(self)\n else:\n self.clear()", "def unsubscribe_from_perf_stats(self):\n\t\treturn Job(SDK.PrlSrv_UnsubscribeFromPerfStats(self.handle)[0])", "def clear(self):\r\n try:\r\n while not self._queue.empty():\r\n self._queue.get().close()\r\n except:\r\n pass", "def _remove_worker(self, worker: Worker) -> None:\n self._workers.discard(worker)", "def remove_sweep(self, sweep):\n\n try:\n self._sweeps.remove(sweep)\n except ValueError:\n pass", "def remove_sweep(self, sweep):\n\n try:\n self._sweeps.remove(sweep)\n except ValueError:\n pass", "def disarm(self):\n return self.__qf.removeTimer(self)", "def remove():", "async def clear(self):\n async with self._cond:\n await self._do_clear()", "def clear(self):\n self._clear_without_update()\n self.update()", "def clear_playlist(self, playlist_name):\n print(\"clears_playlist needs implementation\")", "def pop(self):\r\n while self.pq:\r\n priority, count, task = heapq.heappop(self.pq)\r\n if task is not self.REMOVED:\r\n del 
self.entry_finder[task]\r\n return task\r\n raise KeyError('pop from an empty priority queue')", "async def async_clear_playlist(self):\n await self._player.async_clear_playlist()", "def clear_stream_backlog(self, subscription):\n\n request = Request(\n method='post',\n endpoint='/streams/clearbacklog/{}'.format(subscription)\n )\n\n def response_handler(resp):\n code = resp.status_code\n if resp.is_success:\n return 'OK'\n elif code == 403:\n raise ex.StreamPermissionError(resp, request)\n raise ex.StreamConnectionError(resp, request)\n\n return self._execute(request, response_handler)", "def clear(self):\n wait(self.proto.vanish())", "def clear(self):\n with self._not_full:\n with self._not_empty:\n with self._mutex:\n self.close()\n self._queue.clear()\n self._cur_size = 0", "async def async_will_remove_from_hass(self) -> None:\n async_unsubscribe_topics(self.hass, self._sub_state)\n self._sub_state = None", "def poll(self):\n assert len(self.heap) > 0, \"ERROR: Heap is empty.\"\n item = self.heap[0]\n self.heap[0] = self.heap.pop()\n self.heapify_down()\n return item", "def clear(self):\n self.head = None", "def purge(self):\n self.remaining = 0", "def unsubscribe_from_perf_stats(self):\n\t\treturn Job(SDK.PrlVm_UnsubscribeFromPerfStats(self.handle)[0])", "def remove_watch(callback: Callable[[Event], None]) -> None:\n if callback not in _event_watch_handles:\n warnings.warn(f\"{callback} is not an active event watcher, nothing was removed.\", RuntimeWarning)\n return\n handle = _event_watch_handles[callback]\n lib.SDL_DelEventWatch(lib._sdl_event_watcher, handle)\n del _event_watch_handles[callback]", "def drop_message(self):\n heapq.heappop(self._message_queue)", "def remove_op(self, op):\n self._operations.remove(op)", "def drop(self, pid):\n return self._pids.pop(pid, None)", "def remove_from_queue(self, confid):\n\n queued_ids = self.c.select(queued=1, gaid=confid)\n ids = [q.id for q in queued_ids]\n self.c.delete(ids)", "def clear(self):\n\n self.__fasteners.clear()\n self.__update()", "def _remove_buffer(self):\n if self._buffer is not None:\n self._engine.remove_window(self._buffer)\n self._buffer = None\n self._region = None", "def unscheduleFirst(self, runnable):\n for evt in self.store.query(TimedEvent, TimedEvent.runnable == runnable, sort=TimedEvent.time.ascending):\n evt.deleteFromStore()\n break", "def _remove_listener(self):\n if self._listener:\n self.hass.bus.remove_listener(EVENT_TIME_CHANGED,\n self._listener)\n self._listener = None", "def remove_tail(self):\n pointer = self.head\n while pointer.next_node.next_node:\n pointer = pointer.next_node\n pointer.next_node = None\n return self", "async def async_will_remove_from_hass(self) -> None:\n self._disconnect_dispatcher()", "def unsubscribe(self):\n pass # pragma: no cover", "def unsubscribe(self):\r\n self._unregister()", "def _defunct(self):\n while self._popen.poll() is None:\n time.sleep(0.1)", "def clear(self):\n self._clear()\n self._update()", "def remove(self):\n pass", "def remove(self):\n pass", "def remove(self):\n pass", "def remove_from_tail(self):\n\n if self.size == 0: # if list is empty\n return None # nothing to remove; return out\n\n tail_to_remove = self.tail # copy value of current tail before deletion (for return)\n tail_to_remove.prev = tail_to_remove.next = None # remove any ties to list\n\n if self.size == 1: # if only one item in list\n self.head = self.tail = None # list will now be empty\n\n else:\n self.tail.prev.next = None # reassign new tail's prev to None (last item)\n self.tail = 
self.tail.prev # shift tail left\n\n self.size -= 1 # decrease size (deleting el)\n return tail_to_remove.value # return value of removed tail", "def pop(self):\n if not self.empty():\n self.size -= 1\n return heapq.heappop(self.queue)\n else:\n return None", "def clear_broadcast(param=None):\n global command_queue\n for i, command in enumerate(command_queue):\n if (command['param'] == param or param is None) and command['type'] == RECURRING:\n command_queue.pop(i)\n break", "def remove_queue(self, queue):\n with self.mutex:\n self.queues.remove(queue)", "def unlisten(self, prefix: str) -> None:\n assert len(prefix) == 1\n del self.queues[prefix]\n self.logger.info(\"No longer polling for message type: %s\", prefix)", "def clear(self):\n self.events[get_ident()][0].clear()", "def _onremove(self):\n self._channellist.remove(self)\n self.deleteLater()", "def unblock( self, fn ):\n\n self._once.remove( fn )", "def remove_subscription(self, url):\n deleted_resource_id = self.client_URI_endpoints.pop(url)\n # TODO del subscription resource from EventDestinationCollection resrc\n # TODO implement remove resource method for CollectionResources\n self.write_subscriptions_to_tmp(self.client_URI_endpoints)" ]
[ "0.5543764", "0.55292016", "0.54858243", "0.5470905", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54448193", "0.54426503", "0.5439495", "0.5427946", "0.54251266", "0.54209596", "0.5400807", "0.5398049", "0.5372225", "0.5361379", "0.53577787", "0.5350196", "0.5309843", "0.5309354", "0.5287839", "0.5287701", "0.5287231", "0.52845657", "0.5282971", "0.52758384", "0.52644366", "0.52628773", "0.5250934", "0.5247665", "0.5239269", "0.5232853", "0.5227145", "0.5214089", "0.5210711", "0.5190007", "0.5183337", "0.5177781", "0.5169103", "0.5169072", "0.5163265", "0.5158443", "0.51557964", "0.5143185", "0.51372963", "0.5129696", "0.51191044", "0.51191044", "0.51041627", "0.5099899", "0.50965744", "0.5084805", "0.508461", "0.50787324", "0.5073907", "0.50645936", "0.50640893", "0.50611675", "0.5059191", "0.50544405", "0.50497985", "0.50453585", "0.50346607", "0.50318134", "0.5030563", "0.50282717", "0.50253993", "0.50206685", "0.50148064", "0.5010821", "0.50064707", "0.5003943", "0.4998843", "0.49946025", "0.49942327", "0.49894187", "0.49869105", "0.4986714", "0.49860457", "0.49860457", "0.49860457", "0.49836075", "0.49791655", "0.4977415", "0.49706432", "0.4965082", "0.49601483", "0.49532634", "0.4951197", "0.49511617" ]
0.0
-1
Lists all the RuntimeConfig resources within project.
def ListConfigs(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list():\n project_root = get_project_root()\n config_file = os.path.join(project_root, CONFIG_DIR, CONFIG_FILE)\n if os.path.exists(config_file):\n kwargs = load_yaml_file(config_file)\n if PACKAGE_INDEX_KEY in kwargs:\n packages = load_yaml_url(kwargs[PACKAGE_INDEX_KEY])\n # Should update and look in .carme/config\n else:\n packages = load_yaml_url(PACKAGE_INDEX)\n\n ruamel.yaml.dump(packages, sys.stdout, Dumper=ruamel.yaml.RoundTripDumper)", "def list_config():\n console = Console()\n _config = loadConfig()\n json_data = richJSON.from_data({**asdict(_config)})\n console.print(Panel(json_data, title=\"SubmarineCliConfig\"))", "def list(self):\n for item in self._config:\n item.list()", "def list_configurations(MaxResults=None, NextToken=None):\n pass", "def _config_list(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n lines = []\n for config in res['configs']:\n line = '* ' if config['current'] else ' '\n\n if ctx.verbose:\n line += config['mtime'] + ' '\n\n line += config['name']\n lines.append(line)\n\n return \"\\n\".join(lines)", "def resources(self):\n return list(self.get_resources_for_type(gdef.ResType_All))", "def get_resources(self):\n return []", "def _load_resources(self):\n puts = (getattr(self, 'project', None) or self).puts\n for resource_type, resource_cls in six.iteritems(AVAILABLE_RESOURCES):\n for name in self.settings.get(resource_type, {}):\n extra = {\n 'project': getattr(self, 'project', None) or self,\n 'app': self if hasattr(self, 'project') else None,\n }\n\n with indent(4 if hasattr(self, 'project') else 2):\n puts(colored.green(u\"✓ {}:{}\".format(resource_type, name)))\n\n self._resources[resource_type].append(\n resource_cls.factory(\n name=name,\n settings=self.settings.get(resource_type, {})[name],\n **extra\n )\n )", "def test_config_list():\n client = TestClient()\n client.run('config list')\n assert \"Supported Conan *experimental* global.conf and [conf] properties:\" in client.out\n for key, description in BUILT_IN_CONFS.items():\n assert \"{}: {}\".format(key, description) in client.out", "def list_runtimes(self, runtime_name='all'):\n return self.compute_handler.list_runtimes(runtime_name)", "def loadconfig():\n CONFIG['static_folder'] = str(Path(Path(APP.root_path).parent, 'static'))\n\n for cfile in Path(APP.instance_path).iterdir():\n if cfile.name[-5:] == '.json' and cfile.name != 'config.json':\n name = cfile.name[:-5]\n LOG.debug(\"Loading \" + name)\n with cfile.open() as json_data_file:\n CONFIG[name] = json.load(json_data_file)", "def list_configurations(ctx):\n config_set = __ensure_configuration_exists(ctx)\n formatter = ConfigSetListFormatter.build(config_set, format='plain')\n out = formatter.format()\n\n click.echo(out)", "def test_list_deployment_config_for_all_namespaces(self):\n pass", "def list_deployment_config(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_deployment_config\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/deploymentconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 
'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1DeploymentConfigList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def list_runtimes(config, backend, debug):\n log_level = logging.INFO if not debug else logging.DEBUG\n setup_lithops_logger(log_level)\n\n if config:\n config = load_yaml_config(config)\n\n config_ow = set_config_ow(backend, runtime_name='None')\n config = default_config(config, config_ow, load_storage_config=False)\n\n if config['lithops']['mode'] != SERVERLESS:\n raise Exception('\"lithops runtime list\" command is only valid for serverless backends')\n\n compute_config = extract_serverless_config(config)\n compute_handler = ServerlessHandler(compute_config, None)\n runtimes = compute_handler.list_runtimes()\n\n if runtimes:\n width = max([len(runtime[0]) for runtime in runtimes])\n\n print('\\n{:{width}} \\t {}'.format('Runtime Name', 'Memory Size (MB)', width=width))\n print('-' * width, '\\t', '-' * 20)\n for runtime in runtimes:\n name = runtime[0]\n mem = runtime[1]\n print('{:{width}} \\t {}'.format(name, mem, width=width))\n print()\n print('Total runtimes: {}'.format(len(runtimes)))\n else:\n width = 10\n print('\\n{:{width}} \\t {}'.format('Runtime Name', 'Memory Size (MB)', width=width))\n print('-' * width, '\\t', '-' * 20)\n print('\\nNo runtimes deployed')", "def list(ctx):\n # pylint: disable=redefined-builtin\n _list_apps(ctx.obj['config'], ctx.obj['client'])", "def cmd_list_resources(config=DEFAULT_LINUX_PATH):\n config = load_config_file(expand_config_path(config))\n px = connection_proxmox(config[\"proxmox\"])\n try:\n if config[\"pools\"]:\n l, h = list_resources(px, config[\"pools\"])\n return tabulate(l, h)\n else:\n print(\"Dick 'pools' is empty\")\n except KeyError:\n print(\"Missing 'pools' dict in config file\")\n sys.exit(1)", "def antenny_list_configs(self):\n return self.antenny_config.list_configs()", "def test_list_build_config_for_all_namespaces(self):\n pass", "def resources(self):\r\n return self.page.object_list", "def list_build_config(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method list_build_config\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/buildconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = 
{}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1BuildConfigList',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res", "def resources(self):\n res = []\n for resource in self._resources:\n res = res + resource.resources()\n\n return res", "def ls():\n cfgmgr = ConfigManager()\n apps = cfgmgr['apps']\n for i in apps:\n print(fc(\"- {g}{appname}{rst}\", appname=i))", "def resources(self):\n return self.__resources", "def list_configurations(configurationType=None, filters=None, maxResults=None, nextToken=None, orderBy=None):\n pass", "def resources(self):\n return self._resources", "def resources(self):\n return self._resources", "def resources(self):\n return self._resources", "def list(self):\n resources = self._os_resource_manager.list()\n resource_list = []\n for resource in resources:\n resource_list.append(self._resource_class(id=resource.id,\n name=resource.name))\n return resource_list", "def get_project_list(config):\n eggs_dir = config.get('eggs_dir', 'eggs')\n if os.path.exists(eggs_dir):\n projects = os.listdir(eggs_dir)\n else:\n projects = []\n try:\n projects += [x[0] for x in config.cp.items('settings')]\n except NoSectionError:\n pass\n return projects", "def Run(self, args):\n variable_client = util.VariableClient()\n messages = util.Messages()\n\n config_resource = util.ParseConfigName(util.ConfigName(args))\n\n self._display_values = args.values\n\n request = messages.RuntimeconfigProjectsConfigsVariablesListRequest(\n parent=config_resource.RelativeName(),\n returnValues=self._display_values)\n\n page_size = args.page_size or self.DEFAULT_PAGE_SIZE\n\n results = list_pager.YieldFromList(\n variable_client, request, field='variables',\n batch_size_attribute='pageSize', limit=args.limit,\n batch_size=page_size\n )\n\n for result in results:\n yield util.FormatVariable(result, self._display_values)", "def runtime_config(self) -> str:\n return self._node[\"app_data\"].get(\"runtime_config\")", "def list_projects():\n if '.wcscanner' not in os.listdir(context.__BASE_PATH__):\n return []\n return os.listdir(context.__PROJECTS_PATH__)", "def list(obj):\n # lists pf9-express config files\n pf9_exp_conf_dir = obj['pf9_exp_conf_dir']\n\n if os.path.exists(pf9_exp_conf_dir):\n count = 1\n result = PrettyTable()\n 
result.field_names = [\"#\",\"Active\", \"Conf\", \"Management Plane\", \"Region\"]\n files = [f for f in os.listdir(pf9_exp_conf_dir) if os.path.isfile(os.path.join(pf9_exp_conf_dir, f))]\n\n for f in files:\n active = False\n if f == 'express.conf':\n active = True\n with open(pf9_exp_conf_dir + f, 'r') as config_file:\n config = Utils().config_to_dict(config_file)\n if active:\n result.add_row([count,'*', config[\"name\"], config[\"du_url\"], config[\"os_region\"]])\n else:\n result.add_row([count,' ', config[\"name\"], config[\"du_url\"], config[\"os_region\"]])\n count = count + 1\n\n click.echo(result)\n\n else:\n click.echo('No Platform9 management plane configs exist')", "def deployment_configs(self) -> pulumi.Output['outputs.PagesProjectDeploymentConfigs']:\n return pulumi.get(self, \"deployment_configs\")", "def ReadEntries(self):\n entries = []\n config = wx.Config.Get()\n config.SetPath(DEPS_CONFIG)\n step = config.GetFirstEntry()\n while (step[0]):\n entries.append(config.Read(step[1]))\n step = config.GetNextEntry(step[2])\n config.SetPath('..')\n return entries", "def get_resources(self, **extra_args):\n return [lrms for lrms in self.resources.itervalues()]", "def config(self) -> dict:\n return self._configs", "def getResources(self):\n\t\treturn deepcopy(self.server.resources)", "def GetAllResourcesSample():\n client = CreateClient()\n # Unlike client.GetResources, this returns a list of resources\n for resource in client.GetAllResources():\n PrintResource(resource)", "def list_logging_conf():\n import pkg_resources\n\n configs = set()\n for plugin in plugin_manager.load_all(__name__):\n configs.update({\n cfg for cfg in pkg_resources.resource_listdir(__name__, '.')\n if cfg.endswith('.json')\n })\n\n return configs", "def list_runtimes(self, docker_image_name='all'):\n logger.debug('Listing runtimes')\n logger.debug('Note that k8s job backend does not manage runtimes')\n return []", "def configs(self):\n return self._configs", "def get_configurations(self, obj):\n configs = obj.configs.all()\n serializer = SimpleExportConfigSerializer(configs, many=True,\n context={'request': self.context['request']})\n return serializer.data", "def list_conf(self, kwargs):\n self.display(\n self.engine.query(\n self.engine.ALL_FILTER(),\n ALL, base=','.join([\"CN=Configuration\", self.engine.base_dn])\n ),\n True\n )", "def x_list():\n\t_loadconfig()", "def loadConfig():\n global abs_path, app_list, app_api_subs\n\n # load application details\n with open(abs_path + '/../../../../config/apim.yaml', 'r') as file:\n apim_config = yaml.load(file, Loader=yaml.FullLoader)\n apps = apim_config['apps']\n\n for app in apps:\n app_list[app['name']] = []\n app_api_subs[app['name']] = app['api_subscriptions'].split(',')", "def get(self):\n configurations = g.user.get_api().get_configurations()\n result = [config_entity.to_json() for config_entity in configurations]\n return jsonify(result)", "def list(self, config_path: str, results_filter: Optional[ObjectType]) -> List[str]:\n ...", "def get_pecan_resources(self):\n return []", "def read_config_JSON(self) -> List[Dict]:\n with open(self.CONFIG_FILE_PATH) as json_file:\n j = json.load(json_file)\n\n cameras = j[\"cameras\"]\n cameras = [\n {\"name\": camera[\"name\"], \"path\": camera[\"path\"], \"config\": camera}\n for camera in cameras\n ]\n\n return cameras", "def getAllConfigInfo(self):\r\n self._update('getAllConfigInfo')\r\n\r\n configinfo = []\r\n for gconfig in self.supervisord.options.process_group_configs:\r\n inuse = gconfig.name in 
self.supervisord.process_groups\r\n for pconfig in gconfig.process_configs:\r\n configinfo.append(\r\n { 'name': pconfig.name,\r\n 'group': gconfig.name,\r\n 'inuse': inuse,\r\n 'autostart': pconfig.autostart,\r\n 'group_prio': gconfig.priority,\r\n 'process_prio': pconfig.priority })\r\n\r\n configinfo.sort(key=lambda r: r['name'])\r\n return configinfo", "def _config_files():\n from .plugin import plugins\n return [p for p in (p.config_file() for p in plugins()) if p is not None]", "def get_postprocess_config_files(self):\n return list(\n resources.get_files_in_folder(\n \"config/tests/postprocessing/postprocess_test_configs\"))", "def list_configurations(path):\n configurations = []\n\n for afile in os.listdir(path):\n afile = os.path.join(path, afile)\n if os.path.isfile(afile) and afile.endswith('.py'):\n configurations.append(afile)\n\n return configurations", "def configs(self):\n\n return self.__configs", "def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")", "def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")", "def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")", "def resources(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"resources\")", "def _get_config_dict():\r\n return CONFIGS", "def get_schedules():\n path = config.get('schedule', 'paths', './schedule.json')\n with open(path) as schedule_file:\n return json.load(schedule_file)", "def get_eval_config_files(self):\n return list(\n resources.get_files_in_folder(\n \"config/tests/evaluation/evaluate_test_configs\"))", "def resources(self):", "def resources(self):\n return [self]", "def _get_all_resources(self):\n all_resources = []\n for resource in ResourceModel.scan():\n all_resources.append(resource)\n return all_resources", "def list_configuration(config_file = CONFIG_FILE):\n conf = get_configuration(config_file)\n display_configuration(config_file, 'secret wallet configuration is located', conf)", "def watch_deployment_config_list(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_deployment_config_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/deploymentconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n 
auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def get_app_configs(self):\n self.check_apps_ready()\n return self.app_configs.values()", "def get_all_projects():\n return jsonify(admin.get_all_projects(current_app.scoped_session()))", "async def getConfigurations(self, ):\n payload = {}\n \n\n # Parameter validation\n schema = CatalogValidator.getConfigurations()\n schema.dump(schema.load(payload))\n \n\n url_with_params = await create_url_with_params(self._conf.domain, f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/\", \"\"\"{\"required\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}],\"optional\":[],\"query\":[],\"headers\":[],\"path\":[{\"in\":\"path\",\"name\":\"company_id\",\"description\":\"A `company_id` is a unique identifier for a particular seller account.\",\"schema\":{\"type\":\"string\"},\"required\":true},{\"in\":\"path\",\"name\":\"application_id\",\"description\":\"A `application_id` is a unique identifier for a particular sale channel.\",\"schema\":{\"type\":\"string\"},\"required\":true}]}\"\"\", )\n query_string = await create_query_string()\n headers = {\n \"Authorization\": \"Bearer \" + await self._conf.getAccessToken()\n }\n for h in self._conf.extraHeaders:\n headers.update(h)\n exclude_headers = []\n for key, val in headers.items():\n if not key.startswith(\"x-fp-\"):\n exclude_headers.append(key)\n return await AiohttpHelper().aiohttp_request(\"GET\", url_with_params, headers=get_headers_with_signature(self._conf.domain, \"get\", await create_url_without_domain(f\"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/\", ), query_string, headers, \"\", exclude_headers=exclude_headers), data=\"\")", "def create_resources(self) -> List[ResourceDescription]:\r\n return self.resources", "def showconfig():\n print(yaml.dump(CONFIG))", "def test_list_config_roots(self):\n with self.override_role():\n self.config_client.list_config_roots()", "def resources(self) -> \"Resources\":\n return self._resources", "def _configFiles(self):\n import glob\n ret = [] \n for ext in self.configManager.extensions:\n ret.extend(\n glob.glob(f\"{self.pipelinesDir}/{self.pipeName}/*{ext}\"))\n return ret", "def config(self):\n return \"\\n\".join([ c.config(True) for p, c in self.configs_ ])", "def prepare_resrc_config(res_config_list):\n resrc_schedule_list = []\n\n resource_names = utils.generate_resource_name(len(res_config_list))\n for idx, cfg in enumerate(res_config_list):\n core_list = [ResourceSchedule('Core{0}'.format(i), resource_names[idx], cfg) for i in range(1, cfg.cores+1)]\n resrc_schedule_list = resrc_schedule_list + core_list\n\n return resrc_schedule_list, resource_names", "def getResources(self, folder):\n\n #-------------------- \n # Get the resource JSON\n #-------------------- \n folder += \"/resources\"\n resources = 
self.__getJson(folder)\n #print(\"%s %s\"%(, folder))\n #print(\" Got resources: '%s'\"%(str(resources)))\n\n\n\n #-------------------- \n # Filter the JSONs\n #-------------------- \n resourceNames = []\n for r in resources:\n if 'label' in r:\n resourceNames.append(r['label'])\n #print(\"FOUND RESOURCE ('%s') : %s\"%(folder, r['label']))\n elif 'Name' in r:\n resourceNames.append(r['Name'])\n #print(\"FOUND RESOURCE ('%s') : %s\"%(folder, r['Name']))\n\n return resourceNames", "def get_config_files(self):\n self.clear_lists()\n print self.abs_directory\n for file in os.listdir(self.abs_directory):\n print file\n if file.endswith('.json') and \"qemii\" in file:\n self.txt_files.append(file)", "def list(self):\n\n config = self.get_config()\n client = config['client']\n default_config = config[client]\n\n msg.run('Saved options for client %s' % client)\n msg.inf('Default application (%s)' % default_config.get('defapp'))\n msg.inf('environment (%s)' % default_config['environment'])\n msg.inf('databases prod (%s) test (%s)' %\n (default_config['database'],\n default_config['test_database']))\n msg.inf('Image (%s)' % default_config['image'])\n msg.inf('Nginx (%s) Debug (%s) Verbose (%s)' %\n (default_config['nginx'],\n default_config['debug'],\n default_config['verbose'])\n )\n msg.run('\\nOther clients in this environment')\n clients = [item for item in config if item != 'client']\n\n msg.inf(', '.join(clients))", "def print_config(self):\n for key in CONFIG_KEYS:\n print('--- ' + key + ' ---')\n print(CONFIG_KEYS[key])", "def get_configurations():\n res = {}\n res[\"username\"] = g.user.get_username()\n configs = []\n for c in g.user.get_api().get_configurations():\n configs.append({\"id\": c.get_id(), \"name\": c.get_name()})\n res[\"configs\"] = configs\n return jsonify(res)", "def resources(self) -> Sequence['outputs.GetResourcesResourceResult']:\n return pulumi.get(self, \"resources\")", "def list(self):\n\n for name in self.projects:\n self.projects[name].show()\n print(\"\\n\")", "async def get_resources(self, **kwargs) -> dict:\n resources = await self.request.get(self._base_path, **kwargs)\n self._sanitize_resources(resources)\n return resources", "def configs(self, request, *args, **kwargs):\n response = self.retrieve(request, *args, **kwargs)\n response.data = response.data['configures']\n return response", "def getConfigs(self, host):\n raise \"not implemented\"", "def configs(self) -> list[Config]:\n return self._configs", "def resource(self, n):\n\n cfg = self.read()\n\n for res in cfg.get('Resources', []):\n res_name = res.get('Resource')\n\n if res_name == n:\n return ConfigResource(res)", "def get_resources():\n return Response(f\"{Resource.get_all_resources()}\", 200, mimetype='text/plain')", "def ResourceList(self):\n url = AddToUrl(self, 'https://api.spiget.org/v2/resources?')\n return ApiSearch(url)", "def print_resources(self) -> None:\n for resource in self._request_get(self.url_base + 'documentacao'):\n print(\n \"Nome: {},\\nUrl: {},\\n\".format(\n resource['name'],\n self._format_url_to_resource(resource['url']),\n )\n )", "def getConfigAll(self):\n return self.configAll(False)", "def get_projects(self):\n return conf.projects", "def configs(self):\n yield \"singleimage\", build_config.BuildConfig()", "def list_vservers():\n\n result = {}\n\n for file in os.listdir(cfg.ETC_VSERVERS):\n\n cfgdir = os.path.join(cfg.ETC_VSERVERS, file)\n\n if not os.path.isdir(cfgdir) or file.startswith('.'):\n # not a config \n continue\n\n result[file] = get_vserver_config(file)\n\n 
return result", "def default_configuration_list(platform, ide):\n\n # All platforms support this format.\n results = [\"Debug\", \"Internal\", \"Release\"]\n\n # Xbox and Windows support link time code generation\n # as a platform\n if ide.is_visual_studio() and platform.is_windows(\n ) or platform in (PlatformTypes.xbox360,):\n results.append(\"Release_LTCG\")\n\n # Configurations specific to the Xbox 360\n if platform is PlatformTypes.xbox360:\n results.extend([\"Profile\", \"Profile_FastCap\", \"CodeAnalysis\"])\n return results", "def index(self):\n return {'projects': [p for p in self.server.projects.values()]}" ]
[ "0.6564698", "0.6287851", "0.61679363", "0.6095816", "0.5989894", "0.5769178", "0.5736151", "0.57051367", "0.5691854", "0.5687537", "0.5679123", "0.5675863", "0.56112176", "0.55890656", "0.5550873", "0.5547206", "0.5533986", "0.5531299", "0.55280936", "0.5521363", "0.5520824", "0.5518877", "0.5518877", "0.55047584", "0.55046743", "0.5498306", "0.54949135", "0.54949135", "0.54949135", "0.54901534", "0.54713917", "0.54683036", "0.54679304", "0.54341364", "0.54314244", "0.5380299", "0.53792286", "0.5371022", "0.53611904", "0.5359265", "0.5306481", "0.5301773", "0.5299419", "0.529759", "0.52874476", "0.52839977", "0.5257769", "0.52501863", "0.5210238", "0.51908374", "0.519065", "0.5188354", "0.5187915", "0.51798946", "0.51792485", "0.51680106", "0.5166369", "0.5163844", "0.5163844", "0.5163844", "0.5163844", "0.51565623", "0.51471215", "0.51421475", "0.5137515", "0.5132798", "0.51148653", "0.51072973", "0.510646", "0.5106104", "0.5105508", "0.5099231", "0.50982565", "0.5093075", "0.5092075", "0.5088175", "0.5088105", "0.50851065", "0.5080111", "0.50765103", "0.50653017", "0.5059106", "0.5057651", "0.50454617", "0.5042113", "0.50337887", "0.50324386", "0.5026718", "0.5025913", "0.5024043", "0.50222164", "0.50216126", "0.50101614", "0.50042486", "0.4998174", "0.49977675", "0.49970672", "0.49770617", "0.49656639", "0.49582896" ]
0.5060501
81
Gets information about a RuntimeConfig resource.
def GetConfig(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runtime_config(self) -> str:\n return self._node[\"app_data\"].get(\"runtime_config\")", "def get_resource_config(target=False, force=None):\n return get_stored_property(ctx, 'resource_config', target, force)", "def config(self):\n annotations = IAnnotations(self.context)\n return annotations.get(CONFIGURATION_KEY, {})", "def access_config(self) -> pulumi.Output['outputs.RuntimeAccessConfigResponse']:\n return pulumi.get(self, \"access_config\")", "def _GetCuttlefishRuntimeConfig(runtime_cf_config_path, raw_data=None):\n if raw_data:\n # if remote instance couldn't fetch the config will return message such as\n # 'cat: .../cuttlefish_config.json: No such file or directory'.\n # Add this condition to prevent from JSONDecodeError.\n try:\n return json.loads(raw_data)\n except ValueError as e:\n raise errors.ConfigError(\n \"An exception happened when loading the raw_data of the \"\n \"cvd runtime config:\\n%s\" % str(e))\n if not os.path.exists(runtime_cf_config_path):\n raise errors.ConfigError(\n \"file does not exist: %s\" % runtime_cf_config_path)\n with open(runtime_cf_config_path, \"r\") as cf_config:\n return json.load(cf_config)", "def get(self) -> dict:\n return Config.get()", "def getConfig(self):\n \n return self.config", "def get_config():\n app = NbConvertApp()\n app.load_config_file()\n return app.config", "def get_config(self):\n return self.config", "def get_config():\n return _config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def 
_get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def get_config(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVm_GetConfig', self.handle))", "def config(self):\n return self.namespace['config']", "def config(self):\n return self[CONFIG_KEY]", "def get_config():\n return CONFIG", "def get_config(self):\r\n if not os.path.exists(self.config_file):\r\n return None\r\n return json.loads(file(self.config_file).read())", "def get_config(self):\n if self.allow_reco():\n return self.chs_config()\n else:\n return self.get_config_j(self.id)", "def get_details(self):\n return self.__config_data", "def access_config(self) -> Optional[pulumi.Input['RuntimeAccessConfigArgs']]:\n return pulumi.get(self, \"access_config\")", "def resource(self, n):\n\n cfg = self.read()\n\n for res in cfg.get('Resources', []):\n res_name = res.get('Resource')\n\n if res_name == n:\n return ConfigResource(res)", "def get_config_on_json(self):\n # load section CONFIG from data\n try:\n return self.json_data[\"CONFIG\"]\n except:\n constant.get_error(constant.ERROR_004)", "def get_config():\n return _CONFIG", "def _get_config_dict():\r\n return CONFIGS", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def get_configuration(self) -> dict:\n return self.config", "def config(self) -> 'outputs.DeviceConfigResponse':\n return pulumi.get(self, \"config\")", "def get_config(self):\n return {\"name\": self.name, \"tunable\": self.tunable}", "def get(self):\n return util.getJSONFile(CONFIG_PATH)", "def get_config(self, request, object_id):\n host = get_object_or_404(Host, pk=object_id)\n return get_host_config_tar_response(host)", "def get_config(param):\n _config = loadConfig()\n try:\n click.echo(f\"{param}={rgetattr(_config, param)}\")\n except AttributeError as err:\n click.echo(err)", "def _cfg(self):\n if not hasattr(self, '__config'):\n self.__config = cuegui.Utils.getResourceConfig()\n return self.__config", "def configuration_info(self) -> Optional['outputs.ConfigurationInfoResponse']:\n return pulumi.get(self, \"configuration_info\")" ]
[ "0.7309697", "0.63536143", "0.61861", "0.61123216", "0.60506386", "0.6046548", "0.60022986", "0.59997743", "0.59895045", "0.5969095", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5955346", "0.5945794", "0.5944254", "0.594386", "0.5934438", "0.5910202", "0.59014225", "0.5853149", "0.5849204", "0.5825455", "0.5823457", "0.57991266", "0.57922626", "0.5786337", "0.5786337", "0.57849544", "0.5773545", "0.5767829", "0.575775", "0.5720712", "0.5689992", "0.5683522", "0.5682665" ]
0.0
-1
Creates a new RuntimeConfig resource. The configuration name must be unique within the project.
def CreateConfig(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, config):\n config_name = config.get(\"LaunchConfigurationName\", self._name)\n assert config_name == self._name, \"Config name mismatch {} {}\".format(config_name, self._name)\n config[\"LaunchConfigurationName\"] = self._name\n self._client.create_launch_configuration(**config)", "def create_config(self) -> None:\n pass", "def create_config(self) -> None:\n pass", "def create(name, storage, backend, memory, timeout, config, debug):\n setup_lithops_logger(logging.DEBUG)\n\n verify_runtime_name(name)\n\n if config:\n config = load_yaml_config(config)\n\n config_ow = set_config_ow(backend, storage, runtime_name=name)\n config = default_config(config, config_ow)\n\n if config['lithops']['mode'] != SERVERLESS:\n raise Exception('\"lithops runtime create\" command is only valid for serverless backends')\n\n logger.info('Creating new lithops runtime: {}'.format(name))\n storage_config = extract_storage_config(config)\n internal_storage = InternalStorage(storage_config)\n\n compute_config = extract_serverless_config(config)\n compute_handler = ServerlessHandler(compute_config, internal_storage)\n mem = memory if memory else compute_config['runtime_memory']\n to = timeout if timeout else compute_config['runtime_timeout']\n runtime_key = compute_handler.get_runtime_key(name, mem)\n runtime_meta = compute_handler.create_runtime(name, mem, timeout=to)\n\n try:\n internal_storage.put_runtime_meta(runtime_key, runtime_meta)\n except Exception:\n raise (\"Unable to upload 'preinstalled-modules' file into {}\".format(internal_storage.backend))", "def GenerateConfig(context):\n\n resources = [{\n 'name': context.env['name'],\n 'type': 'compute.v1.instance',\n 'properties': {\n 'zone': context.properties['zone'],\n 'machineType': ''.join([COMPUTE_URL_BASE, 'projects/',\n context.env['project'], '/zones/',\n context.properties['zone'], '/machineTypes/',\n context.properties['machineType']]),\n 'disks': [{\n 'deviceName': 'boot',\n 'type': 'PERSISTENT',\n 'boot': True,\n 'autoDelete': True,\n 'initializeParams': {\n 'sourceImage': ''.join([COMPUTE_URL_BASE, 'projects/',\n 'ubuntu-os-cloud/global/',\n 'images/family/ubuntu-1604-lts'])\n }\n }],\n 'networkInterfaces': [{\n 'network': '$(ref.' 
+ context.properties['network']\n + '.selfLink)',\n 'accessConfigs': [{\n 'name': 'External NAT',\n 'type': 'ONE_TO_ONE_NAT'\n }]\n }],\n 'metadata': {\n 'items': [{\n 'key': 'startup-script',\n 'value': ''.join(['#!/bin/bash\\n',\n 'sudo apt-get install openjdk-9-jre-headless -y\\n',\n 'sudo python -m SimpleHTTPServer 80'])\n }]\n }\n }\n }]\n return {'resources': resources}", "def createConfig():\n\twith open(configPath, 'w', encoding='utf-8') as file:\n\t\tjson.dump(default_config, file, indent=3)", "def create_configuration(EngineType=None, EngineVersion=None, Name=None, Tags=None):\n pass", "def new(self, new_type, name):\n valid_types = ['image', 'rpm']\n new_type = new_type.lower()\n if new_type not in valid_types:\n raise ValueError('Type must be one of {}'.format(','.join(valid_types)))\n\n new_type = new_type + 's'\n template = os.path.join(self.runtime.metadata_dir, 'example', new_type, 'template.yml')\n new_config = os.path.join(self.runtime.group_dir, new_type, '{}.yml'.format(name))\n\n if os.path.exists(new_config):\n raise ValueError('{} already exists!'.format(new_config))\n\n shutil.copyfile(template, new_config)\n\n config_log = self._load_config_log()\n config_log.setdefault('new', []).append(new_config)\n\n self._save_config_log(config_log)\n\n self.runtime.logger.info(\"New config template created: \\n{}\".format(new_config))", "def create_app(config_log=True, register=True):\n config = os.environ.get(ENV_CONFIG_MODULE)\n if not config:\n raise ValueError('no config found')\n return create_app_by_config(conf=config, config_log=config_log, register=register)", "def create_app(config_name):\n app = Flask(__name__, instance_relative_config=True)\n app.url_map.strict_slashes = False \n app.config.from_object(app_config[config_name])\n\n \"\"\"import the blueprint from the V1 folder __init__.py file and register the blueprint\"\"\"\n from app.api.V1 import v1 \n app.register_blueprint(v1) \n return app", "def create_app(config_name):\n app = FlaskAPI(__name__)\n app.config.from_object(app_config[config_name])\n app.url_map.strict_slashes = False\n\n return app", "def runtime_config(self) -> str:\n return self._node[\"app_data\"].get(\"runtime_config\")", "def create_runtime(self, runtime_name, memory, timeout):\n return self.compute_handler.create_runtime(runtime_name, memory, timeout=timeout)", "def NewConfig(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = {}\n for i in range(len(args)):\n payload[\"Arg%s\" % (i + 1)] = args[i]\n for item in kwargs.items():\n payload[item[0]] = item[1]\n return self._execute(\"newConfig\", payload=payload, response_object=None)", "def create_endpoint_config(EndpointConfigName=None, ProductionVariants=None, Tags=None, KmsKeyId=None):\n pass", "def generate_config(context):\n\n resources = []\n properties = context.properties\n project_id = properties.get('project', context.env['project'])\n name = properties.get('name', context.env['name'])\n\n resource = {\n 'name': context.env['name'],\n # https://cloud.google.com/filestore/docs/reference/rest/v1beta1/projects.locations.instances/create\n 'type': 'gcp-types/file-v1beta1:projects.locations.instances',\n 'properties': {\n 'parent': 'projects/{}/locations/{}'.format(project_id, properties['location']),\n 'instanceId': name,\n }\n }\n\n optional_props = [\n 'description',\n 'tier',\n 'labels',\n 'fileShares',\n 'networks',\n ]\n\n for prop in optional_props:\n if prop in properties:\n resource['properties'][prop] = properties[prop]\n\n resources.append(resource)\n\n 
return {\n 'resources':\n resources,\n 'outputs':\n [\n {\n 'name': 'name',\n 'value': name\n },\n {\n 'name': 'fileShares',\n 'value': '$(ref.{}.fileShares)'.format(context.env['name'])\n },\n {\n 'name': 'networks',\n 'value': '$(ref.{}.networks)'.format(context.env['name'])\n }\n ]\n }", "def create():\n config_file = get_config_file()\n if os.path.isfile(config_file):\n logger.info(\"The config file already exists at '%s'\", config_file)\n return\n _create_config()\n logger.info(\"Created config file at '%s'\", config_file)", "def create_config(output_dir='my-hls-test', project_name='myproject', backend='Vivado', version='1.0.0', **kwargs):\n backend_list = hls4ml.backends.get_available_backends()\n if backend.lower() not in backend_list:\n raise Exception(f'Unknown backend: {backend}')\n\n backend = hls4ml.backends.get_backend(backend)\n\n backend_config = backend.create_initial_config(**kwargs)\n\n config = {}\n config['OutputDir'] = output_dir\n config['ProjectName'] = project_name\n config['Backend'] = backend.name\n config['Version'] = version\n config.update(backend_config)\n\n return config", "def create_app(config_name='development'):\n\tapp = Flask(__name__,instance_relative_config=True)\n\tapp.config.from_object(APP_CONFIG[config_name])\n\n\turl = app.config.get('DATABASE_URL')\n\turl = app.config.get('DATABASE_URL')\n\tCORS(app, resources={r\"/api/*\": {\"origins\": \"*\"}})\n\n\tapp.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER\n\n\tcreate_tables(url)\n\tpublic_id = str(uuid.uuid4())\n\tif config_name == 'testing':\n\t\tpublic_id = \"f3b8a1c3-f775-49e1-991c-5bfb963eb419\"\n\tcreate_super_user(url, public_id)\n\n\tapp.register_error_handler(404, url_not_found)\n\tapp.url_map.strict_slashes = False\n\n\n\tapp.register_blueprint(v1)\n\tapp.register_blueprint(v2)\n\treturn app", "def AddConfig(self, file_path, type_name):\n file_utils.CheckPath(file_path, 'source')\n res_name = resource.BuildConfigFileName(type_name, file_path)\n self._AddResource(file_path, res_name, False)\n return res_name", "def create_arg_config(environment, region, template, parameters):\r\n raw_config = {\r\n 'Environment': environment,\r\n 'Region': region\r\n }\r\n if template:\r\n raw_config['Template'] = template\r\n if parameters:\r\n raw_config['Parameters'] = dict(parameters)\r\n return Config(raw_config)", "def __load_config(runtime_env):\n config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"config.ini\")\n if not os.path.exists(config_file):\n raise FileNotFoundError(config_file)\n _app_config = configparser.ConfigParser()\n _app_config.read(config_file)\n\n # Evaluate\n _app_config = _app_config[runtime_env]\n return _app_config", "def create_app(config_name):\n\n app = Flask(__name__)\n app.config.from_object(config_by_name[config_name])\n CORS(app)\n mongo.init_app(app)\n app.register_blueprint(check_bp)\n\n return app", "def generate_config(context):\n\n\n properties = context.properties\n project_id = properties.get('project', context.env['project'])\n\n network = context.properties.get('networkURL', generate_network_uri(\n project_id,\n context.properties.get('network','')\n ))\n target_vpn_gateway = context.env['name'] + '-tvpng'\n esp_rule = context.env['name'] + '-esp-rule'\n udp_500_rule = context.env['name'] + '-udp-500-rule'\n udp_4500_rule = context.env['name'] + '-udp-4500-rule'\n vpn_tunnel = context.env['name'] + '-vpn'\n router_vpn_binding = context.env['name'] + '-router-vpn-binding'\n resources = []\n if 'ipAddress' in context.properties:\n ip_address = 
context.properties['ipAddress']\n static_ip = ''\n else:\n static_ip = context.env['name'] + '-ip'\n resources.append({\n # The reserved address resource.\n 'name': static_ip,\n # https://cloud.google.com/compute/docs/reference/rest/v1/addresses\n 'type': 'gcp-types/compute-v1:addresses',\n 'properties': {\n 'name': properties.get('name', static_ip),\n 'project': project_id,\n 'region': context.properties['region']\n }\n })\n ip_address = '$(ref.' + static_ip + '.address)'\n\n resources.extend([\n {\n # The target VPN gateway resource.\n 'name': target_vpn_gateway,\n # https://cloud.google.com/compute/docs/reference/rest/v1/targetVpnGateways\n 'type': 'gcp-types/compute-v1:targetVpnGateways',\n 'properties':\n {\n 'name': properties.get('name', target_vpn_gateway),\n 'project': project_id,\n 'network': network,\n 'region': context.properties['region'],\n }\n },\n {\n # The forwarding rule resource for the ESP traffic.\n 'name': esp_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-esp'.format(properties.get('name')) if 'name' in properties else esp_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'ESP',\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n {\n # The forwarding rule resource for the UDP traffic on port 4500.\n 'name': udp_4500_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-udp-4500'.format(properties.get('name')) if 'name' in properties else udp_4500_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'UDP',\n 'portRange': 4500,\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n {\n # The forwarding rule resource for the UDP traffic on port 500\n 'name': udp_500_rule,\n # https://cloud.google.com/compute/docs/reference/rest/v1/forwardingRules\n 'type': 'gcp-types/compute-v1:forwardingRules',\n 'properties':\n {\n 'name': '{}-udp-500'.format(properties.get('name')) if 'name' in properties else udp_500_rule,\n 'project': project_id,\n 'IPAddress': ip_address,\n 'IPProtocol': 'UDP',\n 'portRange': 500,\n 'region': context.properties['region'],\n 'target': '$(ref.' + target_vpn_gateway + '.selfLink)',\n }\n },\n\n ])\n router_url_tag = 'routerURL'\n router_name_tag = 'router'\n\n if router_name_tag in context.properties:\n router_url = context.properties.get(router_url_tag, generate_router_uri(\n context.env['project'],\n context.properties['region'],\n context.properties[router_name_tag]))\n # Create dynamic routing VPN\n resources.extend([\n {\n # The VPN tunnel resource.\n 'name': vpn_tunnel,\n # https://cloud.google.com/compute/docs/reference/rest/v1/vpnTunnels\n 'type': 'gcp-types/compute-v1:vpnTunnels',\n 'properties':\n {\n 'name': properties.get('name', vpn_tunnel),\n 'project': project_id,\n 'description':\n 'A vpn tunnel',\n 'ikeVersion':\n 2,\n 'peerIp':\n context.properties['peerAddress'],\n 'region':\n context.properties['region'],\n 'router': router_url,\n 'sharedSecret':\n context.properties['sharedSecret'],\n 'targetVpnGateway':\n '$(ref.' 
+ target_vpn_gateway + '.selfLink)'\n },\n 'metadata': {\n 'dependsOn': [esp_rule,\n udp_500_rule,\n udp_4500_rule]\n }\n }])\n else:\n # Create static routing VPN\n resources.append(\n {\n # The VPN tunnel resource.\n 'name': vpn_tunnel,\n 'type': 'gcp-types/compute-v1:vpnTunnels',\n 'properties': {\n 'name': vpn_tunnel,\n 'description':\n 'A vpn tunnel',\n 'ikeVersion':\n 2,\n 'peerIp':\n context.properties['peerAddress'],\n 'region':\n context.properties['region'],\n 'sharedSecret':\n context.properties['sharedSecret'],\n 'targetVpnGateway':\n '$(ref.' + target_vpn_gateway + '.selfLink)',\n 'localTrafficSelector':\n context.properties['localTrafficSelector'],\n 'remoteTrafficSelector':\n context.properties['remoteTrafficSelector'],\n\n },\n 'metadata': {\n 'dependsOn': [esp_rule, udp_500_rule, udp_4500_rule]\n }\n },\n )\n\n return {\n 'resources':\n resources,\n 'outputs':\n [\n {\n 'name': 'targetVpnGateway',\n 'value': target_vpn_gateway\n },\n {\n 'name': 'staticIp',\n 'value': static_ip\n },\n {\n 'name': 'espRule',\n 'value': esp_rule\n },\n {\n 'name': 'udp500Rule',\n 'value': udp_500_rule\n },\n {\n 'name': 'udp4500Rule',\n 'value': udp_4500_rule\n },\n {\n 'name': 'vpnTunnel',\n 'value': vpn_tunnel\n },\n {\n 'name': 'vpnTunnelUri',\n 'value': '$(ref.'+vpn_tunnel+'.selfLink)'\n }\n ]\n }", "def create_app(config_filename): \n app = Flask(__name__)\n app.config.from_object(config_filename)\n \n from app import api_bp\n app.register_blueprint(api_bp, url_prefix='/api')\n\n from Model import db\n with app.app_context():\n db.init_app(app)\n db.create_all([None])\n return app", "def create_config_file(name):\n config = {}\n config['name'] = name\n to_dir = os.getcwd() + '/' + name\n with open(os.path.join(to_dir, 'configuration.json'), 'w') as config_file:\n json.dump(config, config_file)", "def update(name, config, backend, storage, debug):\n setup_lithops_logger(logging.DEBUG)\n\n verify_runtime_name(name)\n\n if config:\n config = load_yaml_config(config)\n\n config_ow = set_config_ow(backend, storage, runtime_name=name)\n config = default_config(config, config_ow)\n\n if config['lithops']['mode'] != SERVERLESS:\n raise Exception('\"lithops runtime update\" command is only valid for serverless backends')\n\n storage_config = extract_storage_config(config)\n internal_storage = InternalStorage(storage_config)\n compute_config = extract_serverless_config(config)\n compute_handler = ServerlessHandler(compute_config, internal_storage)\n\n timeout = compute_config['runtime_memory']\n logger.info('Updating runtime: {}'.format(name))\n\n runtimes = compute_handler.list_runtimes(name)\n\n for runtime in runtimes:\n runtime_key = compute_handler.get_runtime_key(runtime[0], runtime[1])\n runtime_meta = compute_handler.create_runtime(runtime[0], runtime[1], timeout)\n\n try:\n internal_storage.put_runtime_meta(runtime_key, runtime_meta)\n except Exception:\n raise (\"Unable to upload 'preinstalled-modules' file into {}\".format(internal_storage.backend))", "def create_resource(\n service_name: str, config_name: str = None, **resource_args\n):\n session = get_session(config_name)\n return session.resource(service_name, **resource_args)", "def createConfiguration(self, input):\n resolvedInputName = envString.resolve(input)\n if self.opts.verbose:\n print(\"creating configuration using \", resolvedInputName)\n template = TemplateWriter()\n substitutes = self.defaults.copy()\n for key in self.commandLineDefaults:\n val = self.commandLineDefaults[key]\n if val is not None:\n substitutes[key] = 
self.commandLineDefaults[key]\n\n substitutes[\"CTRL_EXECUTE_SETUP_PACKAGES\"] = self.getSetupPackages()\n\n configDir = os.path.join(substitutes[\"LOCAL_SCRATCH\"], \"configs\")\n if not os.path.exists(configDir):\n os.mkdir(configDir)\n self.outputFileName = os.path.join(configDir, \"%s.config\" % (self.runid))\n if self.opts.verbose:\n print(\"writing new configuration to \", self.outputFileName)\n template.rewrite(resolvedInputName, self.outputFileName, substitutes)\n return self.outputFileName", "def create_app(config_name):\n\n app = Flask(__name__)\n app.config.from_object(config[config_name])\n\n bootstrap.init_app(app)\n\n from .main import main\n app.register_blueprint(main)\n\n return app", "def make_config(self, config_name=None, optional=False, **CONFIG_VARS):\n return self.make_config_params(config_name, optional, **CONFIG_VARS)[0]", "def copy(self, new_name, new_config, retain_spot_price=False, delete_old=False):\n config = self.get()\n config.update(new_config)\n config[\"LaunchConfigurationName\"] = new_name\n\n # The following fields are not allowed as launch config input.\n config.pop(\"LaunchConfigurationARN\")\n config.pop(\"CreatedTime\")\n config.pop(\"KernelId\")\n config.pop(\"RamdiskId\")\n if not retain_spot_price and config.get(\"SpotPrice\", None) != None:\n logger.info(\"Not retaining spot price!\")\n config.pop(\"SpotPrice\")\n\n new_lc = LaunchConfig(new_name, aws_profile=self._aws_profile)\n new_lc.create(config)\n if delete_old:\n self.delete()\n return new_lc", "def create_app(config_name):\n app = Flask(__name__)\n # create app instance\n app.config.from_object(config_by_name[config_name])\n flask_bcrypt.init_app(app)\n\n CORS(app)\n\n routes.init_routes(app)\n\n return app", "def create(ctx, iface, resource_config, params, **_):\n\n lb_name = params.get(LB_NAME)\n if not lb_name:\n targs = \\\n utils.find_rels_by_node_type(\n ctx.instance,\n LB_TYPE)\n lb_name = \\\n targs[0].target.instance.runtime_properties[\n EXTERNAL_RESOURCE_ID]\n params.update({LB_NAME: lb_name})\n\n ctx.instance.runtime_properties[LB_NAME] = \\\n lb_name\n\n # Actually create the resource\n iface.create(params)", "def get_config():\n name = 'dynamic_pricing'\n num_products = 5\n scale = 1\n noise_var = 10\n p_max = 1\n\n agents = collections.OrderedDict(\n [('bsPricing',\n functools.partial(BootstrapDynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n\n environments = collections.OrderedDict(\n [('env',\n functools.partial(DynamicPricing,\n num_products, scale, noise_var, p_max))]\n )\n experiments = collections.OrderedDict(\n [(name, ExperimentNoAction)]\n )\n n_steps = 80\n n_seeds = 2000\n config = Config(name, agents, environments, experiments, n_steps, n_seeds)\n return config", "def create(self, config):\n\n assert config[\"name\"] == self.name, \"Given config is not for this template\"\n\n data = self._json_encode(config)\n headers = self._default_headers()\n\n return self._request(\"\",\n ok_status=None,\n data=data,\n headers=headers)", "def generate_config(context):\n\n enable_flow_logs = context.properties.get('enableFlowLogs', False)\n\n subnetwork_resource = {\n 'name': context.properties['resourceName'],\n 'type': 'gcp-types/compute-beta:subnetworks',\n 'properties': {\n # Required properties.\n 'name':\n context.properties['name'],\n 'network':\n context.properties['network'],\n 'ipCidrRange':\n context.properties['ipCidrRange'],\n 'region':\n context.properties['region'],\n 'project':\n context.properties['projectId'],\n\n # Optional properties, with 
defaults.\n 'enableFlowLogs':\n enable_flow_logs,\n 'privateIpGoogleAccess':\n context.properties.get('privateIpGoogleAccess', False),\n 'secondaryIpRanges':\n context.properties.get('secondaryIpRanges', []),\n }\n }\n \n if enable_flow_logs:\n # If flow logs are enabled, we want to adjust the default config in two ways:\n # (1) Increase the sampling ratio (defaults to 0.5) so we sample all traffic.\n # (2) Reduce the aggregation interval to 30 seconds (default is 5secs) to save on\n # storage.\n subnetwork_resource['properties']['logConfig'] = {\n 'aggregationInterval': 'INTERVAL_30_SEC',\n 'enable': True,\n 'flowSampling': 1.0,\n 'metadata': 'INCLUDE_ALL_METADATA',\n }\n\n # Pass the 'dependsOn' property to the subnetwork resource if present.\n if 'dependsOn' in context.properties:\n subnetwork_resource['metadata'] = {\n 'dependsOn': context.properties['dependsOn']\n }\n\n output = [\n {\n 'name': 'name',\n 'value': subnetwork_resource['name'],\n },\n {\n 'name': 'selfLink',\n 'value': '$(ref.{}.selfLink)'.format(subnetwork_resource['name']),\n },\n ]\n\n return {'resources': [subnetwork_resource], 'outputs': output}", "def create_app(config_name=\"development\"):\n # return app with config file on config folder\n app = Flask(__name__)\n\n # get default settings for app\n app.config.from_object(\"app_name.settings\")\n\n # load according config object\n app.config.from_object(app_config.config[config_name])\n\n # run classmethod to init app with Flask-DotEnv\n app_config.config[config_name].init_app(app)\n\n # register blueprints\n app.register_blueprint(api_mod, url_prefix=\"/api\")\n app.register_blueprint(mock_module, url_prefix=\"/api\")\n app.register_blueprint(support_ticket_module, url_prefix=\"/api\")\n \n # enable cors\n CORS(app)\n\n with app.app_context():\n # if config_name != \"testing\":\n # init db instance\n db.init_app(app)\n\n # migrate for Flask-Migrate\n migrate.init_app(app, db)\n\n return app", "def dbt_config(self):\n from dbt.config.runtime import RuntimeConfig as DbtRuntimeConfig\n from dbt.adapters.factory import register_adapter\n\n self.dbt_config = DbtRuntimeConfig.from_args(\n DbtConfigArgs(\n project_dir=self._get_project_dir(),\n profiles_dir=self._get_profiles_dir(),\n profile=self._get_profile(),\n )\n )\n register_adapter(self.dbt_config)\n return self.dbt_config", "def generate_config(context):\n resources = []\n\n # Create an initial 'STARTED' pubsub notification.\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n depends_on=[],\n status_string='STARTED',\n ))\n\n # Required properties.\n billing_account_id = context.properties['billingAccountId']\n parent_organization = context.properties['parentOrganization']\n project_id = context.properties['projectId']\n\n # Optional properties, with defaults.\n high_security_network = context.properties.get('highSecurityNetwork', False)\n private_ip_google_access = context.properties.get('privateIpGoogleAccess', False)\n storage_bucket_lifecycle = context.properties.get('storageBucketLifecycle', 180)\n billing_account_friendly_name = context.properties.get('billingAccountFriendlyName', billing_account_id)\n # Use a project name if given, otherwise it's safe to fallback to use the\n # project ID as the name.\n project_name = context.properties.get('projectName', project_id)\n labels_obj = context.properties.get('labels', {})\n\n # Save this template's version number and all parameters inputs to the project metadata to keep track of what\n # operations were 
performed on a project.\n labels_obj.update({\n \"firecloud-project-template-version\" : str(FIRECLOUD_PROJECT_TEMPLATE_VERSION_ID)\n })\n\n for k, v in context.properties.items():\n label_k, label_v = satisfy_label_requirements('param--' + str(k), v)\n labels_obj.update({\n label_k: label_v\n })\n\n\n if high_security_network:\n labels_obj.update({\n \"vpc-network-name\" : FIRECLOUD_VPC_NETWORK_NAME,\n \"vpc-subnetwork-name\" : FIRECLOUD_VPC_SUBNETWORK_NAME\n })\n\n if 'parentFolder' in context.properties:\n parent_obj = {\n 'id': context.properties['parentFolder'],\n 'type': 'folder',\n }\n else:\n parent_obj = {\n 'id': context.properties['parentOrganization'],\n 'type': 'organization',\n }\n\n # Create the main project resource.\n resources.append({\n 'type': 'templates/project.py',\n 'name': 'fc-project',\n 'properties': {\n 'activateApis': FIRECLOUD_REQUIRED_APIS,\n 'billingAccountId': billing_account_id,\n 'billingAccountFriendlyName': billing_account_friendly_name,\n 'iamPolicies': create_iam_policies(context),\n 'labels': labels_obj,\n 'name': project_name,\n # The project parent. For FireCloud, this should refer to the\n # firecloud.org (or equivalent) GCP organization ID.\n 'parent': parent_obj,\n 'projectId': project_id,\n # If true, this would remove the default compute egine service\n # account. FireCloud doesn't use this SA, but we're leaving this set\n # to False to avoid changing any legacy behavior, at least initially.\n 'removeDefaultSA': False,\n # Removes the default VPC network for projects requiring stringent\n # network security configurations.\n 'removeDefaultVPC': high_security_network,\n 'createUsageExportBucket': False,\n # Always set up the storage logs and cromwell auth buckets for Firecloud\n 'storageLogsBucket': True,\n 'storageBucketLifecycle': storage_bucket_lifecycle,\n 'cromwellAuthBucket': True\n }\n })\n\n if high_security_network:\n resources.extend(create_high_security_network(context))\n resources.extend(create_firewall(context))\n if private_ip_google_access:\n resources.extend(create_private_google_access_dns_zone(context))\n else:\n resources.extend(create_default_network(context))\n\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n # This is somewhat hacky, but we can't simply collect the name of each\n # collected resource since template call nodes aren't \"real\" resources\n # that can be part of a dependsOn stanza. So instead, we collect the\n # names of all resources that are output by the network (which itself\n # depends on the project). 
It doesn't seem to be possible to concatenate\n # dependsOn arrays within the reference syntax, otherwise we could make\n # this depend explicitly on all resources from the template nodes.\n depends_on='$(ref.fc-network.resourceNames)',\n status_string='COMPLETED'))\n\n return {'resources': resources}", "def create_config_job(self,\n resource_uri,\n cim_creation_class_name,\n cim_name,\n target,\n cim_system_creation_class_name='DCIM_ComputerSystem',\n cim_system_name='DCIM:ComputerSystem',\n reboot=False,\n start_time='TIME_NOW'):\n return self._job_mgmt.create_config_job(\n resource_uri,\n cim_creation_class_name,\n cim_name,\n target,\n cim_system_creation_class_name,\n cim_system_name,\n reboot,\n start_time)", "def GenerateConfig(context):\n\n resources = [{\n 'name': 'my-build',\n 'action': 'gcp-types/cloudbuild-v1:cloudbuild.projects.builds.create',\n 'metadata': {\n 'runtimePolicy': ['UPDATE_ALWAYS']\n },\n 'properties': {\n 'steps': [\n {\n 'name': 'gcr.io/cloud-builders/gcloud',\n 'args': ['deployment-manager',\n context.properties['resourceToList'],\n 'list']\n }\n ],\n 'timeout': '120s'\n }\n }]\n return { 'resources': resources }", "def modified_dynamic_resnet(config=None):\n return DynamicResNet(config)", "def create_app(config_path: str):\n\n if not os.path.exists(config_path):\n raise OSError(f\"Configuration file {config_path} does not exist\")\n\n # create flask app\n app = Flask(__name__)\n\n # add app configration \n app.config.from_pyfile(config_path)\n\n # initialize database \n db.init_app(app)\n logger.info(f\"Initializing app with database from {app.config['SQLALCHEMY_DATABASE_URI']}\")\n\n # initialize api enpoints\n from deekoo_auth.endpoints import api_endpoints\n app.register_blueprint(api_endpoints)\n\n return app", "def create_app_by_config(conf=None, config_log=True, register=True):\n # check instance path\n instance_path = os.environ.get(ENV_INSTANCE_PATH) or None\n # create app\n app = Flask(__name__, instance_path=instance_path)\n # ensure the instance folder exists\n if app.instance_path:\n try:\n os.makedirs(app.instance_path, exist_ok=True)\n except OSError:\n pass\n # configure app\n if conf:\n app.config.from_object(conf)\n # config logger\n if config_log:\n config_logger(app)\n # register blueprints\n if register:\n register_blueprints(app)\n return app", "def create_app(name, path, settings_override=None,\n register_security_blueprint=True):\n\n app = Flask(name, instance_relative_config=True)\n app.config.from_object(\"linkedlist.config\") # public config\n app.config.from_pyfile(\"config.py\", silent=True) # instance config\n app.config.from_object(settings_override) # argument override\n\n # patch in envvar config\n environ_config_override = find_environ_config_vars()\n for key, value in environ_config_override.iteritems():\n app.config[key] = value\n\n db.init_app(app)\n security.init_app(app, SQLAlchemyUserDatastore(db, User, Role),\n register_blueprint=register_security_blueprint)\n register_blueprints(app, name, path)\n\n # create database tables\n with app.app_context():\n db.create_all()\n\n return app", "def __init__(self, config_name: str, is_top_level_config: bool = False):\n self.config_name = config_name\n self.config_variable_name = to_snake_case(config_name) + \"_config\"\n self.is_top_level_config = is_top_level_config\n self.parameters: List[CppParameter] = list()\n self.configs: List[CppConfig] = list()", "def create_app(config_name):\n app = Flask(__name__)\n app.register_blueprint(v1, url_prefix=\"/api/v1/\")\n return app", 
"def __new__(cls, *args, **kwargs):\n if not cls._instance:\n cls._instance = super(Config, cls).__new__(cls)\n return cls._instance", "def create(\n name: str = typer.Argument(..., help=\"The name to give to the remote.\"),\n url: Optional[str] = typer.Option(None, '-u', help=\"URL of the remote. If not provided it will be requested.\"),\n secret: Optional[str] = typer.Option(\n None, '--secret', '-s', help=\"The skill secret. If not provided it will be requested.\"\n ),\n public_key_path: Optional[Path] = typer.Option(\n None, '-k', '--key', help=\"The path to the public key. If not provided it will be requested.\"\n ),\n):\n app_dir = Path(typer.get_app_dir('skills-cli', force_posix=True))\n config_file = app_dir / 'config.json'\n\n if not app_dir.exists():\n app_dir.mkdir(parents=True)\n\n if config_file.exists():\n config = json.loads(config_file.read_text(encoding='utf-8'))\n else:\n typer.secho(f'Config file {config_file} not found, creating...')\n config_file.touch()\n config = {}\n\n remotes = config.get('remotes', {})\n existing_config = remotes.get(name, {})\n if existing_config:\n typer.confirm(\n f'A configuration with the name \"{name}\" already exists, would you like to overwrite it?', abort=True\n )\n\n if not secret:\n secret = prompt_for_secret()\n\n if not public_key_path:\n public_key_path = prompt_for_key()\n\n if not url:\n url = typer.prompt('URL to invoke the skill', default='http://localhost:8080/parse')\n\n remotes[name] = {'name': name, 'url': url, 'secret': secret, 'public_key_path': str(public_key_path.absolute())}\n config['remotes'] = remotes\n config_file.write_text(json.dumps(config, indent=2), encoding='utf-8')", "def create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(config[config_name])\n config[config_name].init_app(app)\n register_extensions(app)\n\n from main import main as main_blueprint\n\n app.register_blueprint(main_blueprint, url_prefix='/')\n\n from preview import preview as preview_blueprint\n\n app.register_blueprint(preview_blueprint, url_prefix='/preview')\n\n return app", "def create_deployment_config(self, body, **kwargs):\n\n all_params = ['body', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method create_deployment_config\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `create_deployment_config`\")\n\n resource_path = '/oapi/v1/deploymentconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n 
files=local_var_files,\n response_type='V1DeploymentConfig',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def load(self, name):\n resolvedName = envString.resolve(name)\n configuration = CondorConfig()\n configuration.load(resolvedName)\n self.defaults = {}\n\n if configuration.platform.nodeSetRequired and self.opts.nodeSet is None:\n print(\"error: nodeset parameter required by this platform\")\n sys.exit(10)\n\n tempDefaultRoot = Template(configuration.platform.defaultRoot)\n self.defaults[\"DEFAULT_ROOT\"] = tempDefaultRoot.substitute(\n USER_NAME=self.commandLineDefaults[\"USER_NAME\"])\n\n tempLocalScratch = Template(configuration.platform.localScratch)\n self.defaults[\"LOCAL_SCRATCH\"] = \\\n tempLocalScratch.substitute(USER_NAME=self.commandLineDefaults[\"USER_NAME\"])\n self.defaults[\"IDS_PER_JOB\"] = configuration.platform.idsPerJob\n self.defaults[\"DATA_DIRECTORY\"] = envString.resolve(configuration.platform.dataDirectory)\n self.defaults[\"FILE_SYSTEM_DOMAIN\"] = configuration.platform.fileSystemDomain\n self.defaults[\"EUPS_PATH\"] = configuration.platform.eupsPath\n self.defaults[\"MANAGER_SOFTWARE_HOME\"] = configuration.platform.manager_software_home\n\n platform_dir = lsst.utils.getPackageDir(\"ctrl_platform_\"+self.opts.platform)\n self.defaults[\"PLATFORM_DIR\"] = platform_dir\n self.manager = configuration.platform.manager\n self.setup_using = configuration.platform.setup_using", "def create_app(config=DevConfig, **kwargs):\n app = Flask(__name__, **kwargs)\n app.config.from_object(config)\n\n # flask-restplus seem to use standard json lib and not the flask one\n # so we patch it here so it can handle UUIDs\n JSONEncoder.default = JSONEncoder_newdefault\n\n extensions.init_app(app)\n modules.init_app(app)\n\n return app", "def create_app(config_filename):\n app = Flask(__name__)\n app.config.from_object(app_config[config_filename])\n \n from app import api_bp\n app.register_blueprint(api_bp, url_prefix='/api')\n\n db.init_app(app)\n migrate = Migrate(app, db, compare_type=True)\n\n from models import department, employee\n\n return app", "def application_create_from_dict(self, parameters: dict):\n # check is exists\n if self.is_app_exists(parameters[KnownParameters.SITE_NAME.value], parameters[KnownParameters.APP_NAME.value]):\n return\n\n site_name = parameters[KnownParameters.SITE_NAME.value]\n app_virtual_path = parameters[KnownParameters.APP_NAME.value]\n physical_path = parameters.get(KnownParameters.PHYSICAL_PATH.value)\n if not physical_path:\n app_folder = app_virtual_path.replace('/', '\\\\')\n app_folder = remove_starting_backward_slash(app_folder)\n physical_path = self.create_physical_path_for_virtual_path(os.path.join(site_name, app_folder))\n\n # use default app pool by default\n pool = parameters.get(KnownParameters.POOL_NAME.value, None)\n self.app_create(site_name, app_virtual_path, physical_path, pool)", "def create_app(config):\n\n # Initialize app. 
Flatten config_obj to dictionary (resolve properties).\n app = Flask(__name__)\n config_dict = dict(\n [(k, getattr(config, k)) for k in dir(config) if\n not k.startswith('_')])\n\n app.config.update(config_dict)\n\n for bp in all_blueprints:\n import_module(bp.import_name)\n app.register_blueprint(bp)\n\n pipeline.set_enforce_auth(False)\n\n # Return the application instance.\n return app", "def create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(app_config[config_name])\n # versions of api\n from app.api.v2 import version2 as v2\n\n app.register_blueprint(v2)\n\n # registered JWT manager\n app.config['JWT_SECRET_KEY'] = 'owezzy'\n jwt = JWTManager(app)\n\n create_tables()\n\n return app", "def __init__(self, config_path: str = \"config.json\"):\n # Change here if you want to relocate you config file\n self.config = {}\n self.load_configuration(config_path)\n self.app_name = self.config.get('app_name', self.APP_NAME)", "def create_app(config_filename=None, config_object=None):\n app = Flask(__name__)\n\n app.config.from_object('psephology.config.default')\n if config_filename is not None:\n app.config.from_pyfile(config_filename)\n if config_object is not None:\n app.config.from_object(config_object)\n\n db.init_app(app)\n migrate.init_app(app, db, render_as_batch=True)\n\n app.register_blueprint(ui)\n app.register_blueprint(api, url_prefix='/api')\n app.cli.add_command(cli)\n\n # Things which should only be present in DEBUG-enabled apps\n app.debug = app.config.get('DEBUG', False)\n if app.debug:\n from flask_debugtoolbar import DebugToolbarExtension\n toolbar = DebugToolbarExtension()\n toolbar.init_app(app)\n\n return app", "def create_app(self):\n\n app = create_app()\n app.config.from_object('project.config.TestingConfig')\n return app", "def create_app(configobj=ProdConfig):\n\n app = Flask(__name__)\n app.config.from_object(configobj)\n configure_blueprints(app)\n configure_extensions(app)\n configure_callbacks(app)\n configure_filters(app)\n configure_error_handlers(app)\n return app", "def new_architecture_config(self, name, config, description=''):\n return self._new_ref_config(\n ConfigRef.ARCHITECTURE, name, config, description)", "def set_config(config_name, host, port, core=''):\n global CONFIGS\n CONFIGS[config_name] = {'host': host, 'port': port, 'core': core}", "def create_hltsv_app(config_db, hltsv_host):\n config_rules = config_db.getObject(\"ConfigurationRuleBundle\",\n \"DefaultConfigurationRuleBundle\")\n hltsv_dal = dal_module(\"hltsv_dal\", 'daq/schema/hltsv.schema.xml')\n \n roib_plugin = hltsv_dal.RoIBPluginInternal(\"plugin_internal\")\n roib_plugin.Libraries.append(\"libsvl1internal\")\n\n hltsv_main = config_db.getObject(\"Binary\", \"hltsv_main\")\n \n hltsv_app = hltsv_dal.HLTSVApplication(\"HLTSV\")\n hltsv_app.ConfigurationRules = config_rules\n hltsv_app.RoIBInput = roib_plugin\n hltsv_app.Program = hltsv_main\n hltsv_app.RunsOn = hltsv_host\n \n return hltsv_app", "def _create_config_file(self, node_name, node_folder):\n node_config = dict(self.BASE_CONFIG)\n node_config[\"name\"] = node_name\n\n config_json = os.path.join(node_folder, self.CONFIG_FILE)\n with open(config_json, \"w\") as outfile:\n json.dump(node_config, outfile)", "def pre_global_system_config_create(self, resource_dict):\n pass", "def create(self, name=None, dynurl_config_data=None):\n data = DataObject()\n data.add_value_string(\"name\", name)\n data.add_value_string(\"dynurl_config_data\", dynurl_config_data)\n\n response = self.client.post_json(URL_MAPPING, 
data.data)\n response.success = response.status_code == 200\n return response", "def test_create_namespaced_deployment_config(self):\n pass", "def _init_config(self, configPath=None):\n # TODO: The SafeConfigParser class has been renamed to ConfigParser in Python 3.2.\n # This alias will be removed in future versions.\n # We still use SafeConfigParser for backwards compatibility with Python 2.\n self.config = SafeConfigParser()\n # Make option names case sensitive\n self.config.optionxform = str\n\n if configPath and os.path.isdir(configPath):\n configDir = configPath\n else:\n configDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'conf')\n\n # List filenames in configDir alphabetically\n _, _, configFiles = next(os.walk(configDir))\n configFiles = sorted(configFiles, key=str.lower)\n\n # Read configuration pipeline\n for f in configFiles:\n with open(os.path.join(configDir, f)) as configFile:\n self.config.readfp(configFile)\n self._store_config_pass()\n\n if configPath and os.path.isfile(configPath):\n self.config.read(configPath)\n self._store_config_pass()\n\n appSection = 'application'\n self.appName = self._get_option_value(appSection, 'appName')\n self.appResource = self._get_option_value(appSection, 'appResource')\n self.appArgs = []\n appArgs = self._get_option_value(appSection, 'appArgs')\n if appArgs:\n self.appArgs = appArgs.split(' ')\n self.mainClass = self._get_option_value(appSection, 'mainClass')", "def create_app(**config_overrides):\n # we want to modify the global app, not a local copy\n global app\n global eventum\n\n app = Flask(__name__)\n\n # Load config then apply overrides\n app.config.from_object('config.flask_config')\n app.config.update(config_overrides)\n\n # Initialize assets\n assets = Environment(app)\n register_scss(assets)\n\n # Eventum\n eventum = Eventum(app)\n\n # Blueprints\n register_blueprints()\n\n return app", "def _create_job_config(\n self,\n experiment_id: str,\n params: Optional[dict],\n pipeline_package_path: Optional[str],\n pipeline_id: Optional[str],\n version_id: Optional[str],\n enable_caching: Optional[bool],\n ):\n\n class JobConfig:\n\n def __init__(self, spec, resource_references):\n self.spec = spec\n self.resource_references = resource_references\n\n params = params or {}\n pipeline_json_string = None\n if pipeline_package_path:\n pipeline_obj = self._extract_pipeline_yaml(pipeline_package_path)\n\n # Caching option set at submission time overrides the compile time settings.\n if enable_caching is not None:\n self._override_caching_options(pipeline_obj, enable_caching)\n\n pipeline_json_string = json.dumps(pipeline_obj)\n api_params = [\n kfp_server_api.V1Parameter(\n name=sanitize_k8s_name(name=k, allow_capital_underscore=True),\n value=str(v) if type(v) not in (list, dict) else json.dumps(v))\n for k, v in params.items()\n ]\n resource_references = []\n key = kfp_server_api.models.V1ResourceKey(\n id=experiment_id,\n type=kfp_server_api.models.V1ResourceType.EXPERIMENT)\n reference = kfp_server_api.models.V1ResourceReference(\n key=key, relationship=kfp_server_api.models.V1Relationship.OWNER)\n resource_references.append(reference)\n\n if version_id:\n key = kfp_server_api.models.V1ResourceKey(\n id=version_id,\n type=kfp_server_api.models.V1ResourceType.PIPELINE_VERSION)\n reference = kfp_server_api.models.V1ResourceReference(\n key=key,\n relationship=kfp_server_api.models.V1Relationship.CREATOR)\n resource_references.append(reference)\n\n spec = kfp_server_api.models.V1PipelineSpec(\n pipeline_id=pipeline_id,\n 
workflow_manifest=pipeline_json_string,\n parameters=api_params)\n return JobConfig(spec=spec, resource_references=resource_references)", "def create_config(venv_directory, project_directory, port, host, db_name, db_user, db_password):\n print(\"Erzeuge Konfigurationsdatei 'config.yaml'...\", end=\"\", flush=True)\n secret = _run(\n f'{venv_directory}/bin/python3 -c '\n '\"from django.core.management import utils; print(utils.get_random_secret_key())\"',\n capture_output=True, raise_on_error=False\n )\n with open(f\"{project_directory}/config.yaml\", 'w') as f:\n f.write(\n app_config.format(\n secret=secret.stdout.decode().strip(),\n host=host,\n port=port,\n db_name=db_name,\n db_user=db_user,\n db_password=db_password\n )\n )\n if secret.returncode:\n print(\"\\n Hinweis Konfiguration unvollständig: konnte keinen SECRET_KEY erzeugen.\")\n return False\n print(\"OK.\")\n return True", "def build_runtime(self, runtime_name, file):\n self.compute_handler.build_runtime(runtime_name, file)", "def GenerateConfig(context):\r\n \r\n module = \"frontend\"\r\n cc = config_merger.ConfigContext(context.properties, module)\r\n \r\n return {\r\n 'resources': [{\r\n 'name': 'simple_frontend',\r\n 'type': 'simple_frontend.py',\r\n 'properties': context.properties\r\n }], \r\n 'outputs': [{\r\n 'name': 'env_name',\r\n 'value': context.properties[\"envName\"]\r\n },{\r\n 'name': 'context',\r\n 'value': cc.configs['CONTEXT']\r\n },{\r\n 'name': 'HQ_Address',\r\n 'value': cc.configs['HQ_Address']\r\n },{\r\n 'name': 'ServiceName',\r\n 'value': cc.configs['ServiceName']\r\n },{\r\n 'name': 'versionNR',\r\n 'value': cc.configs['versionNR']\r\n },{\r\n 'name': 'outp_3',\r\n 'value':str(cc.configs)\r\n }]\r\n \r\n }", "def create_app(config_class=DevConfig):\n\n app = Flask(__name__)\n app.config.from_object(config_class)\n\n # Register Blueprints\n from routes import bp_main\n app.register_blueprint(bp_main)\n\n return app", "def create_new_config():\n with open('HousingPriceScraper/HousingPriceScraper/configs/input_urls/recent_urls.json') as recent_urls_json:\n urls_dict = json.load(recent_urls_json)\n config_name = input('Type a name for the new config file:\\n').replace(' ', '_').replace(':', '')\n config_desc = input('Type a brief description for the new config file:\\n')\n with open('HousingPriceScraper/HousingPriceScraper/configs/input_urls/{}.json'.format(config_name), 'w') as fp:\n json.dump(urls_dict, fp, sort_keys=True, indent=4)\n with open('HousingPriceScraper/HousingPriceScraper/configs/input_url_config_descriptions.txt', 'a') as input_descs:\n input_descs.write('\\n{}: {}'.format(config_name, config_desc))\n print('\\nSuccessfully saved recently scraped urls to new config: {}.json'.format(config_name))", "def xmodule_runtime(self, create, xmodule_runtime, **kwargs): # pylint: disable=method-hidden, unused-argument\r\n if xmodule_runtime is None:\r\n xmodule_runtime = ModuleSystemFactory()\r\n\r\n self.xmodule_runtime = xmodule_runtime", "def from_config(cls, xknx, name, config):\n group_address = config.get(\"group_address\")\n scene_number = int(config.get(\"scene_number\"))\n return cls(\n xknx, name=name, group_address=group_address, scene_number=scene_number\n )", "def create_app(config: dict) -> Flask:\n for key, value in config.items():\n app.config[key] = value\n db.init_app(app)\n ma.init_app(app)\n app.app_context().push()\n return app", "def create_namespaced_deployment_config(self, body, namespace, **kwargs):\n\n all_params = ['body', 'namespace', 'pretty']\n 
all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method create_namespaced_deployment_config\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `create_namespaced_deployment_config`\")\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `create_namespaced_deployment_config`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/deploymentconfigs'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='V1DeploymentConfig',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def prepare(ctx, resource_config, **_):\n # Save the parameters\n ctx.instance.runtime_properties['resource_config'] = resource_config", "def prepare(ctx, resource_config, **_):\n # Save the parameters\n ctx.instance.runtime_properties['resource_config'] = resource_config", "def create(self, project_name: str):\n \n #check naming-convention\n Utility.matchNamingConvention(project_name)\n\n projects_folder = self.config.projects_folder\n\n #check if folder already existsts\n Utility.checkNotOccupied(project_name, projects_folder)\n\n target_path = projects_folder+project_name\n\n project_godot_file_path = target_path+'/project.godot'\n\n os.mkdir(target_path)\n os.makedirs(target_path+'/bin/plugins', exist_ok=True)\n os.mknod(project_godot_file_path)\n\n project_godot_file = open(project_godot_file_path, mode='w')\n project_godot_file.write('[application]\\n\\nconfig/name=\"'+project_name+'\"\\n')", "def _build_data_connector_from_config(\n self,\n name: str,\n config: Dict[str, Any],\n ) -> DataConnector:\n new_data_connector: DataConnector = instantiate_class_from_config(\n config=config,\n runtime_environment={\n \"name\": name,\n \"datasource_name\": self.name,\n \"execution_engine\": self.execution_engine,\n },\n config_defaults={\n \"module_name\": \"great_expectations.datasource.data_connector\"\n },\n )\n new_data_connector.data_context_root_directory = (\n self._data_context_root_directory # type: ignore[assignment]\n )\n\n self.data_connectors[name] = new_data_connector\n return new_data_connector", "def test_python_custom_runtime_field(self):\n self.write_file('test.py', 'test file')\n config = testutil.AppInfoFake(runtime='custom',\n 
entrypoint='my_entrypoint')\n self.assertTrue(self.generate_configs(appinfo=config, deploy=True))", "def resource(self, n):\n\n cfg = self.read()\n\n for res in cfg.get('Resources', []):\n res_name = res.get('Resource')\n\n if res_name == n:\n return ConfigResource(res)", "def _create_config(env_path):\n s2e_yaml = 's2e.yaml'\n version_path = os.path.join(os.path.dirname(__file__), '..', 'dat', 'VERSION')\n\n with open(version_path, 'r', encoding='utf-8') as fp:\n context = {\n 'creation_time': str(datetime.datetime.now()),\n 'version': fp.read().strip(),\n }\n\n render_template(context, s2e_yaml, os.path.join(env_path, s2e_yaml))", "def FromDict(raw_config, factory):\r\n c = Config()\r\n c.raw_config = raw_config\r\n c.base_directory = raw_config[\"base_directory\"]\r\n c.scratch_directory = raw_config[\"scratch_directory\"]\r\n\r\n sections = [\"resources\"]\r\n for section in sections:\r\n section_list = getattr(c, section)\r\n LoadSectionList(raw_config.get(section, []), section_list, factory)\r\n # Two-phase load.\r\n for section in sections:\r\n section_list = getattr(c, section)\r\n for resource in section_list:\r\n resource.Init(c)\r\n return c", "def create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(app_config[config_name])\n app.register_blueprint(party_bluprint)\n app.register_blueprint(office_bluprint)\n app.register_blueprint(register_bluprint)\n app.register_blueprint(login_bluprint)\n app.register_blueprint(office_bluprint)\n app.register_blueprint(party_bluprint)\n app.register_error_handler(400, bad_request)\n app.register_error_handler(500, internal_server_error)\n app.register_error_handler(404, page_not_found)\n app.register_error_handler(405, method_not_allowed)\n # app.register_error_handler('Type error', type_error)\n return app", "def create_app(config='dev'):\n if config == 'dev':\n from .conf.config import DevelopmentConfig as dev_config\n app = configure_app(Flask(__name__), dev_config)\n else:\n from .conf.config import ProdConfig\n app = configure_app(Flask(__name__), ProdConfig)\n\n # setup flask blueprints\n configure_blueprints(app)\n\n return app", "def create_default(cls, env: str, config_f: Path) -> None:\n # create default file\n _config = Config()\n _config.save(env)\n log.info(f\"Created config file at {config_f}\")", "def create_app(env):\n connexion_app = connexion.App(__name__, specification_dir='openapi/',\n options={'swagger_url': '/swagger'})\n app = connexion_app.app\n env_config_class_map = {\n 'prod': 'config.Prod',\n 'testing': 'config.Testing',\n 'dev': 'config.Dev'\n }\n config_class = env_config_class_map.get(env)\n app.config.from_object(config_class)\n print(app.config)\n app.redis = Redis.from_url(app.config['REDIS_URI'])\n app.default_task_queue = rq.Queue('default', connection=app.redis, ttl=-1)\n with app.app_context():\n import config as flask_config\n app.after_request(flask_config.request_logger)\n app.register_blueprint(rq_dashboard.blueprint, url_prefix='/rq')\n connexion_app.add_api('spec.yaml')\n return connexion_app", "def create(ctx, **kwargs):\n # creates and activates pf9-express config file\n\n pf9_exp_conf_dir = ctx.obj['pf9_exp_conf_dir']\n \n # Backup existing config if one exist\n if os.path.exists(pf9_exp_conf_dir + 'express.conf'):\n with open(pf9_exp_conf_dir + 'express.conf', 'r') as current:\n lines = current.readlines()\n current.close()\n for line in lines:\n if 'config_name|' in line:\n line = line.strip()\n name = line.replace('config_name|','')\n\n filename = name + '.conf'\n 
shutil.copyfile(pf9_exp_conf_dir + 'express.conf', pf9_exp_conf_dir + filename)\n\n if not os.path.exists(pf9_exp_conf_dir):\n try:\n access_rights = 0o700\n os.makedirs(pf9_exp_conf_dir, access_rights)\n except Exception:\n click.echo(\"Creation of the directory %s failed\" % pf9_exp_conf_dir)\n else:\n click.echo(\"Successfully created the directory %s \" % pf9_exp_conf_dir)\n\n with open(pf9_exp_conf_dir + 'express.conf', 'w') as file:\n for k,v in ctx.params.items():\n file.write(k + '|' + str(v) + '\\n')\n click.echo('Successfully wrote Platform9 management plane configuration')", "def create_app(config_name='local', queue_name=''):\n from cloudsml_computational_backend_common import kuyruk\n\n assert config_name in CONFIG_NAME_MAPPER\n\n try:\n config = import_object_str(CONFIG_NAME_MAPPER[config_name])\n except ImportError:\n if config_name == 'local':\n logging.error(\n \"You have to have `local_config.py` or `local_config/__init__.py` in order to use \"\n \"the default 'local' Flask Config.\"\n )\n sys.exit(1)\n raise\n\n kuyruk.config.from_dict(config.KUYRUK_CONFIG)\n\n args = Args()\n args.queues = [queue_name]\n\n global current_app # pylint: disable=global-statement,invalid-name\n current_app = app = Worker(kuyruk, args)\n app.config = config\n\n from . import extensions\n extensions.init_app(app)\n\n return app", "def _create(self, **kwargs):\n\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None)\n )\n\n return self._make_request(method='POST', config=config)", "def createConfig(couchDBName):\n\n PSetTweak = {'process': {'outputModules_': ['RECOoutput', 'ALCARECOoutput'],\n 'RECOoutput': {'dataset': {'dataTier': 'RECO',\n 'filterName': 'Filter'}},\n 'ALCARECOoutput': {'dataset': {'dataTier': 'ALCARECO',\n 'filterName': 'AlcaFilter'}}}}\n\n configCache = ConfigCache(os.environ[\"COUCHURL\"], couchDBName = couchDBName)\n configCache.createUserGroup(groupname = \"testGroup\", username = 'testOps')\n configCache.setPSetTweaks(PSetTweak = PSetTweak)\n configCache.save()\n\n return configCache.getCouchID()", "def post_global_system_config_create(self, resource_dict):\n pass", "def create_app(config_class):\n # create a Flask application instance\n app = Flask(__name__)\n\n # load configs\n app.config.from_object(config_class)\n\n register_extensions(app)\n register_blueprints(app)\n register_error_handlers(app)\n register_shell_context(app)\n register_middleware(app)\n\n return app", "def create_app(self):\n app.config.from_object('config.TestingConfig')\n return app", "def create_app(config_object):\n app = Flask(__name__)\n app.config.from_object(config_object)\n\n # add blueprint\n from app.api import api_bp\n app.register_blueprint(api_bp, url_prefix='/api/v1/')\n\n # add redis client\n from app.redis_init import redis_client\n redis_client.init_app(app)\n\n # add prometheus middleware\n from app.prometheus_middleware import setup_metrics\n setup_metrics(app)\n\n return app" ]
[ "0.6188884", "0.6090293", "0.6090293", "0.58227265", "0.57470423", "0.57029325", "0.5658915", "0.55869395", "0.55303633", "0.5527195", "0.5509801", "0.5489197", "0.5486522", "0.544036", "0.54257023", "0.5423881", "0.54184675", "0.5381031", "0.5323874", "0.532296", "0.5305365", "0.5294932", "0.5290716", "0.5286244", "0.52787", "0.5274366", "0.5273713", "0.52628815", "0.5244976", "0.5243933", "0.52050924", "0.5190959", "0.5178976", "0.51752865", "0.5174276", "0.5169658", "0.51633406", "0.5161968", "0.5146913", "0.5141444", "0.5134585", "0.5122163", "0.5121051", "0.5119277", "0.5118842", "0.51180685", "0.5096638", "0.50904584", "0.50885737", "0.50671774", "0.5044469", "0.5043991", "0.50419056", "0.5030806", "0.5030185", "0.50267696", "0.502463", "0.50212944", "0.501936", "0.5002796", "0.49902344", "0.49880868", "0.49877298", "0.49849606", "0.4981109", "0.4962353", "0.49551362", "0.4950384", "0.49419165", "0.4940633", "0.49356592", "0.4931958", "0.49304637", "0.49303213", "0.492537", "0.4922062", "0.49215925", "0.49197805", "0.4912548", "0.49090153", "0.4906443", "0.48936582", "0.48936582", "0.48906383", "0.48768458", "0.4874466", "0.48710993", "0.48630366", "0.48627007", "0.48537892", "0.48488158", "0.48463777", "0.4838908", "0.48255998", "0.481663", "0.48164928", "0.481453", "0.4809114", "0.48029184", "0.48015344", "0.48009178" ]
0.0
-1
Updates a RuntimeConfig resource. The configuration must exist beforehand.
def UpdateConfig(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(name, config, backend, storage, debug):\n setup_lithops_logger(logging.DEBUG)\n\n verify_runtime_name(name)\n\n if config:\n config = load_yaml_config(config)\n\n config_ow = set_config_ow(backend, storage, runtime_name=name)\n config = default_config(config, config_ow)\n\n if config['lithops']['mode'] != SERVERLESS:\n raise Exception('\"lithops runtime update\" command is only valid for serverless backends')\n\n storage_config = extract_storage_config(config)\n internal_storage = InternalStorage(storage_config)\n compute_config = extract_serverless_config(config)\n compute_handler = ServerlessHandler(compute_config, internal_storage)\n\n timeout = compute_config['runtime_memory']\n logger.info('Updating runtime: {}'.format(name))\n\n runtimes = compute_handler.list_runtimes(name)\n\n for runtime in runtimes:\n runtime_key = compute_handler.get_runtime_key(runtime[0], runtime[1])\n runtime_meta = compute_handler.create_runtime(runtime[0], runtime[1], timeout)\n\n try:\n internal_storage.put_runtime_meta(runtime_key, runtime_meta)\n except Exception:\n raise (\"Unable to upload 'preinstalled-modules' file into {}\".format(internal_storage.backend))", "def _update(self, **kwargs):\n\n resource_name = self._get_resource_name(**kwargs)\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None)\n )\n\n return self._make_request(\n uri='%s/%s' % (self._metadata['uri'], resource_name),\n method='PUT',\n config=config\n )", "def update_config(self, config):\n return self._update_config(\"config\", config)", "def update_config(self, update_dict):\n self.config = recursive_merge_dicts(self.config, update_dict)", "def config_update(self, update: io.BytesIO) -> None:\n self.__logger.debug('Eva.config_update called')\n return self.__http_client.config_update(update)", "def reconfig_runtime(remote):\n env = get_env()\n freconfig = remote.get_function(\"tvm.contrib.vta.reconfig_runtime\")\n freconfig(env.pkg.cfg_json)", "def update(self, config_dict):\r\n self._update(config_dict, allow_new_keys=True)", "def update_config(self, kv: dict):\n self._configs.update(kv)\n self._save()", "def update(self, config_dict):\n self._update(config_dict, allow_new_keys=True)", "def update_global_config(self, config, **kwargs):\n pass", "def _update(self, config_dict, allow_new_keys=True):\r\n if not config_dict:\r\n return\r\n\r\n for k, v in six.iteritems(config_dict):\r\n if k not in self.__dict__:\r\n if allow_new_keys:\r\n self.__setattr__(k, v)\r\n else:\r\n raise KeyError('Key `{}` does not exist for overriding. 
'.format(k))\r\n else:\r\n if isinstance(self.__dict__[k], Config) and isinstance(v, dict):\r\n self.__dict__[k]._update(v, allow_new_keys)\r\n elif isinstance(self.__dict__[k], Config) and isinstance(v, Config):\r\n self.__dict__[k]._update(v.as_dict(), allow_new_keys)\r\n else:\r\n self.__setattr__(k, v)", "def update_runtime_variables(self) -> None:\n # Opportunistic, works if SELinux not enforced\n super().update_runtime_variables()\n self.parse_sysconfig_var()", "def platform_config_update(config):\n global remote_port_map\n config[\"port_map\"] = remote_port_map.copy()\n config[\"caps_table_idx\"] = 0", "def update(self, _id=None, dynurl_config_data=None):\n data = DataObject()\n data.add_value(\"dynurl_config_data\", dynurl_config_data)\n endpoint = URL_MAPPING + \"/{}\".format(_id)\n response = self.client.put_json(endpoint, data.data)\n response.success = response.status_code == 204\n return response", "def updateconfig(self):\n\n # Initialize the yaml data\n ydata = {\"metadata\": self._metadata, \"nodes\": self._nodes}\n\n # Write the system config file\n filename = self._rootdir + self._metadata[\"system_config_file\"]\n with open(filename, \"w\") as yamlfile:\n yaml.dump(ydata, yamlfile)", "def update_config(self, config):\n self.config = {\n \"display_name\": \"\",\n \"description\": \"\",\n \"required\": 1,\n \"type\": \"string\"\n }\n self.config.update(config)\n self.API_KEY = self.config['key']", "def update_config(self, config):\n self.config = {\n \"display_name\": \"\",\n \"description\": \"\",\n \"required\": 1,\n \"type\": \"string\"\n }\n self.config.update(config)\n self.API_KEY = self.config['key']", "def do_PUT(self):\n if self.path == \"/set_config\" or self.path == \"/set_config/\":\n\n if len(self.post_dict) > 0:\n for key, value in self.post_dict.items():\n\n self.log_message(f\"Set config '{key}' to '{value}'\")\n\n try:\n value = json.loads(value)\n\n except ValueError:\n self.log_message(f\"Could not parse JSON: {value}\")\n self.send_response(400)\n\n else:\n self.server.config[key] = value\n self.send_response(200)\n\n # No parameters sent to configure, so return success by default\n else:\n self.send_response(200)\n\n else:\n self.send_response(404)", "def update(\n self,\n resource_group_name, # type: str\n resource_name, # type: str\n location=None, # type: Optional[str]\n tags=None, # type: Optional[Dict[str, str]]\n namespace=None, # type: Optional[str]\n extension_properties=None, # type: Optional[Dict[str, str]]\n maintenance_scope=None, # type: Optional[Union[str, \"models.MaintenanceScope\"]]\n visibility=None, # type: Optional[Union[str, \"models.Visibility\"]]\n start_date_time=None, # type: Optional[str]\n expiration_date_time=None, # type: Optional[str]\n duration=None, # type: Optional[str]\n time_zone=None, # type: Optional[str]\n recur_every=None, # type: Optional[str]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MaintenanceConfiguration\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MaintenanceConfiguration\"]\n error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}\n error_map.update(kwargs.pop('error_map', {}))\n\n _configuration = models.MaintenanceConfiguration(location=location, tags=tags, namespace=namespace, extension_properties=extension_properties, maintenance_scope=maintenance_scope, visibility=visibility, start_date_time=start_date_time, expiration_date_time=expiration_date_time, duration=duration, time_zone=time_zone, recur_every=recur_every)\n api_version = \"2020-07-01-preview\"\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n\n # Construct URL\n url = self.update.metadata['url'] # type: ignore\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self._config.subscription_id\", self._config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'resourceName': self._serialize.url(\"resource_name\", resource_name, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n query_parameters['api-version'] = self._serialize.query(\"api_version\", api_version, 'str')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = 'application/json'\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(_configuration, 'MaintenanceConfiguration')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.MaintenanceError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MaintenanceConfiguration', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def post_global_system_config_update(self, resource_id, resource_dict):\n pass", "def edit_config_data(self, resource_data, **kwargs):\n if not isinstance(resource_data, ResourceData) or not resource_data.value:\n raise KeyError(\n \"'resource_data' should be ResourceData with 'value' attribute\")\n if not kwargs or 'feed_id' not in kwargs or 'resource_id' not in kwargs:\n raise KeyError(\"'feed_id' and 'resource_id' are mandatory field!\")\n r = self._put('entity/f;{}/r;{}/d;configuration'\n .format(kwargs['feed_id'], kwargs['resource_id']),\n {\"value\": resource_data.value})\n return r", "def _refreshconfig(self):\n self.config = ConfigGenerator(os.path.join(self.rundir, const.CONFIG_FILE))", "def update_config(update):\n global _config\n new_config = copy.deepcopy(_config)\n _update_dict_recursive(new_config, update)\n logging.config.dictConfig(new_config)\n _configure_ulog_bridge()\n _config = new_config", "def update_config(self, data):\n self.config.data = dict_merge(self.config.data, data)\n self.config.save()", "def pre_global_system_config_update(self, resource_id, resource_dict):\n pass", "def update_runtime_variables(self) 
-> None:\n\n self.update_defines()\n self.update_includes()\n self.update_modules()", "def define_config(\n cls, config_dict: dict = {}, config_path: Optional[Union[str, Path]] = None\n ):\n config = {}\n if config_path is not None:\n with open(Path(config_path), \"r\") as f:\n config = json.load(f)\n UpdateDict(config, config_dict)\n cls.update_config(**config)", "def _update_config_dict(self, config_fpath: str, config_dict: Optional[dict[str, Any]] = None) -> dict[str, Any]:\n if config_dict is None:\n to_update = {}\n else:\n to_update = deepcopy(config_dict)\n with open(config_fpath, 'rb') as f:\n to_update.update(tomli.load(f))\n return to_update", "def _auto_update_configuration(self) -> None:\n self.config = rasa.utils.train_utils.update_confidence_type(self.config)\n rasa.utils.train_utils.validate_configuration_settings(self.config)\n self.config = rasa.utils.train_utils.update_similarity_type(self.config)\n self.config = rasa.utils.train_utils.update_evaluation_parameters(self.config)", "def refresh_config(self):\n with open(config_name, 'rb') as f:\n self.CONFIG = simplejson.load(f)\n\n return self", "def update(self, config):\n if not isinstance(config, dict):\n raise ValueError(\"Argument `config` should be dictionary\")\n self.__data.update(config)", "def updateConfig(self):\n # Make sure to keep the default values in place.\n if self.newConfig['sensor'] == 0:\n self.newConfig['sensor'] = self.config['sensor']\n if self.newConfig['camera'] == 0:\n self.newConfig['camera'] = self.config['camera']\n if not self.newConfig['auto']['times']:\n self.newConfig['auto']['times'] = self.config['auto']['times']\n if not self.newConfig['auto']['days']:\n self.newConfig['auto']['days'] = self.config['auto']['days']\n\n # Show the changes.\n if self.verbosity >= 1:\n print('%s: Updating configuration file...' % self.feederName)\n try:\n for key in self.config.keys():\n if type(self.config[key]) is dict:\n for subkey in self.config[key].keys():\n if self.config[key][subkey] != self.newConfig[key][subkey]:\n print('%s: Updating %s from %s to %s.' % (self.feederName, subkey, self.config[key][subkey], self.newConfig[key][subkey]))\n elif self.config[key] != self.newConfig[key]:\n print('%s: Updating %s from %s to %s.' % (self.feederName, key, self.config[key], self.newConfig[key]))\n except ValueError:\n if self.verbosity >= 1:\n print('%s: Configuration file does not contain a valid JSON object.' % self.feederName)\n if self.verbosity == 2:\n print('%s: Overwriting configuration file to: %s.' 
% (self.feederName, self.config))\n\n # Change the configuration file.\n self.config = self.newConfig\n self.writeConfig()", "def move_runtime_configs(self):\n from_section = 'config'\n to_section = 'runtime'\n RUNTIME_CONFS = [\n 'CLOCK_TIME',\n 'METPLUS_VERSION',\n 'MET_INSTALL_DIR',\n 'CONFIG_INPUT',\n 'METPLUS_CONF',\n 'TMP_DIR',\n 'STAGING_DIR',\n 'CONVERT',\n 'GEMPAKTOCF_JAR',\n 'GFDL_TRACKER_EXEC',\n 'INPUT_MUST_EXIST',\n 'USER_SHELL',\n 'DO_NOT_RUN_EXE',\n 'SCRUB_STAGING_DIR',\n 'MET_BIN_DIR',\n ]\n more_run_confs = [item for item in self.keys(from_section)\n if item.startswith('LOG') or item.endswith('BASE')]\n\n # create destination section if it does not exist\n if not self.has_section(to_section):\n self._conf.add_section(to_section)\n\n for key in RUNTIME_CONFS + more_run_confs:\n if not self.has_option(from_section, key):\n continue\n\n # add conf to [runtime] section\n self.set(to_section, key, super().getraw(from_section, key))\n\n # remove conf from [config] section\n self._conf.remove_option(from_section, key)", "def fusion_api_update_sflow_configuration(self, body=None, uri=None, api=None, headers=None):\n param = '/sflow-configuration'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)", "def fusion_api_reapply_li_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.li.update(body=None, uri=uri, api=api, headers=headers, param=param)", "def load_config(self):\n if os.path.exists(self.config_file):\n with open(self.config_file) as f:\n conf = json.load(f)\n\n self.update_attributes_from_config(conf)", "def fusion_api_reapply_le_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.logical_enclosure.put(body=None, uri=uri, param=param, api=api, headers=headers)", "def do_PUT(self):\r\n if self.path == \"/set_config\" or self.path == \"/set_config/\":\r\n\r\n if len(self.post_dict) > 0:\r\n for key, value in self.post_dict.iteritems():\r\n\r\n # Decode the params as UTF-8\r\n try:\r\n key = unicode(key, 'utf-8')\r\n value = unicode(value, 'utf-8')\r\n except UnicodeDecodeError:\r\n self.log_message(\"Could not decode request params as UTF-8\")\r\n\r\n self.log_message(u\"Set config '{0}' to '{1}'\".format(key, value))\r\n\r\n try:\r\n value = json.loads(value)\r\n\r\n except ValueError:\r\n self.log_message(u\"Could not parse JSON: {0}\".format(value))\r\n self.send_response(400)\r\n\r\n else:\r\n self.server.config[key] = value\r\n self.send_response(200)\r\n\r\n # No parameters sent to configure, so return success by default\r\n else:\r\n self.send_response(200)\r\n\r\n else:\r\n self.send_response(404)", "def conf_update(self):\n pass", "def update_config(self, config):\n toolkit.add_template_directory(config, 'templates')\n toolkit.add_resource('fanstatic', 'mingus')\n return", "def update(self):\n self.save_config_file()", "def load(self):\n try:\n _config_file = open(self.config, 'r+')\n data = json.loads(_config_file.read())\n except (ValueError, IOError):\n data = {}\n\n self.update(data)", "def update(name, value, config_dir=None):\n if name not in Config.__ALLOWED:\n msg = f'Cannot update configuration; value \"{name}\" is not allowed.'\n raise ConfigurationError(msg)\n config_dir = Config.resolve_config_dir(config_dir)\n config_dat, config_file = Config.get_config_file(\n config_dir,\n round_trip_load=True,\n quiet=True,\n )\n config_dat.update({name: value})\n Config.write_config_file(config_dat, config_file)\n if Config.is_set:\n Config.__conf[name] = 
value", "def post_config_root_update(self, resource_id, resource_dict):\n pass", "def _update(self, config_dict, allow_new_keys=True):\n if not config_dict:\n return\n\n for k, v in six.iteritems(config_dict):\n if k not in self.__dict__.keys():\n if allow_new_keys:\n self.__setattr__(k, v)\n else:\n raise KeyError('Key `{}` does not exist for overriding. '.format(k))\n else:\n if isinstance(v, dict):\n self.__dict__[k]._update(v, allow_new_keys)\n else:\n self.__dict__[k] = copy.deepcopy(v)", "def __update(self):\n if self.__file:\n target_file = open(self.__file)\n for attr in dir(self):\n if not attr.startswith(\"_\") and \\\n (self.__overwrite or (attr not in self.__exclude)) \\\n and not self.__is_attr_callable(attr):\n try:\n delattr(self, attr)\n except AttributeError:\n pass\n pool = yaml.load(target_file)\n target_file.close()\n if pool: # could be None\n for key, val in pool.iteritems():\n if not key.startswith(\"_\") and \\\n (self.__overwrite or (key not in self.__exclude)) \\\n and not self.__is_attr_callable(key):\n setattr(self, key, val)\n if hasattr(self, 'log_config_file_changes')\\\n and self.log_config_file_changes:\n logging.getLogger(__name__).info(\"Config file has updated.\")", "def update(self, obj):\n\n self.cfg.update(obj)", "def pre_config_root_update(self, resource_id, resource_dict):\n pass", "def pre_config_node_update(self, resource_id, resource_dict):\n pass", "def update_config(self, config):\n toolkit.add_template_directory(config, 'templates')\n toolkit.add_public_directory(config, 'public')\n toolkit.add_resource('fanstatic', 'syngenta')", "def configure(new_config: Mapping):\n config.update(new_config)", "def configure(self, config: dict):\n self.config.update(config)", "def update(d):\n # get (or create) config path\n p= initialize()['config']\n\n with lockfile.LockFile(p):\n # load current configuration\n cnf = load_config(open(p))\n\n # merge \n def dict_merge(a, b):\n '''recursively merges dict's. 
not just simple a['key'] = b['key'], if\n both a and bhave a key who's value is a dict then dict_merge is called\n on both values and the result stored in the returned dictionary.\n from https://www.xormedia.com/recursively-merge-dictionaries-in-python/\n '''\n if not isinstance(b, dict):\n return b\n result = copy.deepcopy(a)\n for k, v in b.items():\n if k in result and isinstance(result[k], dict):\n result[k] = dict_merge(result[k], v)\n else:\n result[k] = copy.deepcopy(v)\n return result\n cnf = dict_merge(cnf, d)\n\n # save \n dump_config(cnf, open(p,'w'))", "def update_config(config, args):\n if args.n_train is not None:\n config['data']['n_train'] = args.n_train\n if args.n_valid is not None:\n config['data']['n_valid'] = args.n_valid\n if args.real_weight is not None:\n config['data']['real_weight'] = args.real_weight\n if args.lr is not None:\n config['optimizer']['learning_rate'] = args.lr\n if args.hidden_dim is not None:\n config['model']['hidden_dim'] = args.hidden_dim\n if args.n_graph_iters is not None:\n config['model']['n_graph_iters'] = args.n_graph_iters\n if args.batch_size is not None:\n config['data']['batch_size'] = args.batch_size\n if args.n_epochs is not None:\n config['training']['n_epochs'] = args.n_epochs\n if args.weight_decay is not None:\n config['optimizer']['weight_decay'] = args.weight_decay\n\n return config", "def update_config(config, args):\n if args.cfg:\n _update_config_from_file(config, args.cfg)\n config.defrost()\n if args.dataset:\n config.DATA.DATASET = args.dataset\n if args.batch_size:\n config.DATA.BATCH_SIZE = args.batch_size\n config.DATA.BATCH_SIZE_EVAL = args.batch_size\n if args.batch_size_eval:\n config.DATA.BATCH_SIZE_EVAL = args.batch_size_eval\n if args.image_size:\n config.DATA.IMAGE_SIZE = args.image_size\n if args.accum_iter:\n config.TRAIN.ACCUM_ITER = args.accum_iter\n if args.data_path:\n config.DATA.DATA_PATH = args.data_path\n if args.output:\n config.SAVE = args.output\n if args.eval:\n config.EVAL = True\n if args.pretrained:\n config.MODEL.PRETRAINED = args.pretrained\n if args.resume:\n config.MODEL.RESUME = args.resume\n if args.last_epoch:\n config.TRAIN.LAST_EPOCH = args.last_epoch\n if args.amp: # only for training\n config.AMP = not config.EVAL\n config.freeze()\n return config", "def fusion_api_reapply_interconnect_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.ic.put(body=None, uri=uri, api=api, headers=headers, param=param)", "def reload(self):\n self.load_config()\n # Seems we need to explicitly refresh this\n if self.main_instance:\n self.main_instance.config = self.config", "def set_config(self, aConfig):\n \n # we update the dict of the existing config with the passed\n # parameter. This means that the new config is merged with\n # the old, but all new members overwrite old one. This is\n # more robust.\n self._config.__dict__.update(aConfig.__dict__)\n # apply the config to the underlying logic\n self.config_to_logic()\n # bring it back all the way up to the view\n self.logic_to_config()\n\n # but only if we are in view mode\n if self.view_initialised:\n self.config_to_view()\n\n # the config has been set, so we assumem that the module has\n # now been modified. \n self._module_manager.modify_module(self)", "def set_config(self):\n str_config = cmds.getAttr(\"{}.{}\".format(self.root_node,\n CONFIG[\"config_attr\"]))\n try:\n # THIS NEEDS TO BE REVISTED. 
I am adding shit from file\n stored_config = ast.literal_eval(str_config)\n self.setup_config = get_added_dicts(stored_config, CONFIG)\n except Exception:\n cmds.warning(\"Could not retrieve CONFIG stored on setup!\")\n self.setup_config = CONFIG", "def _config_worker(self, operation_type=None, pybind_object=None, rest_leaf_name=None, resource_depth=None, timeout=''):\n\n rest_operation = ''\n rest_uri = ''\n rest_data = ''\n rest_commands = []\n\n if 'create' == operation_type:\n rest_operation = 'POST'\n rest_uri = pybind_object._rest_uri_for_post()\n elif 'update_patch' in operation_type:\n rest_operation = 'PATCH'\n rest_uri = pybind_object._rest_uri()\n elif 'update_put' in operation_type:\n rest_operation = 'PUT'\n rest_uri = pybind_object._rest_uri()\n elif 'delete' == operation_type:\n rest_operation = 'DELETE'\n rest_uri = pybind_object._rest_uri()\n\n label_list_items = lambda x: x\n\n if 'update' in operation_type:\n update_object_rest_data = ''\n rest_data = dicttoxml(json.loads(pybindJSON.dumps(pybind_object, mode='rest'), object_pairs_hook=OrderedDict), root=False, attr_type=False, item_func=label_list_items)\n\n for key in pybind_object.elements():\n update_object_name = getattr(pybind_object, '_get_' + key)\n update_object = update_object_name()\n rest_uri = update_object._rest_uri()\n rest_uri_end_element = rest_uri.split('/')[-1]\n\n if update_object._is_keyval == False and (update_object._changed() == True or (update_object.default() and update_object == update_object.default())):\n rest_name = update_object.rest_name()\n yang_leaf_name = update_object.yang_name()\n temp_pybind_obj = update_object\n\n while(temp_pybind_obj._parent and (rest_name == '' or rest_name != rest_uri_end_element)):\n rest_name = temp_pybind_obj.rest_name()\n yang_leaf_name = temp_pybind_obj.yang_name()\n temp_pybind_obj = temp_pybind_obj._parent\n\n if hasattr(temp_pybind_obj, '_pyangbind_elements'):\n rest_data = dicttoxml(json.loads(pybindJSON.dumps(temp_pybind_obj, mode='rest'), object_pairs_hook=OrderedDict), root=False, attr_type=False, item_func=label_list_items)\n elif update_object.default() and update_object == update_object.default():\n rest_data += '<{0}>{1}</{0}>'.format(rest_name, update_object)\n\n match = re.match(r'.*(<{0}>.*</{0}>).*'.format(rest_name), rest_data)\n\n if match:\n update_object_rest_data = match.group(1)\n\n if repr(temp_pybind_obj) is 'False':\n rest_operation = 'DELETE'\n else:\n if 'update_patch' in operation_type:\n rest_operation = 'PATCH'\n elif 'update_put' in operation_type:\n rest_operation = 'PUT' \n\n if rest_operation == 'DELETE':\n rest_commands.append([rest_operation, rest_uri, '', 'config', resource_depth])\n elif 'bulk' not in operation_type:\n rest_commands.append([rest_operation, rest_uri, update_object_rest_data, 'config', resource_depth])\n \n if 'bulk' in operation_type:\n if 'update_patch' in operation_type:\n rest_operation = 'PATCH'\n elif 'update_put' in operation_type:\n rest_operation = 'PUT'\n\n update_object = update_object._parent\n rest_uri = update_object._rest_uri()\n rest_uri_end_element = rest_uri.split('/')[-1]\n update_object_rest_data = '<{0}>{1}</{0}>'.format(rest_uri_end_element, rest_data)\n\n rest_commands.append([rest_operation, rest_uri, update_object_rest_data, 'config', resource_depth])\n\n rest_commands.reverse()\n else:\n uri = pybind_object._rest_uri() \n\n pybind_object = pybind_object._parent\n \n rest_data = dicttoxml(json.loads(pybindJSON.dumps(pybind_object, mode='rest'), object_pairs_hook=OrderedDict), 
root=False, attr_type=False, item_func=label_list_items)\n\n if rest_data:\n end_marker = rest_data.rsplit('<', 1)[1].strip('/')\n\n rest_data = rest_data.rsplit('<', 1)[0]\n rest_data = rest_data.split(end_marker, 1)[-1]\n\n if operation_type == 'create' and end_marker[:-1] + '/' in uri:\n rest_uri = uri.rsplit(end_marker[:-1] + '/', 1)[0]\n\n if len(rest_uri) > 1:\n rest_uri = rest_uri.rstrip('/')\n else:\n if operation_type == 'create':\n uri = uri.split('/')[-1]\n rest_data = '<' + uri + '> </' + uri + '>'\n \n if operation_type == 'delete' and rest_leaf_name:\n rest_uri += '/' + rest_leaf_name\n\n rest_commands.append([rest_operation, rest_uri, '', 'config', resource_depth])\n else:\n rest_commands.append([rest_operation, rest_uri, rest_data, 'config', resource_depth])\n\n return(rest_commands, '', timeout)", "def post_config_node_update(self, resource_id, resource_dict):\n pass", "def update(self, newer_config, merge=False):\n # removed 'merge' kw arg - and it was passed to constructor\n # make a note to not do that, consume it on the param list\n fresh_config = self.__class__(schema=self._schema)\n logger.debug('from parent update merge %s', merge)\n if not isinstance(newer_config, MergableConfig):\n raise util.ConfigError('Attempting to update a %s with a %s' % (\n self.__class__,\n newer_config.__class__))\n for prop_name in self._schema.keys():\n # get the specified property off of the current class\n prop = self.__class__.__dict__.get(util.as_attr(prop_name))\n assert prop\n new_value = prop.fget(newer_config)\n old_value = prop.fget(self)\n if merge and new_value is not None:\n if isinstance(new_value, dict) and isinstance(old_value, dict):\n new_value = old_value.update(new_value)\n elif isinstance(new_value, list) and isinstance(old_value, list):\n new_value = old_value.extend(new_value)\n if new_value is not None:\n prop.fset(fresh_config, new_value)\n elif merge and old_value is not None:\n prop.fset(fresh_config, old_value)\n return fresh_config", "async def update_config(desired_config: ConfigName, value: dict):\n redis = app.state.redis\n if desired_config == \"server\":\n await redis.set_key(\"influxdb_server\", orjson.dumps(value))\n return orjson.loads(await redis.get_key(\"influxdb_server\"))\n if desired_config == \"organizations\":\n await redis.set_key(\"influxdb_organizations\", orjson.dumps(value))\n return orjson.loads(await redis.get_key(\"influxdb_organizations\"))\n if desired_config == \"buckets\":\n await redis.set_key(\"influxdb_buckets\", orjson.dumps(value))\n return orjson.loads(await redis.get_key(\"influxdb_buckets\"))\n if desired_config == \"measurements\":\n await redis.set_key(\"influxdb_measurements\", orjson.dumps(value))\n return orjson.loads(await redis.get_key(\"influxdb_measurements\"))", "def update_host_config(self, hostid, config, **kwargs):\n pass", "def __load_config(runtime_env):\n config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), \"config.ini\")\n if not os.path.exists(config_file):\n raise FileNotFoundError(config_file)\n _app_config = configparser.ConfigParser()\n _app_config.read(config_file)\n\n # Evaluate\n _app_config = _app_config[runtime_env]\n return _app_config", "def update(self, other: Mapping[str, Any]) -> None:\n self._config.update(self._flatten_dict(other))", "def update_config_external_template(config):\r\n\r\n # best parameters from the paper\r\n config['train_batch_size'] = 16384\r\n config['lr'] = 3e-4\r\n config['sgd_minibatch_size'] = 4096\r\n config['num_sgd_iter'] = 4\r\n 
config['rollout_fragment_length'] = 100\r\n\r\n # run ID to communicate to the http trainer\r\n config['run_uid'] = '_setme'\r\n\r\n # stable baselines accepts full episodes\r\n config[\"batch_mode\"] = \"complete_episodes\"\r\n\r\n # stable baselines server address\r\n config[\"http_remote_port\"] = \"http://127.0.0.1:50001\"\r\n\r\n # no gpus, stable baselines might use them\r\n config['num_gpus'] = 0\r\n\r\n # set trainer class\r\n config['_trainer'] = \"External\"\r\n config['_policy'] = \"PPO\"\r\n\r\n # tuned\r\n config['num_envs_per_worker'] = 10\r\n config['num_workers'] = 3\r\n return config", "def post_global_vrouter_config_update(self, resource_id, resource_dict):\n pass", "def test_config_update(get_config):\n cfg = get_config(Config, {'test': 'main'})\n update_from = {\"name\": \"new_name\"}\n cfg.update(update_from)\n\n assert cfg.data.get('name') == \"new_name\", \"config was not updated\"", "def set_config(self, config):\r\n if self.config:\r\n raise ValueError(\"Already configured: %s\" % self.config)\r\n file(self.config_file, \"w\").write(\"\")\r\n config = self.manifest.config_schema.validate(config)\r\n for template in self.manifest.get(\"templates\", []):\r\n print \"Applying template %s with %s\" % (template, config)\r\n EJSTemplate(self.unchroot_path(template)).apply(self.unchroot_path(template), config)\r\n file(self.config_file, \"w\").write(json.dumps(config, indent=1))", "def update(self, enabled=None, cnames=None, comment=None):\r\n new_config = DistributionConfig(self.connection, self.config.origin,\r\n self.config.enabled, self.config.caller_reference,\r\n self.config.cnames, self.config.comment,\r\n self.config.trusted_signers,\r\n self.config.default_root_object)\r\n if enabled != None:\r\n new_config.enabled = enabled\r\n if cnames != None:\r\n new_config.cnames = cnames\r\n if comment != None:\r\n new_config.comment = comment\r\n self.etag = self.connection.set_distribution_config(self.id, self.etag, new_config)\r\n self.config = new_config\r\n self._object_class = Object", "def update_flask(self, flask_app):\n flask_app.config.update(self.flask_config_dict)", "def update_configuration(ConfigurationId=None, Data=None, Description=None):\n pass", "def update_from_dict(self, dct):\n if not dct:\n return\n all_props = self.__class__.CONFIG_PROPERTIES\n for key, value in six.iteritems(dct):\n attr_config = all_props.get(key)\n if attr_config:\n setattr(self, key, value)\n else:\n self.update_default_from_dict(key, value)", "def update_from_file(self):\n config_path = os.environ.get('MINDINSIGHT_CONFIG', '')\n if not config_path:\n return\n\n config_module = None\n\n # python:full.path.for.config.module\n if config_path.startswith('python:'):\n config_module = import_module(config_path[len('python:'):])\n\n # file:full/path/for/config.py\n elif config_path.startswith('file:'):\n config_path = config_path[len('file:'):]\n module_name = '__mindinsightconfig__'\n config_module = types.ModuleType(module_name)\n machinery = import_module('importlib.machinery')\n loader = machinery.SourceFileLoader(module_name, config_path)\n loader.exec_module(config_module)\n\n if config_module is None:\n return\n\n for setting in dir(config_module):\n if setting.isupper() and setting in self._default_settings:\n setting_value = getattr(config_module, setting)\n setattr(self, setting, setting_value)\n self._explicit_settings.add(setting)", "def _GetCuttlefishRuntimeConfig(runtime_cf_config_path, raw_data=None):\n if raw_data:\n # if remote instance couldn't fetch the config will 
return message such as\n # 'cat: .../cuttlefish_config.json: No such file or directory'.\n # Add this condition to prevent from JSONDecodeError.\n try:\n return json.loads(raw_data)\n except ValueError as e:\n raise errors.ConfigError(\n \"An exception happened when loading the raw_data of the \"\n \"cvd runtime config:\\n%s\" % str(e))\n if not os.path.exists(runtime_cf_config_path):\n raise errors.ConfigError(\n \"file does not exist: %s\" % runtime_cf_config_path)\n with open(runtime_cf_config_path, \"r\") as cf_config:\n return json.load(cf_config)", "def runtime_config(self) -> str:\n return self._node[\"app_data\"].get(\"runtime_config\")", "def config(self, config_dict):\r\n self._cfg.config = config_dict", "def pre_global_vrouter_config_update(self, resource_id, resource_dict):\n pass", "def fusion_api_reapply_sas_li_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.sasli.put(body=None, uri=uri, api=api, headers=headers, param=param)", "def config_update(cls, **options) -> None:\n cls._logger.debug(\"[%s]: Update config from kwargs.\", cls.__name__)\n\n config_update: Dict = {k: options[k] for k in options.keys() if \"graph_\" in k}\n\n cls._config.update(config_update)\n\n cls._logger.debug(\"[%s]: Final config: %s\", cls.__name__, cls._config)", "def update_impala_config(self, impala_config):\n path = \"%s/services/impala/config\" % self.__base_path\n response = self.__session.put(path, data=impala_config, headers={\"Content-Type\": \"application/json\"})\n ImpalaApiResource.__check_status_code(response.status_code)\n return response.json()", "def refresh_resources_properties(state, output, update_runtime_props=True):\n resources = {}\n for resource in state.get('resources', []):\n resources[resource[NAME]] = resource\n for module in state.get('modules', []):\n for name, definition in module.get('resources', {}).items():\n resources[name] = definition\n if update_runtime_props:\n ctx.instance.runtime_properties['resources'] = resources\n # Duplicate for backward compatibility.\n ctx.instance.runtime_properties[STATE] = resources\n ctx.instance.runtime_properties['outputs'] = \\\n filter_state_for_sensitive_properties(output)\n store_sensitive_properties(output=output)", "def override_config_field(self, update_conf: dict):\n if not update_conf:\n return\n\n forbidden_modify_fields = self.contains_forbidden_modify_field(update_conf)\n if forbidden_modify_fields:\n raise ConfigException(f'Config field cannot be modified: {forbidden_modify_fields}')\n\n self.add_config(update_conf, type='api_patch', apply_now=True)\n\n logger.debug(f'Need update config fields: {update_conf}')\n self.config.update(update_conf)\n logger.debug(f'Update done. 
config: {self.config}')\n\n application.server['event'].publish('config_update', {'config_update' : {'data': update_conf}})", "def update_config(self, config, priority, source):\n for key, value in config.items():\n self._config[key].add(value, priority, source)", "def _do_update(self, meta, k, v):\n self.runtime.logger.info('{}: [{}] -> {}'.format(meta.in_group_config_path, k, v))\n meta.config[k] = v\n meta.save()", "def update_config(self):\n self.channel_count = self.config_global['channel_count']\n self.pixel_count = self.config_global['pixel_count']\n self.pixel_index_max = self.pixel_count - 1\n self.repeat_count = self.config_global['repeat_count']\n self.repeat_snake = self.config_global['repeat_snake']\n\n self.update_interval = self.config_global['update_interval']\n self.mode_16bit = self.config_global['mode_16bit']\n\n self.color_channels = self.config_global['color_channels']\n # self.color_channels = collections.namedtuple(\n # 'color_channels',\n # **self.color_channels_dict\n # )\n self.color_channels_count = len(self.color_channels)\n if self.mode_16bit:\n self.color_channels_count = self.color_channels_count * 2\n\n self.total_channel_count = (\n self.pixel_count *\n self.color_channels_count\n )\n if self.repeat_count > 0:\n self.total_channel_count *= self.repeat_count", "def refresh_config(self):\n\t\treturn Job(SDK.PrlVm_RefreshConfig(self.handle)[0])", "async def async_update_config(self, path: str, dev_id: str, device: Device) -> None:\n async with self._is_updating:\n await self.hass.async_add_executor_job(\n update_config, self.hass.config.path(YAML_DEVICES), dev_id, device\n )", "def fusion_api_edit_server_hardware_environmental_config(self, body, uri, api=None, headers=None):\n return self.sh.update(body, uri, api, headers, param='/environmentalConfiguration')", "def test_update_global_system_config(self):\n new_config = self._create_global_system_config()\n update_name = data_utils.rand_name('test')\n with self.override_role():\n self.config_client.update_global_system_config(\n new_config['uuid'],\n display_name=update_name)", "def update_config(config_dict):\n\n # Get the version number. If it does not appear at all, then\n # assume a very old version:\n config_version = config_dict.get('version') or '1.0.0'\n\n # Updates only care about the major and minor numbers\n parts = config_version.split('.')\n major = parts[0]\n minor = parts[1]\n\n # Take care of the collation problem when comparing things like\n # version '1.9' to '1.10' by prepending a '0' to the former:\n if len(minor) < 2:\n minor = '0' + minor\n\n # I don't know how to merge older, V1.X configuration files, only\n # newer V2.X ones.\n if major == '1':\n raise ValueError(\"Cannot merge version %s. 
Too old\" % config_version)\n\n if major == '2' and minor < '07':\n update_to_v27(config_dict)\n\n if major < '3':\n update_to_v30(config_dict)\n \n update_to_v32(config_dict)\n \n update_to_v36(config_dict)", "def updated(self, newConfiguration):\n log.debug('ConfigListener: configuration %s updated' % newConfiguration)", "def update_configuration(config_id, new_config):\n session = connection.Session()\n try:\n my_config = session.query(Configuration).filter(\n Configuration.config_id == config_id).first()\n updated_config = merge_configurations([my_config.config, new_config])\n my_config.config = updated_config\n session.commit()\n except:\n session.rollback()\n raise\n finally:\n session.close()\n connection.engine.dispose()\n \n return my_config", "def _updated_config(self):\n from tensorflow.python.keras._impl.keras import __version__ as keras_version # pylint: disable=g-import-not-at-top\n\n config = self.get_config()\n model_config = {\n 'class_name': self.__class__.__name__,\n 'config': config,\n 'keras_version': keras_version,\n 'backend': K.backend()\n }\n return model_config", "def _update_config_parameter_value(self):\n field = self.env.ref(\"base.field_ir_config_parameter_value\")\n for r in self:\n if r.fields_id != field:\n # It's not for ir.config_parameter\n continue\n if r.company_id:\n # it's not default value\n continue\n if not r.res_id:\n # Paramater is not specified\n continue\n # Default value is updated. Set new value in column \"value\"\n model, res_id = r.res_id.split(\",\")\n value = r.get_by_record()\n param = self.env[\"ir.config_parameter\"].browse(int(res_id))\n param._update_db_value(value)", "def fusion_api_update_snmp_configuration(self, body=None, uri=None, api=None, headers=None):\n param = '/snmp-configuration'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)", "def update_zoo_config(self, site_name, virt_path, new_config):\n root_path = self.map_path(site_name, virt_path)\n zoo_config_path = os.path.join(root_path, \".zoo\")\n config = get_zoo_config(zoo_config_path) or {}\n\n app = config.get('application')\n # disabled ability\n if 'selected-engine' in new_config :\n new_engine = new_config.get('selected-engine')\n if 'parameters' in app:\n app['parameters']['selected-engine'] = new_engine\n else:\n app['parameters'] = OrderedDict()\n app['parameters']['selected-engine'] = new_engine\n\n\n if 'engines' in new_config:\n engines = new_config.get('engines')\n app['engines'] = engines\n\n if 'locations' in new_config:\n app['locations'] = new_config['locations']\n\n if \"description\" in app:\n app[\"description\"] = Literal(app[\"description\"])\n\n if \"find_installed_command\" in app:\n app[\"find_installed_command\"] = Literal(app[\"find_installed_command\"])\n\n if \"install_command\" in app:\n app[\"install_command\"] = Literal(app[\"install_command\"])\n\n if \"uninstall_command\" in app:\n app[\"uninstall_command\"] = Literal(app[\"uninstall_command\"])\n\n if \"upgrade_command\" in app:\n app[\"upgrade_command\"] = Literal(app[\"upgrade_command\"])\n\n # save .zoo\n YamlHelper.save(config, zoo_config_path)", "def _update_params(self):\n log.debug(\"Updating parameter dict\")\n old_config = self._param_dict.get_config()\n self._get_config()\n new_config = self._param_dict.get_config() \n if (new_config != old_config):\n self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)", "def test_edit_configuration(self):\n configuration = copy.deepcopy(self.configuration)\n configuration['settings'] = {'DB_HOST': 
'other_scale_db'}\n configuration['mounts'] = {\n 'dted': {\n 'type': 'host',\n 'host_path': '/some/new/path'\n }\n }\n\n url = '/%s/job-types/%s/%s/' % (self.api, self.job_type.name, self.job_type.version)\n json_data = {\n 'configuration': configuration,\n 'auto_update': False\n }\n response = self.client.generic('PATCH', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n \n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertDictEqual(results, {u'errors': [], u'is_valid': True, u'warnings': []})" ]
[ "0.66292256", "0.60551757", "0.5964276", "0.59138215", "0.5715593", "0.55989873", "0.5577988", "0.55546343", "0.5550815", "0.54435796", "0.5437534", "0.5437106", "0.5432855", "0.5415902", "0.54149926", "0.5393029", "0.5393029", "0.5380091", "0.53799874", "0.53699434", "0.53560823", "0.5347029", "0.5341624", "0.53333646", "0.53294283", "0.53167856", "0.5308816", "0.5299319", "0.5292373", "0.52839726", "0.52783406", "0.5271939", "0.52647233", "0.5250052", "0.5235837", "0.52148557", "0.5213916", "0.52108556", "0.52091146", "0.52044916", "0.5199433", "0.51904684", "0.5189458", "0.5188752", "0.5173936", "0.5153783", "0.5146603", "0.5145469", "0.5140387", "0.51350445", "0.5129174", "0.51222867", "0.511669", "0.51134795", "0.5109264", "0.5094492", "0.50799215", "0.50604975", "0.50517917", "0.5045256", "0.50443155", "0.50393474", "0.503799", "0.5035457", "0.50313044", "0.5027743", "0.5019478", "0.50134367", "0.5012873", "0.5009152", "0.5003062", "0.5001051", "0.49916574", "0.498925", "0.4988286", "0.49870858", "0.49851322", "0.49804583", "0.4978498", "0.49692953", "0.4965418", "0.496099", "0.49606645", "0.49567693", "0.49474478", "0.49380326", "0.49371845", "0.49257553", "0.49133065", "0.49100688", "0.4907758", "0.4899436", "0.48990574", "0.48952177", "0.48810694", "0.4870985", "0.48636517", "0.48625594", "0.48580748", "0.48531246" ]
0.49852085
76
Deletes a RuntimeConfig resource.
def DeleteConfig(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post_global_system_config_delete(self, resource_id, resource_dict):\n pass", "def post_global_vrouter_config_delete(self, resource_id, resource_dict):\n pass", "def delete(name, config, backend, storage, debug):\n setup_lithops_logger(logging.DEBUG)\n\n verify_runtime_name(name)\n\n if config:\n config = load_yaml_config(config)\n\n setup_lithops_logger(logging.DEBUG)\n\n config_ow = set_config_ow(backend, storage, runtime_name=name)\n config = default_config(config, config_ow)\n\n if config['lithops']['mode'] != SERVERLESS:\n raise Exception('\"lithops runtime delete\" command is only valid for serverless backends')\n\n storage_config = extract_storage_config(config)\n internal_storage = InternalStorage(storage_config)\n compute_config = extract_serverless_config(config)\n compute_handler = ServerlessHandler(compute_config, internal_storage)\n\n runtimes = compute_handler.list_runtimes(name)\n for runtime in runtimes:\n compute_handler.delete_runtime(runtime[0], runtime[1])\n runtime_key = compute_handler.get_runtime_key(runtime[0], runtime[1])\n internal_storage.delete_runtime_meta(runtime_key)", "def _delete(self, **kwargs):\n\n resource_name = self._get_resource_name(**kwargs)\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None),\n required=False\n )\n\n return self._make_request(\n uri='%s/%s' % (self._metadata['uri'], resource_name),\n method='DELETE',\n config=config\n )", "def delete(self):\r\n return self.connection.delete_launch_configuration(self.name)", "def pre_global_vrouter_config_delete(self, resource_id):\n pass", "def delete(ctx, **_):\n # Delete the resource\n azure_config = ctx.node.properties.get('azure_config')\n if not azure_config.get(\"subscription_id\"):\n azure_config = ctx.node.properties.get('client_config')\n else:\n ctx.logger.warn(\"azure_config is deprecated please use client_config, \"\n \"in later version it will be removed\")\n resource_group_name = utils.get_resource_group(ctx)\n vm_name = ctx.instance.runtime_properties.get('virtual_machine')\n name = ctx.instance.runtime_properties.get('name')\n api_version = \\\n ctx.node.properties.get('api_version', constants.API_VER_COMPUTE)\n vm_extension = VirtualMachineExtension(azure_config, ctx.logger,\n api_version)\n utils.handle_delete(ctx, vm_extension, resource_group_name, name, vm_name)", "def delete(self):\n try:\n self._client.delete_launch_configuration(LaunchConfigurationName=self._name)\n except botocore.exceptions.ClientError as e:\n if \"not found\" in e.response[\"Error\"][\"Message\"]:\n logger.warn(\"Launch configuration %s not found\", self._name)\n else:\n raise", "def post_config_root_delete(self, resource_id, resource_dict):\n pass", "def post_config_node_delete(self, resource_id, resource_dict):\n pass", "def pre_global_system_config_delete(self, resource_id):\n pass", "def delete(ctx, iface, resource_config, **_):\n vpn_connection = ctx.instance.runtime_properties.get('VPN_CONNECTION_ID')\n cider_block = ctx.instance.runtime_properties.get('DESTINATION_CIDR_BLOCK')\n\n params = dict(VpnConnectionId=vpn_connection,\n DestinationCidrBlock=cider_block) \\\n if not resource_config else resource_config.copy()\n iface.delete(params)", "def delete(ctx, iface, resource_config, **_):\n\n # Create a copy of the resource config for clean manipulation.\n params = \\\n dict() if not resource_config else resource_config.copy()\n\n lb = params.get(LB_NAME) or ctx.instance.runtime_properties.get(LB_NAME)\n policy = \\\n params.get(RESOURCE_NAME) or 
\\\n ctx.instance.runtime_properties.get(RESOURCE_NAME)\n\n lb_delete_params = {\n LB_NAME: lb,\n RESOURCE_NAME: policy\n }\n\n try:\n iface.delete(lb_delete_params)\n except ClientError as e:\n if _.get('force'):\n raise OperationRetry('Retrying: {0}'.format(text_type(e)))\n pass", "def delete_runtime(self, runtime_name, memory):\n self.compute_handler.delete_runtime(runtime_name, memory)", "def pre_config_node_delete(self, resource_id):\n pass", "def pre_config_root_delete(self, resource_id):\n pass", "def delete_endpoint_config(EndpointConfigName=None):\n pass", "def test_delete_config_node(self):\n config_node = self._create_config_node()\n config_node_uuid = config_node['config-node']['uuid']\n with self.override_role():\n self.config_client.delete_config_node(\n config_node_uuid)", "def delete_config_callback(self, trigger_id, config):\n self.tcex.log.trace('delete config callback')", "def kubeconfig_delete(self):\n\n self._client.delete(\n \"{}/kubeconfig\".format(LKECluster.api_endpoint), model=self\n )", "def delete_handler(event, context):\n delete_endpoint_config(event)", "def post_namespace_delete(self, resource_id, resource_dict):\n pass", "def test_delete_global_system_config(self):\n new_config = self._create_global_system_config()\n with self.override_role():\n self.config_client.delete_global_system_config(new_config['uuid'])", "def do_DELETE(self): # pylint: disable=C0103\r\n if self.path == \"/del_config\" or self.path == \"/del_config/\":\r\n self.server.config = dict()\r\n self.log_message(\"Reset Server Configuration.\")\r\n self.send_response(200)\r\n else:\r\n self.send_response(404)", "def test_delete_namespaced_deployment_config(self):\n pass", "def delete_configuration(self, node, ports):\n return hpssa_manager.delete_configuration()", "def test_config_remove(self):\n server = self.start_server(\"hello world\", 200)\n try:\n self.setup_dynamic()\n\n cfg_file = \"test.yml\"\n\n self.write_dyn_config(\n cfg_file, self.http_cfg(\"myid\", \"http://localhost:{}\".format(server.server_port)))\n\n self.wait_until(lambda: self.output_has(lines=2))\n\n self.assert_last_status(\"up\")\n\n os.remove(self.monitors_dir() + cfg_file)\n\n # Ensure the job was removed from the schduler\n self.wait_until(lambda: self.log_contains(\"Remove scheduler job 'myid'\"))\n self.wait_until(lambda: self.log_contains(\"Job 'myid' returned\"))\n\n self.proc.check_kill_and_wait()\n finally:\n server.shutdown()", "def test_delete_hyperflex_sys_config_policy(self):\n pass", "def rpc_delete_config(self, unused_session, rpc, *unused_params):\n raise ncerror.OperationNotSupportedProtoError(rpc)", "def remove_config_object() -> None:\n if G_CONFIG_OBJECT:\n G_CONFIG_OBJECT.clear()", "async def websocket_lovelace_delete_config(\n hass: HomeAssistant,\n connection: websocket_api.ActiveConnection,\n msg: dict[str, Any],\n config: LovelaceStorage,\n) -> None:\n await config.async_delete()", "def delete():\n run('rm -r {}'.format(utils.home('apps', env.PROJECT_NAME)))", "def remove_prompt(name, delete_config):\n\n with open(DATABASE_FILE_PATH) as f:\n config = json.load(f)\n path = config[name]\n del config[name]\n\n with open(DATABASE_FILE_PATH, 'w') as f:\n json.dump(config, f)\n\n if delete_config:\n os.remove(path)", "def delete(self, wait=True, force=False):\n # Avoid accidental delete of default storageclass and secret\n if (\n self.name == constants.DEFAULT_STORAGECLASS_CEPHFS\n or self.name == constants.DEFAULT_STORAGECLASS_RBD\n ):\n log.info(\"Attempt to delete default Secret or 
StorageClass\")\n return\n\n if self._is_deleted:\n log.info(\n f\"Attempt to remove resource: {self.name} which is\"\n f\"already deleted! Skipping delete of this resource!\"\n )\n result = True\n else:\n result = self.ocp.delete(resource_name=self.name, wait=wait, force=force)\n self._is_deleted = True\n return result", "def deleted(self, configurationId):\n log.debug('ConfigListener: configuration %s deleted' % configurationId)\n ZenProcessTask.DEVICE_STATS.pop(configurationId, None)", "def test_delete_hyperflex_node_config_policy(self):\n pass", "def delete(\n self,\n resource_group_name, # type: str\n resource_name, # type: str\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MaintenanceConfiguration\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MaintenanceConfiguration\"]\n error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}\n error_map.update(kwargs.pop('error_map', {}))\n api_version = \"2020-07-01-preview\"\n\n # Construct URL\n url = self.delete.metadata['url'] # type: ignore\n path_format_arguments = {\n 'subscriptionId': self._serialize.url(\"self._config.subscription_id\", self._config.subscription_id, 'str'),\n 'resourceGroupName': self._serialize.url(\"resource_group_name\", resource_group_name, 'str'),\n 'resourceName': self._serialize.url(\"resource_name\", resource_name, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n query_parameters['api-version'] = self._serialize.query(\"api_version\", api_version, 'str')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = 'application/json'\n\n request = self._client.delete(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.MaintenanceError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MaintenanceConfiguration', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def test_delete_hyperflex_vcenter_config_policy(self):\n pass", "def delete(openstack_resource):\n openstack_resource.delete()", "def delete_k8s_configuration(client, resource_group_name, cluster_name, name, cluster_type):\n # Determine ClusterRP\n cluster_rp = __get_cluster_type(cluster_type)\n\n source_control_configuration_name = name\n\n return client.delete(resource_group_name, cluster_rp, cluster_type, cluster_name, source_control_configuration_name)", "def test_delete_config_root(self):\n config_root = self._create_config_root()\n config_root_uuid = config_root['config-root']['uuid']\n with self.override_role():\n self.config_client.delete_config_root(\n config_root_uuid)", "def remove_config(name):\n db = dbm.open(config_file, 'c')\n del db[name]\n db.close()", "def dynamic_vnic_conn_policy_delete(handle, name, parent_dn=\"org-root\"):\n mo = dynamic_vnic_conn_policy_get(handle, name, parent_dn)\n if not mo:\n raise UcscOperationError(\"dynamic_vnic_conn_policy_delete\",\n \"Dynamic vNIC Connectivity Policy \"\n \"does not exist\")\n handle.remove_mo(mo)\n handle.commit()", "def delete_app_instance(self, instance_id):\n self.stop_app_instance(instance_id)\n aic = 
self.get_app_instances_configs(instance_id=instance_id)\n # invoking on_uninstall callback , so app can run cleanup routines .\n ai_obj = self.get_app_instance_obj(instance_id)\n try:\n if hasattr(ai_obj,\"on_uninstall\"):\n ai_obj.on_uninstall()\n except Exception as ex:\n log.exception(ex)\n if aic:\n self.app_instances_configs.remove(aic[0])\n self.serialize_instances_config()", "def post_routing_instance_delete(self, resource_id, resource_dict):\n pass", "def pre_namespace_delete(self, resource_id):\n pass", "def delete_conf(src_ip):\n return delete_route(src_ip)", "def delete_config(self, trigger_id: int, message: str, status: bool | str):\n try:\n # always delete config from configs dict, even when status is False\n del self.configs[trigger_id]\n\n # send ack response\n self.message_broker.publish(\n json.dumps(\n {\n 'command': 'Acknowledged',\n 'message': message,\n 'status': 'Success' if status is True else 'Failed',\n 'type': 'DeleteConfig',\n 'triggerId': trigger_id,\n }\n ),\n self.model.tc_svc_client_topic,\n )\n except Exception:\n self.log.exception(\n f'feature=service, event=delete-config-callback-exception, trigger-id={trigger_id}'\n )", "def remove_resource(self, graph_db):\n with mutex:\n neo_resource.delete_node(graph_db, self.index)", "def fusion_api_delete_resource(self, uri, api=None, headers=None):\n if api:\n headers = self.fusion_client._set_req_api_version(api=api)\n elif not headers:\n headers = self.fusion_client._headers\n uri = 'https://%s%s' % (self.fusion_client._host, uri)\n return self.fusion_client.delete(uri, headers)", "def delete_config_set(self, req, id):\n try:\n deleted_config_set = self.db_api.config_set_destroy(\n req.context, id)\n msg = _LI(\"Successfully deleted config_set %(id)s\") % {'id': id}\n LOG.info(msg)\n return dict(config_set=deleted_config_set)\n except exception.ForbiddenPublicImage:\n msg = _LI(\"Delete denied for public config_set %(id)s\") % {\n 'id': id}\n LOG.info(msg)\n raise exc.HTTPForbidden()\n except exception.Forbidden as e:\n # If it's private and doesn't belong to them, don't let on\n # that it exists\n LOG.info(e)\n return exc.HTTPForbidden(e)\n except exception.NotFound:\n msg = _LI(\"config_set %(id)s not found\") % {'id': id}\n LOG.info(msg)\n return exc.HTTPNotFound()\n except Exception:\n LOG.exception(_LE(\"Unable to delete config_set %s\") % id)\n raise", "def process_delete_config_command(self, message: dict):\n status = True\n trigger_id = int(message['triggerId'])\n self.log.info(f'feature=service, event=delete-config, trigger_id={trigger_id}')\n\n # unregister config apiToken\n self.token.unregister_token(str(trigger_id))\n\n msg = 'Delete Config'\n if callable(self.delete_config_callback):\n try:\n # call callback for delete config and handle exceptions to protect thread\n # pylint: disable=not-callable\n status: bool | None = self.delete_config_callback(trigger_id)\n\n # if callback does not return a boolean value assume it worked\n if not isinstance(status, bool):\n status = True\n except Exception as e:\n self.log.error(\n f'feature=service, event=delete-config-callback-exception, error=\"\"\"{e}\"\"\"'\n )\n self.log.trace(traceback.format_exc())\n status = False\n\n # delete config\n self.delete_config(trigger_id, msg, status)", "def delete(self):\n import labstep.entities.resource.repository as resourceRepository\n\n return resourceRepository.editResource(self, deleted_at=getTime())", "def clean():\n Log.d(DEBUG_TAG, \"Delete config file...\")\n try:\n os.remove(CONFIG_FILE)\n except os.error as 
e:\n Log.e(DEBUG_TAG, \"Delete config file%s error, reason:%s\"%(CONFIG_FILE, e))", "def removeConfigFile(alg):\r\n configPath = alg.getParameterValue('config')\r\n if isWindows():\r\n command = \"DEL {}\".format(os.path.join(rliPath(), configPath))\r\n else:\r\n command = \"rm {}\".format(os.path.join(rliPath(), configPath))\r\n alg.commands.append(command)", "def remove_stored_config(self):\n stored_config_filename = self.stored_config_filename\n if stored_config_filename.exists():\n stored_config_filename.remove()\n self._stored_cmake_generator = self._stored_config.cmake_generator", "def _delete_external_keypair():\n\n if not utils.use_external_resource(ctx.node.properties):\n return False\n\n ctx.logger.info('External resource. Not deleting keypair.')\n\n utils.unassign_runtime_properties_from_resource(RUNTIME_PROPERTIES,\n ctx.instance)\n return True", "def delete(self, request, checksum=None):\n if not checksum:\n raise core_exceptions.InvalidRequestException()\n\n try:\n HaProxyConfigModel.objects.filter(checksum=checksum).delete()\n except HaProxyConfigModel.DoesNotExist:\n raise core_exceptions.DoesNotExistException()\n\n return Response({'deleted': True})", "def delete_image(config):\n\n try:\n os.unlink(config.image_path())\n iotests.log(\"unlink %s\" % config.image_path(),\n filters=[iotests.filter_test_dir])\n except Exception as e:\n pass", "def del_conf(self, path):\n\t\tself.monitor.removePath(path)\n\t\tself.cache.pop(path, None)", "def post_subnet_delete(self, resource_id, resource_dict):\n pass", "def post_virtual_DNS_delete(self, resource_id, resource_dict):\n pass", "def test_del_property():\n\n contents = (\"[Info]\\n\"\n \"sdk = 23\")\n\n testutils.deploy_config_raw(contents)\n\n prop.del_prop('info', 'sdk')\n\n testutils.undeploy()\n\n return 0", "def del_resource(self, service_name, resource_name, base_class=None):\n # Unlike ``get_resource``, this should be fire & forget.\n # We don't really care, as long as it's not in the cache any longer.\n try:\n classpath = self.build_classpath(base_class)\n opts = self.services[service_name]['resources'][resource_name]\n del opts[classpath]\n except KeyError:\n pass", "def delete(self):\n # type: () -> bool\n headers = Headers({\"content-type\": \"application/json\", \"accept\": \"application/json\"})\n return self.connection.api_call(\"DELETE\", [\"v1\", \"resources\", self.id], headers=headers)", "def deleteNodeNetworkConfig(self,node):\n data = self.connect('delete',\"nodes/%s/network\" % (node),None)\n return data", "def post_logical_router_delete(self, resource_id, resource_dict):\n pass", "def deleteCampaignConfig(docName, url=reqmgr_url):\n outcome = True\n headers = {\"Content-type\": \"application/json\", \"Accept\": \"application/json\",\n \"Content-Length\": 0} # this is required for DELETE calls\n conn = make_x509_conn(url)\n url = '/reqmgr2/data/campaignconfig/%s' % docName\n conn.request(\"DELETE\", url, headers=headers)\n resp = conn.getresponse()\n if resp.status >= 400:\n print(\"FAILED to delete campaign: %s. 
Response status: %s, response reason: %s\"\n % (docName, resp.status, resp.reason))\n outcome = False\n conn.close()\n return outcome", "def do_delete_configured_volume(self, arg):\n args = self.parse_arguments(arg)\n if len(args) == 0:\n self.perror(\"No storage specified.\")\n return\n self.do_coroutine(self._localStorageRoutines.delete_configured_volume_routine(args[0]))", "def post_service_appliance_delete(self, resource_id, resource_dict):\n pass", "def delete_notebook_instance_lifecycle_config(NotebookInstanceLifecycleConfigName=None):\n pass", "def post_virtual_network_delete(self, resource_id, resource_dict):\n pass", "def pre_routing_instance_delete(self, resource_id):\n pass", "def delete(self, name):\n path = self.directory / f\"{name}.yaml\"\n if path.exists():\n path.unlink()", "def delete(self, xact, path):\n self._log.debug(\"Deleting NSR xact:%s, path:%s\", xact, path)\n self.regh.delete_element(path)\n self._log.debug(\"Deleted NSR xact:%s, path:%s\", xact, path)", "def delete_nginx_config(config):\n nginx_config = nginx_sites_enabled + '/' + config\n if os.path.isfile(nginx_config):\n os.remove(nginx_config)\n call([\"/usr/sbin/service\", \"nginx\", \"restart\"])\n message = \"%s site deleted\" % config\n return jsonify(message=message, config_count=config_count())\n else:\n abort(404)", "def delete_website_configuration(self, headers=None):\r\n response = self.connection.make_request('DELETE', self.name,\r\n query_args='website', headers=headers)\r\n body = response.read()\r\n boto.log.debug(body)\r\n if response.status == 204:\r\n return True\r\n else:\r\n raise self.connection.provider.storage_response_error(\r\n response.status, response.reason, body)", "def delete(context, namespace_name, resource_type_name, session):\n\n namespace = namespace_api.get(\n context, namespace_name, session)\n\n resource_type = resource_type_api.get(\n context, resource_type_name, session)\n\n deleted = _delete(context, namespace_name, resource_type_name,\n namespace['id'], resource_type['id'], session)\n\n return _to_model_dict(resource_type_name, deleted)", "async def before_cleanup(self, invoker: PluginInvoker):\n config_file = invoker.files[\"config\"]\n try:\n config_file.unlink()\n except FileNotFoundError:\n pass\n logging.debug(f\"Deleted configuration at {config_file}\")", "def delete(self, configuration):\n configuration = g.user.get_api().get_configuration(configuration)\n configuration.delete()\n return '', 204", "def test_delete_hyperflex_ucsm_config_policy(self):\n pass", "def delete_uptime_check_config(config_name: str) -> None:\n client = monitoring_v3.UptimeCheckServiceClient()\n client.delete_uptime_check_config(request={\"name\": config_name})\n print(\"Deleted \", config_name)", "def post_project_delete(self, resource_id, resource_dict):\n pass", "def post_domain_delete(self, resource_id, resource_dict):\n pass", "def fusion_api_delete_os_deploymentserver(self, name=None, uri=None, param='', api=None, headers=None):\n return self.osds.delete(name=name, uri=uri, param=param, api=api, headers=headers)", "def remove(self):\n with CachedCKAN(**self.ckan_kwargs) as ckan:\n ckan.delete_resource(resource_id=self.resource_id)", "def remove_trap_config(self, context, storage_id, trap_config):\n # Currently not implemented\n pass", "def post_bgp_router_delete(self, resource_id, resource_dict):\n pass", "def delete(self, context, id_):\n try:\n db_resource_data = self.db_api.get_resource(\n context, id_)\n\n if db_resource_data['type'] == (eon_const.\n 
EON_RESOURCE_TYPE_ESX_CLUSTER):\n msg = _(\"Delete operation not supported for type %s\"\n % db_resource_data['type'])\n raise exception.DeleteException(err=msg)\n\n _resource_data = _make_response(\n db_resource_data)\n _resource_data_log = deepcopy(_resource_data)\n _resource_data_log.pop(\"meta_data\", None)\n LOG.info(\"Details for the ID %s is: %s\" % (\n id_, logging.mask_password(_resource_data_log)))\n driver_obj = driver.load_resource_driver(\n db_resource_data['type'])\n driver_obj.validate_delete(db_resource_data)\n driver_obj.delete(context, id_)\n self.db_api.delete_resource(context, id_)\n # delete the data from hlm input model\n try:\n LOG.info(\"[%s] remove resource from input model\" % id_)\n hux_obj = HLMFacadeWrapper(context)\n resource_id = db_resource_data[eon_const.EON_RESOURCE_ID]\n hux_obj.delete_server(resource_id)\n hux_obj.commit_changes(resource_id, \"Delete compute resource\")\n except facade_excep.NotFound:\n # log and do nothing\n LOG.warn(\"[%s] resource not found in hlm input model\" % id_)\n LOG.info(\"[%s]: Deleted resource from eon\" % id_)\n # Notify the message to consumers\n try:\n message = {\"resource_id\": id_,\n \"resource_state\": eon_const.EON_RESOURCE_STATE_REMOVED,\n \"resource_details\": _resource_data,\n }\n message_notifier.notify(context,\n message_notifier.EVENT_PRIORITY_INFO,\n message_notifier.EVENT_TYPE[\n 'removed'],\n message)\n except Exception as ex:\n LOG.exception(\n \"Exception while notifying the message : %s\" % ex)\n except exception.NotFound as e:\n msg = (\"Failed to delete resource %s. Error: %s\") % (\n _resource_data['name'], e.message)\n LOG.exception(msg)\n raise e", "def test_del_empty_config():\n\n testutils.deploy_config_raw(\"\")\n\n assert prop.del_prop('info', 'sdk') != 0\n\n testutils.undeploy()\n\n return 0", "def post_virtual_router_delete(self, resource_id, resource_dict):\n pass", "def delete(self, xact, path):\n self._log.debug(\"Deleting VLR xact = %s, %s\", xact, path)\n self.regh.delete_element(path)\n self._log.debug(\"Deleted VLR xact = %s, %s\", xact, path)", "def post_service_instance_delete(self, resource_id, resource_dict):\n pass", "def delete(self, cascade=False):\n if cascade == True:\n raise NotImplementedError()\n else:\n result = config.item_remove(self._original_attributes)\n self._event(level=\"write\", message=\"Object was deleted\")\n return result", "def remove_venv_config(name: str) -> Path:\n config_file_path = find_vsh_config(name=name, check=False)\n if config_file_path.exists():\n config_file_path.unlink()\n return config_file_path", "def delete_namespaced_deployment_config(self, body, namespace, name, **kwargs):\n\n all_params = ['body', 'namespace', 'name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_deployment_config\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `delete_namespaced_deployment_config`\")\n # verify the required parameter 'namespace' is set\n if ('namespace' not in params) or (params['namespace'] is None):\n raise ValueError(\"Missing the required parameter `namespace` when calling `delete_namespaced_deployment_config`\")\n # verify the required parameter 'name' is set\n if ('name' not in 
params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_deployment_config`\")\n\n resource_path = '/oapi/v1/namespaces/{namespace}/deploymentconfigs/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'namespace' in params:\n path_params['namespace'] = params['namespace']\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def Delete(url):\n\n prefix = ''.join([url, config_encoder.NAMESPACE_SEPARATOR])\n\n # Remove Test Suites\n test_keys = _GetEntityKeysByPrefix(ndb_models.Test, prefix)\n ndb.delete_multi(test_keys)\n\n # Remove Device Actions\n device_action_keys = _GetEntityKeysByPrefix(ndb_models.DeviceAction, prefix)\n ndb.delete_multi(device_action_keys)\n\n # Remove Test Run Actions\n test_run_action_keys = _GetEntityKeysByPrefix(\n ndb_models.TestRunAction, prefix)\n ndb.delete_multi(test_run_action_keys)\n\n # Remove Config Set Info\n config_set_info_key = mtt_messages.ConvertToKey(ndb_models.ConfigSetInfo, url)\n config_set_info_key.delete()", "def DeleteResourceSample():\n client = CreateClient()\n doc = gdata.docs.data.Resource(type='document', title='My Sample Doc')\n doc = client.CreateResource(doc)\n # Delete the resource we just created.\n client.DeleteResource(doc)", "def delete_dev_endpoint(self):\n self.glue_engine.delete_dev_endpoint(EndpointName=self.dev_endpoint_name)", "def delete_host(self, conf, tenant_id, network_id, host_id):\n\t\tpass" ]
[ "0.63476586", "0.6339625", "0.6323798", "0.6240317", "0.62242293", "0.619534", "0.61792046", "0.61713314", "0.6144527", "0.6141145", "0.6128976", "0.60849786", "0.6033858", "0.60115296", "0.5929646", "0.57731843", "0.5771558", "0.5689029", "0.5676266", "0.5670987", "0.55607647", "0.5529616", "0.5493606", "0.5489311", "0.54470575", "0.54290944", "0.5406233", "0.5405059", "0.53991896", "0.53746307", "0.5357122", "0.5353871", "0.5351812", "0.53397995", "0.52796996", "0.52784264", "0.52722776", "0.5254659", "0.52491754", "0.5236711", "0.5230166", "0.522718", "0.52209854", "0.5213861", "0.5194472", "0.51871717", "0.5162811", "0.5160698", "0.51463205", "0.5135421", "0.51339144", "0.51179713", "0.51099825", "0.5106873", "0.5102458", "0.51010007", "0.5095783", "0.508773", "0.5087388", "0.50866437", "0.50757205", "0.50730443", "0.5059327", "0.50443447", "0.5033488", "0.50306976", "0.502895", "0.50279504", "0.5025088", "0.5019297", "0.49816668", "0.4978321", "0.49682823", "0.49655724", "0.49468583", "0.4943267", "0.49412787", "0.49400732", "0.49320385", "0.4929792", "0.49220496", "0.49213496", "0.49203584", "0.49148735", "0.49141297", "0.49107355", "0.48966512", "0.48863998", "0.4882925", "0.48821977", "0.4882106", "0.4877444", "0.48757285", "0.4868259", "0.48646188", "0.48633337", "0.48573244", "0.48531538", "0.48525247", "0.48504478" ]
0.56942225
17
Lists variables within a given configuration, matching any provided filters. This only lists variable names, not the values, unless `return_values` is true, in which case only variables that the user has IAM permission to GetVariable will be returned.
def ListVariables(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_variables(self, request, context):\n response = ListVariablesResponse()\n for variable in self._delegator.list_variables(request.component):\n response.variables.append(variable)\n return response", "def Run(self, args):\n variable_client = util.VariableClient()\n messages = util.Messages()\n\n config_resource = util.ParseConfigName(util.ConfigName(args))\n\n self._display_values = args.values\n\n request = messages.RuntimeconfigProjectsConfigsVariablesListRequest(\n parent=config_resource.RelativeName(),\n returnValues=self._display_values)\n\n page_size = args.page_size or self.DEFAULT_PAGE_SIZE\n\n results = list_pager.YieldFromList(\n variable_client, request, field='variables',\n batch_size_attribute='pageSize', limit=args.limit,\n batch_size=page_size\n )\n\n for result in results:\n yield util.FormatVariable(result, self._display_values)", "def _list(self, variables):\n variables_dict = self._get_vars(variables)\n\n container_name = variables_dict.pop('container', None)\n\n filters = {\n 'marker': variables_dict.pop('marker', None),\n 'limit': variables_dict.pop('limit', None),\n 'prefix': variables_dict.pop('prefix', None),\n 'end_marker': variables_dict.pop('end_marker', None)\n }\n\n if container_name:\n list_data = self.swift.get_container(container_name, **filters)[1]\n else:\n list_data = self.swift.get_account(**filters)[1]\n\n return self._facts(facts=list_data)", "def get_list_vars(my_vars):\n lists = []\n for var in my_vars:\n try:\n temp = my_vars[var].getValue()\n #print var + '=' + str(temp)\n except ValueError:\n lists.append(var)\n return lists", "def filter_variables(var_list, include_patterns=None, exclude_patterns=None,\n reg_search=True):\n if reg_search:\n reg_exp_func = re.search\n else:\n reg_exp_func = re.match\n\n # First include variables.\n if include_patterns is None:\n included_variables = list(var_list)\n else:\n included_variables = []\n for var in var_list:\n if any(reg_exp_func(ptrn, var.name) for ptrn in include_patterns):\n included_variables.append(var)\n\n # Afterwards, exclude variables.\n if exclude_patterns is None:\n filtered_variables = included_variables\n else:\n filtered_variables = []\n for var in included_variables:\n if not any(reg_exp_func(ptrn, var.name) for ptrn in exclude_patterns):\n filtered_variables.append(var)\n\n return filtered_variables", "def get_variable_values(self, vars):\n raise NotImplementedError()", "def _varfilter(self, vrs, response, predictor) -> List[str]:\n if not response and not predictor:\n return vrs\n if response:\n vrs = _list_union(vrs, self.response_vars)\n if predictor:\n vrs = _list_union(vrs, self.predictor_vars)\n return vrs", "def getVariableInfo(self, variables, name):\r\n\r\n return [var.return_variable_dict() for var in variables if var.name == name][0]", "def retrieve_variables(content):\n variables = []\n in_var_section = False\n for line in content.splitlines():\n #print line\n if in_var_section:\n var_def = re.split(' +', line)\n if len(var_def) > 1:\n #print var_def[0], ':', var_def[1]\n var_name = var_def[0]\n def_value = var_def[1]\n if not def_value.startswith('%'): #not environment variable which would be directly passed to robot\n variables.append([var_name.strip('${').strip('}'), def_value])\n if '*** Variables ***' in line:\n in_var_section = True\n elif in_var_section and '*** ' in line:\n #end of Variables section\n break\n return variables", "def get_vehicle_variable_values_list(self, var='battery'):\n return 
self.get('vehicles/GetVehicleVariableValuesList/{}'.format(var))", "def variables(self):\r\n return self.get_field('variable')", "def read_variables(var_or_list):\n session = ph.get_session()\n return session.run(var_or_list)", "def cb_get_variables(self) -> Callable:\n\n def get_variables():\n networks = (self.representation_network, self.value_network, self.policy_network,\n self.dynamic_network, self.reward_network)\n return [variables\n for variables_list in map(lambda v: v.weights, networks)\n for variables in variables_list]\n\n return get_variables", "def config_variables(self) -> Sequence['outputs.ConfigVariableResponse']:\n return pulumi.get(self, \"config_variables\")", "def get_variables_list(self):\n variables = self.variables.values()\n # handle reference variables\n for variable in variables:\n name = variable['name']\n if name in self.references:\n variable['data'] = self.references[name]\n return variables", "def variable_names(self):\n\n status, stdout, stderr = self.__xcall__(['--print-variables'])\n\n if status != 0:\n raise RuntimeError(\"error querying --print-variables for package `%s': %s\" % (self.name, stderr))\n\n return stdout.strip().split()", "def get_vehicle_variables_list(self):\n return self.get('vehicles/GetVehicleVariableList')", "def list(self, config_path: str, results_filter: Optional[ObjectType]) -> List[str]:\n ...", "def variables(self, setname='data file', numeric=True, string=True,\n date=True, boolean=True, blacklist=None):\n varlist = []\n except_list = []\n dsvars = self._variables_from_set(setname)\n if not numeric: except_list.extend(['int', 'float'])\n if not string: except_list.append('string')\n if not date: except_list.append('date')\n if not boolean: except_list.append('boolean')\n for dsvar in dsvars:\n if self._get_type(dsvar) in except_list: continue\n if dsvar in blacklist: continue\n varlist.append(dsvar)\n return varlist", "def get_all_variables(self):\n return []", "def list_variables(self):\n return list(self._properties.keys())", "def _iapp_build_variables(self, config):\n variables = []\n for key, value in list(config['variables'].items()):\n var = {'name': key, 'value': value}\n if var['name'] == \"pool__addr\":\n var['value'] = normalize_address_with_route_domain(\n var['value'], self._default_route_domain)[0]\n variables.append(var)\n\n return variables", "def test_simple_extraction_of_values(self):\n\t\tself.assertEqual([\"a\", \"b\"], au.extract_variables(bf.And([bf.Var(\"b\"), bf.Var(\"a\")])), \"Invalid variables extracted, expected [a, b].\")", "def vars(self):\n return self._return_if('_vars')", "def regex_findall_variables(raw_string: Text) -> List[Text]:\n try:\n match_start_position = raw_string.index(\"$\", 0)\n except ValueError:\n return []\n\n vars_list = []\n while match_start_position < len(raw_string):\n\n # Notice: notation priority\n # $$ > $var\n\n # search $$\n dollar_match = dolloar_regex_compile.match(raw_string, match_start_position)\n if dollar_match:\n match_start_position = dollar_match.end()\n continue\n\n # search variable like ${var} or $var\n var_match = variable_regex_compile.match(raw_string, match_start_position)\n if var_match:\n var_name = var_match.group(1) or var_match.group(2)\n vars_list.append(var_name)\n match_start_position = var_match.end()\n continue\n\n curr_position = match_start_position\n try:\n # find next $ location\n match_start_position = raw_string.index(\"$\", curr_position + 1)\n except ValueError:\n # break while loop\n break\n\n return vars_list", "def 
get_variables(self):\n\t\treturn self.variables", "def get_all_variables(self):\n return self.item.get_all_variables()", "def get_filter_values(self):\n return [f.get() for f in self._filters[:-1]] # Ignore placeholder", "def get_cmake_vars_from_user() -> List[CmakeVariable]:\n cmake_variables: List[CmakeVariable] = []\n while prompt_user_for_yes_no_value(\"Do you have any Cmake vars to add? (y/n): \"):\n try:\n cmake_variables.append(CmakeVariable(\n get_user_input_for_value(\"Cmake Variable Name: \", str),\n get_user_input_for_value(\"Cmake Variable Value: \", str),\n get_user_input_for_value(\"Cmake Variable Type: \", CmakeVarType)\n ))\n except ValueError as e:\n print(f\"Invalid CmakeVarType: {e}. Ignoring variable. Enter variable info again.\")\n\n return cmake_variables", "def get(self, name, **valuefilter):\n if not valuefilter:\n valuefilter = self.valuefilter\n varobj = Variable(name, **valuefilter)\n value = varobj.get(gid=self.gid)\n return value", "def vars(self, scope: str = '') -> VarCollection:\n return self.__wrapped__.vars(scope=scope)", "def get_variable_values(\n schema, # type: GraphQLSchema\n definition_asts, # type: List[VariableDefinition]\n inputs, # type: Any\n):\n # type: (...) -> Dict[str, Any]\n if inputs is None:\n inputs = {}\n\n values = {}\n for def_ast in definition_asts:\n var_name = def_ast.variable.name.value\n var_type = type_from_ast(schema, def_ast.type)\n value = inputs.get(var_name)\n\n if not is_input_type(var_type):\n raise GraphQLError(\n 'Variable \"${var_name}\" expected value of type \"{var_type}\" which cannot be used as an input type.'.format(\n var_name=var_name, var_type=print_ast(def_ast.type)\n ),\n [def_ast],\n )\n elif value is None:\n if def_ast.default_value is not None:\n values[var_name] = value_from_ast(\n def_ast.default_value, var_type\n ) # type: ignore\n if isinstance(var_type, GraphQLNonNull):\n raise GraphQLError(\n 'Variable \"${var_name}\" of required type \"{var_type}\" was not provided.'.format(\n var_name=var_name, var_type=var_type\n ),\n [def_ast],\n )\n else:\n errors = is_valid_value(value, var_type)\n if errors:\n message = u\"\\n\" + u\"\\n\".join(errors)\n raise GraphQLError(\n 'Variable \"${}\" got invalid value {}.{}'.format(\n var_name, json.dumps(value, sort_keys=True), message\n ),\n [def_ast],\n )\n coerced_value = coerce_value(var_type, value)\n if coerced_value is None:\n raise Exception(\"Should have reported error.\")\n\n values[var_name] = coerced_value\n\n return values", "def get_field_config_variables(config, index, search_prefixes):\n # list of field info variables to find from config\n # used as keys for dictionaries\n field_info_items = ['name',\n 'levels',\n 'thresh',\n 'options',\n 'output_names',\n ]\n\n field_configs = {}\n search_suffixes = {}\n\n # initialize field configs dictionary values to None\n # initialize dictionary of valid suffixes to search for with\n # the capitalized version of field info name\n for field_info_item in field_info_items:\n field_configs[field_info_item] = None\n search_suffixes[field_info_item] = [field_info_item.upper()]\n\n # add alternate suffixes for config variable names to attempt\n search_suffixes['name'].append('INPUT_FIELD_NAME')\n search_suffixes['name'].append('FIELD_NAME')\n search_suffixes['levels'].append('INPUT_LEVEL')\n search_suffixes['levels'].append('FIELD_LEVEL')\n search_suffixes['output_names'].append('OUTPUT_FIELD_NAME')\n search_suffixes['output_names'].append('FIELD_NAME')\n\n # look through field config keys and obtain highest 
priority\n # variable name for each field config\n for search_var, suffixes in search_suffixes.items():\n for prefix in search_prefixes:\n\n found = False\n for suffix in suffixes:\n var_name = f\"{prefix}VAR{index}_{suffix}\"\n # if variable is found in config,\n # get the value and break out of suffix loop\n if config.has_option('config', var_name):\n field_configs[search_var] = config.getraw('config',\n var_name)\n found = True\n break\n\n # if config variable was found, break out of prefix loop\n if found:\n break\n\n return field_configs", "def variables(self):\n return {u for u in self if u.type == 'var'}", "def format_variable(self, variablesReference, filter=None, start=None, count=None, format=None):\n\n # format is ignored, TODO?\n\n vs = None if start is None or start == 0 else start\n es = None if count is None or count == 0 else count\n\n var, name, tt, parent = self.scope_assign[variablesReference]\n\n # print(str(var) + \", \" + str(name) + \", \" + str(tt))\n\n is_slotted = False\n\n if not isinstance(var, dict) and not isinstance(var, list):\n if hasattr(var, \"__dict__\"):\n var = var.__dict__\n else:\n is_slotted = True\n\n # print (str(var))\n\n if not is_slotted and isinstance(var, dict):\n if filter is not None and filter == \"indexed\":\n return []\n keys = sorted(var.keys())\n elif not is_slotted:\n if filter is not None and filter == \"named\":\n return []\n keys = range(len(var))\n elif is_slotted:\n keys = dir(var)\n\n if \"self\" in keys:\n keys.remove(\"self\")\n keys = [\"self\"] + keys\n\n # print (str(keys))\n\n it = 0\n total = 0\n variables = []\n for vkey in keys:\n if vs is None or it >= vs:\n var_ref = self.scope_var_id\n if is_slotted:\n value = getattr(var, vkey)\n else:\n value = var[vkey]\n\n vardesc = {}\n variables.append(vardesc)\n\n vardesc[\"name\"] = vkey\n vardesc[\"value\"] = str(value)\n vardesc[\"type\"] = str(type(value))\n # vardesc[\"presentationHint\"] # TODO!!!\n vardesc[\"evaluateName\"] = vkey\n vardesc[\"variablesReference\"] = var_ref\n\n vv_inner = value\n vv_slotted = False\n if not isinstance(vv_inner, dict) and not isinstance(vv_inner, list):\n if hasattr(vv_inner, \"__dict__\"):\n vv_inner = vv_inner.__dict__\n else:\n vv_slotted = True\n\n if not vv_slotted and isinstance(vv_inner, dict):\n vardesc[\"namedVariables\"] = len(vv_inner.keys())\n elif not vv_slotted:\n vardesc[\"indexedVariables\"] = len(vv_inner)\n else:\n vardesc[\"namedVariables\"] = len(dir(vv_inner))\n\n self.scope_assign[var_ref] = (value, vkey, str(type(value)), var)\n\n self.scope_var_id += 1\n total += 1\n it += 1\n if es is not None and total >= es:\n break\n\n return variables", "def get_all_variables(self):\n return [self.item]", "def get_variables(self):\n return [self.variables[key] for key in sorted(self.variables)]", "def list_of_vars_in_user_file():\n # parser = argparse.ArgumentParser()\n # parser.add_argument(\"path\")\n # path = parser.parse_args().path\n # path = DUMMY_FILE_PATH\n path = parser.parse_args().path\n logger.info(\"Using the file: {}\".format(path))\n\n if not os.path.exists(path):\n msg = \"The file ({}) does not exist.\".format(path)\n raise RuntimeError(msg)\n with cdms2.open(path) as f:\n return f.variables.keys()", "def get_variables(self, param_instance=None):\n\n\t\tif param_instance is not None and not isinstance(param_instance, ParameterMap):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = 
''\n\t\tapi_path = api_path + '/crm/v2/settings/variables'\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_GET)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_READ)\n\t\thandler_instance.set_param(param_instance)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.response_handler import ResponseHandler\n\t\texcept Exception:\n\t\t\tfrom .response_handler import ResponseHandler\n\t\treturn handler_instance.api_call(ResponseHandler.__module__, 'application/json')", "def variables(self):\n return self._variablesDef", "def env_variables(self) -> list[\"EnvVariable\"]:\n _args: list[Arg] = []\n _ctx = self._select(\"envVariables\", _args)\n _ctx = EnvVariable(_ctx)._select_multiple(\n _name=\"name\",\n _value=\"value\",\n )\n return _ctx.execute_sync(list[EnvVariable])", "def __get_vars_list(self, template_idx, log):\n template = self.templates[template_idx].split()\n log = log.split()\n variables = []\n pt = pl = 0\n while pt < len(template) and pl < len(log):\n if template[pt] == log[pl]:\n pt += 1\n pl += 1\n continue\n elif template[pt] == '*':\n # found a variable\n while pt < len(template) and template[pt] == '*':\n # in case there are many variables together\n pt += 1\n if pt >= len(template):\n # it's the end of the template\n variables.append(' '.join(log[pl:]))\n break\n else:\n variable_tokens = []\n while pl < len(log) and log[pl] != template[pt]:\n variable_tokens.append(log[pl])\n pl += 1\n # it duplicates when many variables together for a correct output\n variables.append(' '.join(variable_tokens))\n else:\n # it is a variable not covered by the template asterisks\n # we move on on the log but stay on the template token\n pl += 1\n return variables", "def get_variables(self):\n return self.variables", "def listall(self, varName=None, all=None):\n\n if varName is None:\n return None\n var = self.getVariable(varName)\n return var.listall(all=all)", "def variables(self):\n return self._.vars", "def getSelectedVariables(self):\r\n\r\n variables = []\r\n\r\n if self.ui.variablesStackedWidget.currentWidget() == self.ui.treePage:\r\n for index in self.ui.treeView.selectionModel().selectedRows():\r\n sourceIndex = self.treeFilterModel.mapToSource(index)\r\n treeItem = sourceIndex.internalPointer()\r\n if treeItem.variable is not None:\r\n variables.append(treeItem.variable)\r\n else:\r\n for index in self.ui.tableView.selectionModel().selectedRows():\r\n sourceIndex = self.tableFilterModel.mapToSource(index)\r\n variable = sourceIndex.internalPointer()\r\n variables.append(variable)\r\n\r\n return variables", "def get_variables_func(arguments, exclude):\n names = [name for name in arguments.keys() if name not in exclude]\n return lambda obj: {name: getattr(obj, name) for\n name in names}", "def get_variables(self):\n local_variables = self._design.GetVariables(\n )+self._design.GetPostProcessingVariables()\n return {lv: self.get_variable_value(lv) for lv in local_variables}", "def __get_vars(self):\n if self.resolved_vars:\n return self.resolved_vars\n return self.config_vars", "def vars(self, scope: str = '') -> VarCollection:\n if scope:\n return VarCollection((scope + k, v) for k, v in self.vc.items())\n return VarCollection(self.vc)", "def getVariables(self):\n return [x for x in self.variables.values() if x is not None]", "def __setVarNames(self):\n result = set()\n\n # detecting variables\n for templatePart in self.inputString().split(\"{\"):\n if templatePart is '' or \"}\" not in 
templatePart:\n continue\n\n endIndex = templatePart.find('}')\n result.add(templatePart[:endIndex])\n\n self.__varNames = list(result)", "def variable_names(self):\n \n return [x['variable'] for x in self.variable_dicts()]", "def query_and_print_variables(md):\n\n # save x variable as dictionary with keys (s, v, t)\n x_searchers = {}\n # save beta variable as dictionary with keys (v, t)\n b_target = {}\n\n for var in md.getVars():\n my_var_name = var.varName\n my_var_value = var.x\n print('%s %g' % (my_var_name, my_var_value))\n\n if 'x' in my_var_name:\n s = int(my_var_name[2])\n v = int(my_var_name[4])\n t = int(my_var_name[6])\n\n if my_var_value >= 0.5:\n x_searchers[(s, v, t)] = 1\n else:\n x_searchers[(s, v, t)] = 0\n\n elif 'beta' in my_var_name:\n # print('%s %g' % (my_var_name, my_var_value))\n # remember b[0] is probability of capture\n v = int(my_var_name[5])\n t = int(my_var_name[7])\n b_target[v, t] = my_var_value\n\n obj = md.getObjective()\n print(obj.getValue())\n\n return x_searchers, b_target", "def parse_var_list(config, time_info=None, data_type=None, met_tool=None,\n levels_as_list=False):\n\n # validate configs again in case wrapper is not running from run_metplus\n # this does not need to be done if parsing a specific data type,\n # i.e. ENS or FCST\n if data_type is None:\n if not validate_field_info_configs(config)[0]:\n return []\n elif data_type == 'BOTH':\n config.logger.error(\"Cannot request BOTH explicitly in parse_var_list\")\n return []\n\n # var_list is a list containing an list of dictionaries\n var_list = []\n\n # if specific data type is requested, only get that type\n if data_type:\n data_types = [data_type]\n # otherwise get both FCST and OBS\n else:\n data_types = ['FCST', 'OBS']\n\n # get indices of VAR<n> items for data type and/or met tool\n indices = []\n if met_tool:\n indices = find_var_name_indices(config, data_types, met_tool).keys()\n if not indices:\n indices = find_var_name_indices(config, data_types).keys()\n\n # get config name prefixes for each data type to find\n dt_search_prefixes = {}\n for current_type in data_types:\n # get list of variable prefixes to search\n prefixes = get_field_search_prefixes(current_type, met_tool)\n dt_search_prefixes[current_type] = prefixes\n\n # loop over all possible variables and add them to list\n for index in indices:\n field_info_list = []\n for current_type in data_types:\n # get dictionary of existing config variables to use\n search_prefixes = dt_search_prefixes[current_type]\n field_configs = get_field_config_variables(config,\n index,\n search_prefixes)\n\n field_info = format_var_items(field_configs, time_info)\n if not isinstance(field_info, dict):\n config.logger.error(f'Could not process {current_type}_'\n f'VAR{index} variables: {field_info}')\n continue\n\n field_info['data_type'] = current_type.lower()\n field_info_list.append(field_info)\n\n # check that all fields types were found\n if not field_info_list or len(data_types) != len(field_info_list):\n continue\n\n # check if number of levels for each field type matches\n n_levels = len(field_info_list[0]['levels'])\n if len(data_types) > 1:\n if (n_levels != len(field_info_list[1]['levels'])):\n continue\n\n # if requested, put all field levels in a single item\n if levels_as_list:\n var_dict = {}\n for field_info in field_info_list:\n current_type = field_info.get('data_type')\n var_dict[f\"{current_type}_name\"] = field_info.get('name')\n var_dict[f\"{current_type}_level\"] = field_info.get('levels')\n 
var_dict[f\"{current_type}_thresh\"] = field_info.get('thresh')\n var_dict[f\"{current_type}_extra\"] = field_info.get('extra')\n var_dict[f\"{current_type}_output_name\"] = field_info.get('output_names')\n\n var_dict['index'] = index\n var_list.append(var_dict)\n continue\n\n # loop over levels and add all values to output dictionary\n for level_index in range(n_levels):\n var_dict = {}\n\n # get level values to use for string substitution in name\n # used for python embedding calls that read the level value\n sub_info = {}\n for field_info in field_info_list:\n dt_level = f\"{field_info.get('data_type')}_level\"\n sub_info[dt_level] = field_info.get('levels')[level_index]\n\n for field_info in field_info_list:\n current_type = field_info.get('data_type')\n name = field_info.get('name')\n level = field_info.get('levels')[level_index]\n thresh = field_info.get('thresh')\n extra = field_info.get('extra')\n output_name = field_info.get('output_names')[level_index]\n\n # substitute level in name if filename template is specified\n subbed_name = do_string_sub(name,\n skip_missing_tags=True,\n **sub_info)\n\n var_dict[f\"{current_type}_name\"] = subbed_name\n var_dict[f\"{current_type}_level\"] = level\n var_dict[f\"{current_type}_thresh\"] = thresh\n var_dict[f\"{current_type}_extra\"] = extra\n var_dict[f\"{current_type}_output_name\"] = output_name\n\n var_dict['index'] = index\n var_list.append(var_dict)\n\n # extra debugging information used for developer debugging only\n '''\n for v in var_list:\n config.logger.debug(f\"VAR{v['index']}:\")\n if 'fcst_name' in v.keys():\n config.logger.debug(\" fcst_name:\"+v['fcst_name'])\n config.logger.debug(\" fcst_level:\"+v['fcst_level'])\n if 'fcst_thresh' in v.keys():\n config.logger.debug(\" fcst_thresh:\"+str(v['fcst_thresh']))\n if 'fcst_extra' in v.keys():\n config.logger.debug(\" fcst_extra:\"+v['fcst_extra'])\n if 'fcst_output_name' in v.keys():\n config.logger.debug(\" fcst_output_name:\"+v['fcst_output_name'])\n if 'obs_name' in v.keys():\n config.logger.debug(\" obs_name:\"+v['obs_name'])\n config.logger.debug(\" obs_level:\"+v['obs_level'])\n if 'obs_thresh' in v.keys():\n config.logger.debug(\" obs_thresh:\"+str(v['obs_thresh']))\n if 'obs_extra' in v.keys():\n config.logger.debug(\" obs_extra:\"+v['obs_extra'])\n if 'obs_output_name' in v.keys():\n config.logger.debug(\" obs_output_name:\"+v['obs_output_name'])\n if 'ens_name' in v.keys():\n config.logger.debug(\" ens_name:\"+v['ens_name'])\n config.logger.debug(\" ens_level:\"+v['ens_level'])\n if 'ens_thresh' in v.keys():\n config.logger.debug(\" ens_thresh:\"+str(v['ens_thresh']))\n if 'ens_extra' in v.keys():\n config.logger.debug(\" ens_extra:\"+v['ens_extra'])\n if 'ens_output_name' in v.keys():\n config.logger.debug(\" ens_output_name:\"+v['ens_output_name'])\n '''\n return sorted(var_list, key=lambda x: x['index'])", "def get_variables_by_name(given_name, scope=None):\n suffix = '/' + given_name + ':|^' + given_name + ':'\n return get_variables(scope=scope, suffix=suffix)", "def get_variables_to_restore(include=None, exclude=None):\n if include is None:\n # Include all variables.\n vars_to_include = get_variables()\n else:\n if not isinstance(include, (list, tuple)):\n raise TypeError('include is provided but is not a list or a tuple.')\n vars_to_include = []\n for scope in include:\n vars_to_include += get_variables(scope)\n vars_to_exclude = set()\n if exclude is not None:\n if not isinstance(exclude, (list, tuple)):\n raise TypeError('exclude is provided but is not a list or 
a tuple.')\n for scope in exclude:\n vars_to_exclude |= set(get_variables(scope))\n # Exclude the variables in vars_to_exclude\n return [v for v in vars_to_include if v not in vars_to_exclude]", "def get_swp_values(self, var: str) -> List[Any]:\n return self._sweep_params[var]", "def GetVariableAttributes(template_src, env=None):\n env = env or jinja2.Environment()\n abstract_syntax_tree = env.parse(template_src)\n node_visitor = _GetattrNodeVisitor()\n node_visitor.visit(abstract_syntax_tree)\n\n output = set()\n undeclared_variables = meta.find_undeclared_variables(abstract_syntax_tree)\n used_variables = set()\n for node in node_visitor.getattr_nodes:\n attr_list = _GetAttributeList(node)\n if attr_list[0] in undeclared_variables:\n used_variables.add(attr_list[0])\n output.add('.'.join(attr_list))\n return output | (undeclared_variables - used_variables)", "def vars(cls):\n for key in dir(cls):\n if key.startswith('var_'):\n yield key[4:]", "def get_filter_fields(result, verbose=False):\n result_info = get_result(result)\n filter_fields = result_info[\"filter_fields\"]\n if verbose:\n pprint(filter_fields)\n return filter_fields", "def variables(self):\n return [i.name for i in self.inputs + self.outputs]", "def read_container_vars(container):\n\n fe_data = read_all_containers()\n\n if container not in fe_data:\n container_names = fe_data.keys()\n container_names.sort()\n raise ValueError(bcolors.FAIL + 'Invalid container : ' + container + bcolors.ENDC + '\\n'\n 'Valid containers : ' + '%s' % ' '.join(map(str, container_names)))\n else:\n container_vars = fe_data[container]\n\n if isinstance(container_vars, list):\n for container_var in container_vars:\n _complete_default_container_vars(container, container_var)\n return container_vars\n else:\n _complete_default_container_vars(container, container_vars)\n return [container_vars]", "def evaluate_filters(\n isovar_result,\n filter_thresholds,\n filter_flags=[]):\n filter_values_dict = evaluate_boolean_filters(isovar_result, filter_flags)\n filter_values_dict.update(\n evaluate_threshold_filters(isovar_result, filter_thresholds))\n return filter_values_dict", "def get_variables(\n table_id = None,\n source = None, \n language = 'en',\n base_url = 'http://data.ssb.no/api/v0',\n full_url = None):\n \n if full_url is None:\n full_url = '{base_url}/{language}/table/{table_id}'.format(\n base_url = base_url, language = language, table_id = table_id)\n \n df = pd.read_json(full_url)\n variables = [dict(values) for values in df.iloc[:,1]]\n \n return variables", "def filters(self, **kwargs):\n return config.filters(self._host, self._session, **kwargs)", "def get_variables(finetune_ckpt_path, exclude_scopes=None):\n if exclude_scopes is not None:\n exclusions = [scope.strip() for scope in exclude_scopes]\n variables_to_restore = [ var for var in slim.get_model_variables() if not np.any([var.op.name.startswith(ex) for ex in exclusions])]\n else:\n variables_to_restore = [ var for var in slim.get_model_variables()]\n return variables_to_restore", "def get_vars(scope=''):\n return [x for x in tf.trainable_variables() if scope in x.name]", "def get_addressbook_variables(self, id):\n logger.info(\"Function call: get_addressbook_variables_list: '{}'\".format(id, ))\n return self.__handle_error(\"Empty addressbook id\") if not id else self.__handle_result(self.__send_request('addressbooks/{}/variables'.format(id)))", "def _var(self, name=None, context=None):\n\t\tif name is None: name = None\n\t\tif context is None: context = self.context\n\t\tif (not 
name):\n\t\t\treturn context.getVariables().keys()\n\t\telif True:\n\t\t\treturn context.getVariables().get(name)", "def _get_parametered_variables(self, variables, parameters):\n cartesian_product_parameters = testcase.parse_parameters(\n parameters,\n self.testset_file_path\n ) or [{}]\n\n parametered_variables_list = []\n for parameter_mapping in cartesian_product_parameters:\n parameter_mapping = parameter_mapping or {}\n variables = utils.override_variables_binds(\n variables,\n parameter_mapping\n )\n\n parametered_variables_list.append(variables)\n\n return parametered_variables_list", "def vars(self, deep=False, with_name=None, hidden=True):\n\n # Only the variables of the main group:\n if with_name is None:\n if hidden or self.hidden_prefix is None:\n yield from self._vars\n else:\n yield from filter(\n lambda x: not x.startswith(self.hidden_prefix), self._vars)\n elif with_name in self._vars:\n yield with_name\n\n if deep:\n for group in self._groups:\n yield from (\n group + \"/\" + sub_var\n for sub_var in self[group].vars(\n deep, with_name, hidden)\n )", "def GetListVariable(self, name):\n var = self._makefile.variables.get(name, expand=True)[2]\n if not var:\n return []\n return var.resolvesplit(self._makefile, self._makefile.variables)", "def get_v1_filters(args: Dict[str, Any]) -> List[str]:\n filters = []\n args_name_to_filter_name = {\n 'alert-status': 'alert.status',\n 'policy-name': 'policy.name',\n 'policy-label': 'policy.label',\n 'policy-compliance-standard': 'policy.complianceStandard',\n 'cloud-account': 'cloud.account',\n 'cloud-account-id': 'cloud.accountId',\n 'cloud-region': 'cloud.region',\n 'alert-rule-name': 'alertRule.name',\n 'resource-id': 'resource.id',\n 'resource-name': 'resource.name',\n 'resource-type': 'resource.type',\n 'alert-id': 'alert.id',\n 'cloud-type': 'cloud.type',\n 'policy-type': 'policy.type',\n 'policy-severity': 'policy.severity',\n }\n for arg_name, filter_name in args_name_to_filter_name.items():\n if arg_value := args.get(arg_name):\n filters.append(f'{filter_name}={arg_value}')\n\n return filters", "def test_variables_get(self):\n pass", "def variables(self):\n return [term.variable for term in self.terms]", "def variables(self):\n return self._variables", "def query_variables(md):\n\n # save as dictionaries with searchers as keys\n x_searchers = {}\n b_target = {}\n\n t_max = 0\n\n for var in md.getVars():\n my_var_name = var.varName\n my_var_value = var.x\n # print('%s %g' % (my_var_name, my_var_value))\n\n if 'x' in my_var_name:\n s = int(my_var_name[2:my_var_name.find(\",\")])\n v = int(my_var_name[my_var_name.find(\",\") + 1:my_var_name.rfind(\",\")])\n t = int(my_var_name[my_var_name.rfind(\",\") + 1:-1])\n\n # print('%s = %f ' % (my_var_name, my_var_value))\n x_searchers[(s, v, t)] = my_var_value\n\n if t > t_max:\n t_max = t\n\n elif 'beta' in my_var_name and '_s' not in my_var_name:\n # print('%s %g' % (my_var_name, my_var_value))\n # remember: b[0] is probability of capture\n v = int(my_var_name[5:my_var_name.find(\",\")])\n t = int(my_var_name[my_var_name.find(\",\") + 1:my_var_name.rfind(\"]\")])\n b_target[(v, t)] = my_var_value\n\n # make sure x is binary\n x_searchers = enforce_binary(x_searchers, t_max)\n b_target = enforce_sum_1(b_target, t_max)\n\n # x_searchers[(s, v, t)] and b_target[(v, t)]\n return x_searchers, b_target", "def get_all_variables(self):\n out = []\n for i in self.items:\n out += i.get_all_variables()\n return out", "def get_all_variables(self):\n out = []\n for i in self.items:\n out += 
i.get_all_variables()\n return out", "def get_all_variables(self):\n out = []\n for i in self.items:\n out += i.get_all_variables()\n return out", "def variable_selection(self):\n X = []\n\n if self.cfg.variables == 'X1':\n X.append({\"name\": \"X1\", \"variables\": ['DepDelay', 'TaxiOut']})\n elif self.cfg.variables == 'all':\n X.append({\"name\": \"X1\", \"variables\": ['DepDelay', 'TaxiOut']})\n X.append({\"name\": \"X2\", \"variables\": ['DepDelay', 'TaxiOut', 'HotDepTime']})\n X.append({\"name\": \"X3\", \"variables\": ['DepDelay', 'TaxiOut', 'HotDayOfWeek', 'Speed']})\n X.append({\"name\": \"X4\", \"variables\": ['DepDelay', 'TaxiOut', 'HotDayOfWeek', 'Speed', 'HotMonth']})\n X.append({\"name\": \"X5\", \"variables\": ['DepDelay', 'TaxiOut', 'Speed', 'HotDepTime', 'HotCRSCatArrTime']})\n elif self.cfg.variables == 'best':\n X.append({\"name\": \"X5\", \"variables\": ['DepDelay', 'TaxiOut', 'Speed', 'HotDepTime', 'HotCRSCatArrTime']})\n return X", "def variables_used (self) :\r\n\t\treturn [i[0] for i in self.parameters]", "def variables(value: SupportsVariables) -> Iterator[Variable]:\n try:\n return value._variables()\n except AttributeError:\n raise TypeError(f'object of type {type(value).__name__} has no variables()')", "def variables(self) -> Optional[Sequence['outputs.VariablePatch']]:\n return pulumi.get(self, \"variables\")", "def getVars(self):\n return self.__vars", "def get_all_variables(self):\n raise NotImplementedError()", "def variables(s):\n result = set([])\n def walk(s):\n if is_variable(s):\n result.add(s)\n else:\n for arg in s.args:\n walk(arg)\n walk(s)\n return result", "def getRRDVariables(self, upToPoint=None):\n cmds = self.getFakeGraphCmds(upToPoint=upToPoint)\n names = [line[line.find(':')+1:line.find('=')]\n for line in cmds.split('\\n')\n if line[:line.find(':')] in ('DEF', 'CDEF', 'VDEF')]\n nameSet = set(names)\n result = []\n for name in names:\n #only allow -raw variables if a corresponding -rpn variable is present\n if name.endswith('-raw') and name.replace('-raw', '-rpn') in nameSet or not name.endswith('-raw'):\n result.append(name)\n return result", "def get_load_vars(self):\n all_vars = tf.compat.v1.global_variables()\n if self.params.cp_load_var is None:\n load_v = [v for v in all_vars if v not in self.full_model_load_ignore]\n else:\n load_v = []\n error_string = \"\\n\"\n for weight in self.params.cp_load_var:\n found=False\n for var in all_vars:\n error_string += \"\\t\" + var.name + \"\\n\"\n if var.name == weight:\n load_v.append(var)\n found=True\n break\n if not found:\n assert False, (\n \"Weight specified in cp_load_var \"+str(weight)+\" not found. 
All variables:\"+error_string)\n return load_v", "def getDefinedVars(self):\n numTabs = self.tabWidget.count()\n varList = []\n \n for i in range(numTabs):\n var = self.tabWidget.widget(i).getVar()\n name = self.tabWidget.tabText(i)\n varList.append([name, var])\n\n return varList", "def extract_variables(expected_variables, _request):\n extracted_variables = {}\n for variable in expected_variables:\n form_var = _request.form.get(variable)\n args_var = _request.args.get(variable)\n if form_var and args_var:\n extracted_variables[variable] = [form_var, args_var]\n else:\n extracted_variables[variable] = form_var if form_var else args_var\n return extracted_variables", "def get_variables(self) -> Variables:\n # Assemble API calls for concurrent execution\n calls = []\n for (year, table_name), group in self.get_variables_by_year_and_table_name().items():\n for variable in group:\n call = self._census_api.fetch_variable(self.estimate, year, table_name, variable)\n calls.append(call)\n\n # Make concurrent API calls\n gathered_calls = self._census_api.gather_calls(calls)\n try:\n results = asyncio.run(gathered_calls)\n except RuntimeError as error:\n # Handle Jupyter issue with multiple running event loops by importing nest_asyncio\n if error.args[0] == 'asyncio.run() cannot be called from a running event loop':\n import nest_asyncio\n\n nest_asyncio.apply()\n results = asyncio.run(gathered_calls) # type: ignore\n else:\n raise error\n\n # Compile invalid variables\n variables = {}\n for variable_json in results:\n year = variable_json['year']\n if not variable_json.get('label', False):\n invalid_variable = variable_json['name']\n self._invalid_variables[year].append(invalid_variable)\n message = f'Warning: {invalid_variable} is not a recognized variable for {year}'\n logger.warning(message)\n else:\n variables[year, variable_json['name']] = variable_json\n return variables", "def get_variables(self):\n return {VariableString(s): self.get_variable_value(s) for s in self._project.GetVariables()}", "def _get_var_vals(item, context, global_only=False):\n\n import procedures\n import statements\n\n # Get all the variables.\n\n # Vars on RHS.\n var_visitor = var_in_expr_visitor(context)\n item.accept(var_visitor, no_embedded_loops=False)\n var_names = var_visitor.variables\n\n # Vars on LHS.\n lhs_visitor = lhs_var_visitor()\n item.accept(lhs_visitor, no_embedded_loops=False)\n lhs_var_names = lhs_visitor.variables\n \n # Handle member access expressions.\n var_names = var_names.union(lhs_var_names)\n tmp = set()\n for var in var_names:\n tmp.add(var)\n if (\".\" in var):\n tmp.add(var[:var.index(\".\")])\n var_names = tmp\n\n # Handle With variables if needed.\n if (context.with_prefix_raw is not None):\n var_names.add(safe_str_convert(context.with_prefix_raw))\n \n # Get a value for each variable.\n r = {}\n zero_arg_funcs = set()\n for var in var_names:\n\n # Don't try to convert member access expressions that involve\n # method calls to Python variables. These should be handled\n # later as actual calls.\n if (\"(\" in var):\n continue\n\n # Do we already know the variable value? 
\n val = None\n orig_val = None\n try:\n\n # Try to get the current value.\n val = context.get(var, global_only=global_only)\n orig_val = val\n \n # We have been kind of fuzzing the distinction between global and\n # local variables, so tighten down on globals only by just picking\n # up global variables that appear on the RHS but not LHS.\n if (global_only and (var in lhs_var_names)):\n continue\n \n # Do not set function arguments to new values.\n # Do not set loop index variables to new values.\n if ((val == \"__FUNC_ARG__\") or\n (val == \"__ALREADY_SET__\") or\n (val == \"__LOOP_VAR__\")):\n continue\n \n # Function definitions are not valid values.\n if isinstance(val, (VbaLibraryFunc, procedures.Function, procedures.Sub, statements.External_Function)):\n\n # Don't use the function definition as the value.\n val = None\n \n # 0 arg func calls should only appear on the RHS\n if (var not in lhs_var_names):\n zero_arg_funcs.add(var)\n\n # Don't treat these function calls as variables and\n # assign initial values to them.\n context.set(\"__ORIG__\" + var, orig_val, force_local=True)\n context.set(\"__ORIG__\" + var, orig_val, force_global=True)\n continue\n\n # 'inf' is not a valid value.\n val_str = None\n try:\n val_str = safe_str_convert(val).strip()\n except UnicodeEncodeError:\n val_str = filter(isprint, val).strip()\n if ((val_str == \"inf\") or\n (val_str == \"-inf\")):\n val = None\n\n # 'NULL' is not a valid value.\n if (val_str == \"NULL\"):\n val = None\n\n # Weird bug.\n if (\"core.vba_library.run_function\" in val_str):\n val = 0\n \n # Unedfined variable.\n except KeyError:\n if global_only:\n continue\n\n # Got a valid value for the variable?\n if (val is None):\n\n # Variable is not defined. Try to infer the type based on how it is used.\n #print \"TOP LOOK TYPE: \" + safe_str_convert(var)\n var_type, certain_of_type = _infer_type(var, item, context)\n #print (var_type, certain_of_type)\n if (var_type == \"INTEGER\"):\n val = \"NULL\"\n if certain_of_type:\n #print \"SET TYPE INT\"\n #print var\n val = 0\n context.set_type(var, \"Integer\")\n elif (var_type == \"STRING\"):\n val = \"\"\n if certain_of_type:\n context.set_type(var, \"String\")\n else:\n log.warning(\"Type '\" + safe_str_convert(var_type) + \"' of var '\" + safe_str_convert(var) + \"' not handled.\" + \\\n \" Defaulting initial value to \\\"NULL\\\".\")\n val = \"NULL\"\n\n # Rename some vars that overlap with python builtins.\n var = utils.fix_python_overlap(var)\n \n # Save the variable value.\n r[var] = val\n\n # Save the regex pattern if this is a regex object.\n if (safe_str_convert(val) == \"RegExp\"):\n if (context.contains(\"RegExp.pattern\")):\n pval = to_python(context.get(\"RegExp.pattern\"), context)\n if (pval.startswith('\"')):\n pval = pval[1:]\n if (pval.endswith('\"')):\n pval = pval[:-1]\n r[var + \".Pattern\"] = pval\n if (context.contains(\"RegExp.global\")):\n gval = to_python(context.get(\"RegExp.global\"), context)\n gval = gval.replace('\"', \"\")\n if (gval == \"True\"):\n gval = True\n if (gval == \"False\"):\n gval = False\n r[var + \".Global\"] = gval\n \n # Mark this variable as being set in the Python code to avoid\n # embedded loop Python code generation stomping on the value.\n context.set(var, \"__ALREADY_SET__\", force_local=True)\n context.set(var, \"__ALREADY_SET__\", force_global=True)\n \n # Save the original value so we know it's data type for later use in JIT\n # code generation.\n if (orig_val is None):\n orig_val = val\n context.set(\"__ORIG__\" + var, orig_val, 
force_local=True)\n context.set(\"__ORIG__\" + var, orig_val, force_global=True)\n \n # Done.\n return (r, zero_arg_funcs)", "def parameters(self):\n return [i for i in self.variables if has_roles(i, Parameter)]", "def get_vars(self):\n return [self.mu, self.var]", "def variables(self) -> pulumi.Input[Sequence[pulumi.Input['AssetModelExpressionVariableArgs']]]:\n return pulumi.get(self, \"variables\")", "def variables(self) -> pulumi.Input[Sequence[pulumi.Input['AssetModelExpressionVariableArgs']]]:\n return pulumi.get(self, \"variables\")", "def __get_data(self, filters):\n if not os.path.exists(CACHE_FILE):\n raise DataNotScrappedError()\n df = pd.read_csv(CACHE_FILE)\n if not filters:\n return list(df.T.to_dict().values())\n\n filtered_df = df[df['name'] == filters][['category', 'name']]\n\n return list(filtered_df.T.to_dict().values())" ]
[ "0.6380369", "0.5820642", "0.57636815", "0.56829864", "0.5545043", "0.552599", "0.5512235", "0.54310113", "0.5424095", "0.5416049", "0.5415185", "0.5367262", "0.53486437", "0.53446084", "0.5319058", "0.52849674", "0.52658916", "0.5254456", "0.5228465", "0.520828", "0.5206558", "0.5184065", "0.5169507", "0.5148047", "0.5146151", "0.51442504", "0.51369315", "0.51298916", "0.5127767", "0.51276964", "0.51250833", "0.51199317", "0.51070476", "0.51003325", "0.5085403", "0.50707394", "0.50683737", "0.50631016", "0.50618863", "0.50498116", "0.5037957", "0.5028189", "0.5027044", "0.4999761", "0.4997579", "0.49837586", "0.49710265", "0.49615473", "0.49591097", "0.493802", "0.4918586", "0.49073207", "0.49071214", "0.49041653", "0.49035725", "0.4894516", "0.48883253", "0.48748684", "0.48726866", "0.48706827", "0.48635995", "0.48554853", "0.4853137", "0.48380467", "0.48359796", "0.48302212", "0.4826667", "0.48186645", "0.48029184", "0.48020124", "0.48009568", "0.48002374", "0.47855037", "0.47809327", "0.47809055", "0.477587", "0.47687513", "0.4767383", "0.47498098", "0.47498098", "0.47498098", "0.47362968", "0.47307122", "0.4725411", "0.47213498", "0.4718528", "0.47171438", "0.47150084", "0.47040737", "0.46978387", "0.46849012", "0.46824422", "0.46788052", "0.4661464", "0.46571225", "0.46513212", "0.4645075", "0.46449485", "0.46449485", "0.464048" ]
0.5790898
2
Gets information about a single variable.
def GetVariable(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getVar(self, id):\n if id in self.variables:\n return self.variables[id]", "def getVariable(self, varName):\n return self[varName]", "def get_data(self, variable):\n return self.data.get(variable)", "async def getVariable(ctx, var):\n if ctx.message.channel.name != \"staff\":\n await ctx.send(\"You can only fetch variables from the staff channel.\")\n else:\n await ctx.send(\"Attempting to find variable.\")\n try:\n variable = globals()[var]\n await ctx.send(f\"Variable value: `{variable}`\")\n except:\n await ctx.send(f\"Can't find that variable!\")", "def get_variable(self, svc, var):\n action = \"variableget\"\n path = \"data_request?id=%s&DeviceNum=%d&serviceId=%s&Variable=%s\" \\\n % (action, self.id, svc, var)\n return self.vera.get(path)", "def getVariableInfo(self, variables, name):\r\n\r\n return [var.return_variable_dict() for var in variables if var.name == name][0]", "def get_variable(self, identifier):\n\t\tif not hasattr(self, \"__sdvcache\"):\n\t\t\tself.__sdvcache = {}\n\t\t\tbuffer = str(self.obj.description_variables.variables)\n\t\t\tbuffer = StringIO(buffer)\n\t\t\ttoken = buffer.read(1)\n\t\t\twhile token:\n\t\t\t\tif token == \"$\":\n\t\t\t\t\tvariable = self.__read_until(buffer, \"=\")\n\t\t\t\t\tvalue = self.__read_until(buffer, \"\\n\").strip()\n\t\t\t\t\tself.__sdvcache[variable] = value\n\t\t\t\ttoken = buffer.read(1)\n\t\t\n\t\tif identifier not in self.__sdvcache:\n\t\t\traise VariableNotFound(identifier)\n\t\t\n\t\treturn self.__sdvcache[identifier]", "def get(self, var):\n return getattr(self, 'var_%s' % (var,))", "def variable(self, name):\n\n status, stdout, stderr = self.__xcall__(['--variable=%s' % name])\n\n if status != 0:\n raise RuntimeError(\"error querying --variable=%s for package `%s': %s\" % (name, self.name, stderr))\n\n return stdout.strip()", "def Var(key):\n return vars[key]", "def get_metadata(self, variable):\n return self.dataset[variable]", "def get(self, var):\n s = self.eval('{0}'.format(var))\n return self.strip_answer(s)", "def get_variable(self, request, context):\n response = GetVariableResponse()\n value = self._delegator.get_variable(request.component, request.variable)\n response.value = encode(value)\n return response", "def get_variable(self, name):\n return self._properties[name]", "def get_variable_info(self, variable_info):\n \n try:\n # Take the data type associated to the variable\n t = self.fmu.get_variable_data_type(variable_info.name)\n \n # According to the data type read, select one of these methods to get the information\n if t == pyfmi.fmi.FMI_REAL:\n value = self.fmu.get_real( variable_info.value_reference )\n strType = \"Real\"\n elif t == pyfmi.fmi.FMI_INTEGER:\n value = self.fmu.get_integer( variable_info.value_reference )\n strType = \"Integer\"\n elif t == pyfmi.fmi.FMI_BOOLEAN:\n value = self.fmu.get_boolean( variable_info.value_reference )\n strType = \"Boolean\"\n elif t == pyfmi.fmi.FMI_ENUMERATION:\n value = self.fmu.get_int( variable_info.value_reference )\n strType = \"Enum\"\n elif t == pyfmi.fmi.FMI_STRING:\n value = self.fmu.get_string( variable_info.value_reference )\n strType = \"String\"\n else:\n logger.error(\"FMU-EXCEPTION, The type {0} is not known\".format(t))\n value = [\"\"]\n strType = \"Unknown\"\n \n # TODO: check the min and max value if the variables are not real or integers\n Min = self.fmu.get_variable_min(variable_info.name)\n Max = self.fmu.get_variable_max(variable_info.name)\n \n try:\n start = str(self.fmu.get_variable_start(variable_info.name))\n fixed = 
self.fmu.get_variable_fixed(variable_info.name)\n start = start+\" (fixed =\"+str(fixed)+\")\"\n except pyfmi.fmi.FMUException:\n start = \"\"\n \n strVal = str(value[0])\n strMin = str(Min)\n strMax = str(Max)\n if min < -1.0e+20:\n strMin = \"-Inf\"\n if max > 1.0e+20:\n strMax = \"+Inf\"\n \n return strType, strVal, start, strMin, strMax\n \n except pyfmi.fmi.FMUException:\n # if the real value is not present for this parameter/variable\n logger.error(\"FMU-EXCEPTION, No real value to read for variable {0}\".format(variable_info.name))\n return \"\", \"\", \"\", \"\", \"\"", "def get_variable(self, variable_name):\n with self._graph.as_default():\n return self._sess.run(self._get_tensor_by_name(variable_name))", "def __getitem__(self, key):\n return self.variables[key]", "def var(self, name):\n return self.get_ground_vector('!Var:{}'.format(name))", "def var(self, name):\n return self.get_ground_vector('!Var:{}'.format(name))", "def var(self, name):\n return self.get_ground_vector('!Var:{}'.format(name))", "def visit_Variable(self, node):\n var_name = node.value\n val = self.VARIABLES.get(var_name)\n if val is None:\n raise NameError(repr(var_name))\n else:\n return val", "def __getitem__(self, key: str) -> Any:\n return self.variables[key]", "def variable(self) -> Variable:\n ...", "def variables(self):\r\n return self.get_field('variable')", "def parse_variable(var):\n var_info = {} \n var_info[\"name\"] = var[\"name\"]\n\n # get the variable type\n raw_base_type = var[\"typeDescriptions\"][\"typeIdentifier\"].split(\"$\")[0]\n base_type = infer_type(raw_base_type)\n\n if (base_type != None):\n var_info[\"type\"] = base_type\n else:\n return None\n\n composite_types = parse_composite_types(var, base_type)\n if (composite_types != None):\n for k, v in composite_types.items():\n var_info[k] = v\n else: \n return None\n\n return var_info", "def get_var(my_vars: dict, name: str):\n desired_var = my_vars.get(name)\n if desired_var is not None:\n return desired_var\n else:\n var_names = 'x, y, alpha, beta, zeta, psi'\n print('No variable with this name, current model accepts only:' + var_names)\n return None", "def print_var1(self):\n print(self.get_var1())", "def __getitem__(self, varName):\n # Static variables\n if varName in self.statVars:\n staticFV = StaticFileVariable(self, varName)\n return staticFV\n\n # Time variables\n elif varName in self.timeVars:\n timeVariables = TimeFileVariable(self, varName)\n return timeVariables", "async def read_variable_by_name(self, name: str) -> Optional[Variable]:\n try:\n response = await self._client.get(f\"/variables/name/{name}\")\n return pydantic.parse_obj_as(Variable, response.json())\n except httpx.HTTPStatusError as e:\n if e.response.status_code == status.HTTP_404_NOT_FOUND:\n return None\n else:\n raise", "def getVariable(self):\n return _libsbml.Rule_getVariable(self)", "def get_info(self, name):\n return self.info[name]", "def getValue(variable):\n if(dic.__contains__(variable)):\n return dic[variable]\n else:\n print(\"Variable : \"+str(variable) + \" ERROR KEY NOT IN DIC\")", "def get_variable(self, variable_name):\n assert self.variable_name_to_index is not None\n return self.variable_name_to_index[variable_name]", "def GetString(self, variable):\n if self.Sets(variable):\n return self.variables[variable.upper().strip()]", "def get_variable(self, name, visual=None):\n # get the variables list\n if visual is None:\n variables = self.variables.values()\n else:\n variables = self.get_visual(visual)['variables']\n variables = [v for v in 
variables if v.get('name', '') == name]\n if not variables:\n return None\n return variables[0]", "def Get(self, section, var):\n return self.cp.get(section, var)", "def get_variable_name(self, line):\n self.E_str = \"get_variable_name\"\n corr_syn = \"<variable> = <value> OR <variable[<metadata>] = <value>\"\n words = line.split('=')\n md_var_names, md_names, _ = self.parse_metadata_line(words[0],\n get_set=\"set\")\n\n # Some error checking\n if len(md_var_names) > 1:\n err_msg = \"Syntax Error: Can only declare 1 variable per line\\n\\n\"\n self.print_error(err_msg+corr_syn)\n if md_var_names[0] not in self.variables:\n v_name = md_var_names[0]\n self.print_error(f\"Undeclared variable {v_name}\")\n\n # If we are setting some metadata\n metadata_name = False\n name = words[0].strip()\n if len(md_var_names) == 1:\n name = md_var_names[0].strip()\n metadata_name = md_names[0]\n\n return name, metadata_name", "def variable(self):\n return _coconut_tail_call(Var, self.name)", "def get(self, var, verbose=False, timeout=-1):\r\n if isinstance(var, str):\r\n var = [var]\r\n # make sure the variable(s) exist\r\n for variable in var:\r\n if self._eval(\"exist {0}\".format(variable),\r\n verbose=False) == 'ans = 0' and not variable == '_':\r\n raise Oct2PyError('{0} does not exist'.format(variable))\r\n argout_list, save_line = self._reader.setup(len(var), var)\r\n self._eval(save_line, verbose=verbose, timeout=timeout)\r\n return self._reader.extract_file(argout_list)", "def silicate(self):\n index = self.var_index(6)\n return self.var_data(index)", "def get_variable_value(self, name):\n return self._design.GetVariableValue(name)", "def get_airflow_variable(key: str) -> str:\n return models.Variable.get(key)", "def get_variable(self, name):\n if self._scalamagic:\n intp = self.scala_interpreter\n intp.interpret(name)\n return intp.last_result()", "def get_variable_name(uuid: UUID, variable_index: int) -> Optional[str]:\n scenario: Optional[AoE2DEScenario] = store.get_scenario(uuid)\n if scenario:\n if gv := get_game_version(uuid) == \"DE\":\n variable = scenario.trigger_manager.get_variable(variable_index)\n if variable:\n return variable.name\n elif 0 <= variable_index <= 255:\n return f\"Variable {variable_index}\"\n else:\n raise ValueError(f\"Scenarios with the game version: {gv} do not support variables.\")\n return None", "def _var(self, name=None, context=None):\n\t\tif name is None: name = None\n\t\tif context is None: context = self.context\n\t\tif (not name):\n\t\t\treturn context.getVariables().keys()\n\t\telif True:\n\t\t\treturn context.getVariables().get(name)", "def var(self):\n\n return time_stat(self, stat=\"var\")", "def __get_variable_from_dictionary(dictionary, variable_name):\n if variable_name not in dictionary.keys():\n dictionary[variable_name] = Variable(variable_name, None)\n return dictionary.get(variable_name)", "def retrieve_name(self, var):\r\n\t\tfor fi in reversed(inspect.stack()):\r\n\t\t\tnames = [var_name for var_name, var_val in fi.frame.f_locals.items() if var_val is var]\r\n\t\t\tif len(names) > 0:\r\n\t\t\t\treturn names[0]\r\n\t\treturn \"<unknown>\"", "def getvar(obj):\n class VarDict(dict):\n \"\"\"wrapper of var dict\"\"\"\n def __getitem__(self, key):\n # expression may be set a var in this dict\n if key in self:\n return super(VarDict, self).__getitem__(key)\n if hastag(obj, key):\n return gettag(obj, key)\n # maybe some build-in object\n try:\n return eval(key, {}, {})\n except:\n return False\n\n return VarDict()", "def _get_vartype(scalar):\n 
return VARINFO_DICT[type(scalar)]", "def print_variable(variable_name):\n # check variable type, call function 6 to check type\n if is_digit(variable_name):\n print(f\"{variable_name}\")\n return\n # check variable type, call function 5 to check type\n if not in_alphabet(f\"{variable_name}\"):\n print(f\"Syntax Error.\")\n return\n # check variable names in dictionary\n if variable_name not in lookUpTable:\n print(f\"{variable_name} is undefined.\")\n return\n # print variable names and value\n print(f\"{variable_name} equals {str(lookUpTable[variable_name])}\")", "def variable(self):", "def get_info(hass: HomeAssistant) -> dict[str, Any] | None:\n return hass.data.get(DATA_INFO)", "def info(self):\n\t\timport inspect\n\t\n\t\tmessage = \"All variables available for star ID %i\" % self.ID\t\t\n\t\tprint message\n\t\tprint '-'*len(message)\n\t\tattributes = inspect.getmembers(self, lambda a:not(inspect.isroutine(a)))\n\t\tfor a in attributes:\n\t\t\tif (a[0].startswith('__') and a[0].endswith('__')): continue\n\t\t\tprint a[0], \"=\", a[1]", "def get_var(self, tag):\n if not tag in self.env:\n print(\"ERROR: value {} is not defined yet\".format(tag))\n elif callable(self.env[tag]):\n print(\"ERROR: tried to access callable {} was a value\".format(tag))\n else:\n return self.env[tag]", "def get_mapping_variable(\n variable_name: Text, variables_mapping: VariablesMapping\n) -> Any:\n # TODO: get variable from debugtalk module and environ\n try:\n return variables_mapping[variable_name]\n except KeyError:\n raise exceptions.VariableNotFound(\n f\"{variable_name} not found in {variables_mapping}\"\n )", "def var(self):\n if self._properties[\"var\"] is None:\n self._derive_variance_()\n return self._properties[\"var\"]", "def _read_var(self, time_idx, pressure_idx, var_idx, lat_idx, lng_idx):\n offset = self.item_size * (\n time_idx * self.t_idx + pressure_idx * self.p_idx +\n var_idx * self.v_idx + lat_idx * self.l_idx + lng_idx)\n self.mm.seek(offset)\n return self.unpacker.unpack(self.mm.read(self.item_size))[0]", "def getVariable(self):\n return _libsbml.EventAssignment_getVariable(self)", "def variable_dict(self,variable):\n return [x for x in self.variable_dicts() if x['variable']==variable][0]", "def var(self, name):\n raise NotImplementedError", "def test_variables_id_get(self):\n pass", "def p(self):\n index = self.var_index(25)\n return self.var_data(index)", "def get_variable_object(self, name = None):\n if name is not None and name != \"\":\n if self.fmu is not None:\n try:\n return self.fmu.get_model_variables()[name]\n except Exception:\n logger.error(\"The variable or parameter: {0} is not available in the list: {1}\".format(name, self.fmu.get_model_variables().keys()))\n return None\n else:\n logger.error(\"The FMU model has not yet been set. 
Impossible return the variable {0}\".format(name))\n return None\n else:\n logger.error(\"Impossible to look for the name because it is None or empty\")\n return None", "def var(self) -> float:\n return self._data.var()", "def get_variable(x):\n return x.cuda() #if use_cuda else x", "def getvar(self, varname):\n try:\n val = self.get(MAIN_SECTION, varname)\n except ConfigParser.NoOptionError:\n val = VARS_OPT[varname]\n \n return val", "def lookup_variable(self, name: loxtoken.Token, expr: loxExprAST.Expr) -> object:\n distance: int = self.locals.get(expr)\n if distance is not None:\n return self.environment.get_at(distance, name.lexeme)\n else:\n return self.globals.get(name)", "def __getattr__(self, name):\n if 'all_variables' in self.__dict__ and name.upper() in self.__dict__['all_variables']:\n return self.get_variable(name)\n else:\n raise AttributeError", "def get(self, var_name):\n if var_name in self._var_names:\n iv = self._var_names.index(var_name)\n return self._vals[iv]\n elif var_name in self._params:\n return self._params[var_name]\n else:\n raise KeyError(\"Nothing found for %s in vars (%s) or params (%s)\" % (str(var_name),\n ', '.join(self._var_names),\n ', '.join(self._params.keys())))", "def get_variables(self):\n\t\treturn self.variables", "def _dlu_from_variable(variable):\n return variable.name.split('_')[0]", "def Lookup(self, var_name):\n if var_name in self._local_scope.get(\"vars\", {}):\n return self._local_scope[\"vars\"][var_name]\n raise Error(\"Var is not defined: %s\" % var_name)", "def key (self):\r\n return self.variable", "def __getitem__(self, variable: Variable) -> TermLike:\n return self.mapping[variable]", "def info(self):\n return self.__dict__[self.sid]", "def get_variable(x, volatile=False):\n tensor = torch.cuda.LongTensor(x) if CUDA else torch.LongTensor(x)\n return autograd.Variable(tensor, volatile=volatile)", "def test_variablepresentations_get(self):\n pass", "def get(self):\n return self.vars", "def createInfoVariable(self, product, variable_name):\r\n\r\n info_variable_dict = {'name': variable_name,\r\n 'dtype': None,\r\n 'vtype': 'info',\r\n 'units': None,\r\n 'ndims': None,\r\n 'shape': None}\r\n\r\n info_variable = Variable(info_variable_dict)\r\n\r\n return info_variable", "def get_variable_data():\n path = os.path.abspath('topics.json')\n with open(path) as json_data_file:\n data = json.load(json_data_file)\n return data", "def _retrieve_value(variable, data=None):\n if isinstance(variable, (pd.Series, pd.DataFrame)):\n return variable\n\n if isinstance(variable, str) or (\n isinstance(variable, Iterable) and all(isinstance(i, str) for i in variable)\n ):\n assert data is not None, \"`data` must be provided\"\n return data[variable]\n\n if isinstance(variable, Iterable) and all(\n isinstance(i, pd.Series) for i in variable\n ):\n return pd.DataFrame({s.name: s for s in variable})\n\n raise ValueError(f\"Unsupported type: {type(variable)}\")", "def __str__(self):\n return f\"Variable(type={self._type}, id={self._id}, value={self.status}, init={self.init})\"", "def get_value_var(self, var, data):\n \n #special case if the operand is boolean return it\n if isinstance(var, bool):\n return var\n \n try:\n #find the value for the key \n for key in str(var).split('.'):\n data = data[key]\n \n except (KeyError):\n # if key doesnt exist rather than returning None return the key as it is. 
This would be helpful for operands as strings\n return var\n else:\n return data", "def getStorage(self) -> ghidra.program.model.listing.VariableStorage:\n ...", "def get_variable_from_model(model,varname):\n variables = model.getVariables()\n itvar = variables.iterator()\n for i in xrange(len(variables)):\n currentvar = itvar.Next()\n if currentvar.GetName() == varname:\n return currentvar\n return None", "def getVariable(self, gradientCoordinate):\n return self.variables[gradientCoordinate]", "def get_stack_variable(self, variable_name):\n return self.parent_node.get_stack_variable(variable_name)", "def getInfo():", "def __getitem__(self, name):\n \n # Can you have a variable and a structure with the same name?\n if name in self.vars:\n return self.vars[name]\n \n name = name.upper()\n if name in self.structs:\n return self.struct[name]\n\n raise KeyError('%s not found as a variable or structure' % (name))", "def get_variable_from_model(self,modeltype,obsname):\n return get_variable_from_model(self.getmodel(modeltype),obsname)", "def lookup_var(self, var):\n if var in self.binding:\n return self.binding[var]\n elif self.parent is not None:\n return self.parent.lookup_var(var)\n else:\n raise Environment.Unbound('unbound variable \"%s\"' % var)", "def Lookup(self, var_name):\n if var_name in self._custom_vars:\n return self._custom_vars[var_name]\n elif var_name in self._local_scope.get(\"vars\", {}):\n return self._local_scope[\"vars\"][var_name]\n raise gclient_utils.Error(\"Var is not defined: %s\" % var_name)", "def getVar(tree):\n if(tree.data == \"string_expression\"):\n if(tree.children[0].data == \"string\"):\n return tree.children[0].children[0]\n elif(tree.children[0].data == \"variable\"):\n return getValue(tree.children[0].children[0])\n elif(tree.children[0].data == \"string_expression\"):\n # if the child is a string expression apply getVar again on the child\n if(len(tree.children)== 2):\n return getVar(tree.children[0])+getVar(tree.children[1])\n return getVar(tree.children[0])\n elif(tree.data == \"integer\"):\n return evalInteger(tree) \n \n elif(tree.data == \"string_list\"):\n return getStringInterior(tree.children[0],[])\n return \"ERROR\"", "def get_var(self, p_varnm):\n retval = tuple()\n (rc, rslt) = self.run_cmd(\"echo $\" + p_varnm)\n if rc:\n retval = (p_varnm, rslt.decode('UTF-8')[0:-1])\n else:\n retval = (p_varnm, '')\n return retval", "def getVariable( self, name ):\n for uniform in self.uniforms:\n if uniform.name == name:\n return uniform \n return None", "def get_variable(self, col: str, name: str, default: T = None) -> T:\n if self.scope is None:\n raise ValueError(\"Can't access variables on unbound modules\")\n return self.scope.get_variable(col, name, default)", "def get_variable_information_manager(variable_name, read_only=True):\n basedir = os.path.join(settings.get_configuration_directory(),\n variable_information.get_variable_directory_name())\n\n return variable_information.VariableInformation(\n config_file.ConfigurationFile(\n os.path.join(basedir,\n variable_information.get_information_file_name(\n variable_name)),\n variable_information.VariableInformation.config_schema,\n read_only=read_only)\n )", "def get_variable_values(self, vars):\n raise NotImplementedError()", "def get_stkvar(*args):\n return _ida_frame.get_stkvar(*args)", "def get_host_var(self, hostname, var, strict=False):\n return self.get_host_vars(hostname, strict).get(var, None)" ]
[ "0.6939153", "0.6936482", "0.69092554", "0.68890536", "0.6884928", "0.67508674", "0.6643614", "0.65952235", "0.6536052", "0.6496712", "0.64882463", "0.6455315", "0.6429799", "0.64095783", "0.63759357", "0.63599", "0.63293916", "0.6292426", "0.6292426", "0.6292426", "0.6207993", "0.6205717", "0.62009966", "0.61418355", "0.60920453", "0.6085491", "0.6071629", "0.6051697", "0.60496384", "0.60231775", "0.60169107", "0.6007024", "0.5980572", "0.59779567", "0.5973552", "0.5966859", "0.5960271", "0.59574366", "0.5951198", "0.59340036", "0.5899215", "0.5893475", "0.5883546", "0.5875823", "0.58559245", "0.5836741", "0.5812709", "0.58009046", "0.57847667", "0.5755258", "0.5751929", "0.57402235", "0.5740218", "0.5729196", "0.57126164", "0.56989706", "0.5696432", "0.56881595", "0.5687017", "0.5676994", "0.56629", "0.5650659", "0.5641579", "0.5631855", "0.5603158", "0.560118", "0.5598231", "0.55670434", "0.5557185", "0.5547417", "0.55374336", "0.553234", "0.55318344", "0.55294704", "0.55257016", "0.55245197", "0.5523187", "0.552192", "0.55172557", "0.5512727", "0.5500816", "0.5496293", "0.5491607", "0.5491034", "0.5484706", "0.5483099", "0.54804605", "0.5478382", "0.5478026", "0.546819", "0.5465783", "0.5458533", "0.54565346", "0.545646", "0.5451258", "0.5422241", "0.5421535", "0.54195905", "0.5412416", "0.54096943", "0.5406073" ]
0.0
-1
Watches a specific variable and waits for a change in the variable's value. When there is a change, this method returns the new value or times out. If a variable is deleted while being watched, the `variableState` state is set to `DELETED` and the method returns the last known variable `value`. If you set the deadline for watching to a larger value than internal timeout (60 seconds), the current variable value is returned and the `variableState` will be `VARIABLE_STATE_UNSPECIFIED`. To learn more about creating a watcher, read the [Watching a Variable for Changes](/deploymentmanager/runtimeconfigurator/watchingavariable) documentation.
def WatchVariable(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_for_variable(self, name, pred, timeout=None):\n if not hasattr(pred,\"__call__\"):\n v=pred\n if isinstance(pred,(tuple,list,set,dict)):\n pred=lambda x: x in v\n else:\n pred=lambda x: x==v\n ctl=threadprop.current_controller()\n split_name=tuple(dictionary.normalize_path(name))\n with self._params_exp_lock:\n self._params_exp.setdefault(split_name,[]).append(ctl)\n ctd=general.Countdown(timeout)\n try:\n value=self.get_variable(name)\n while True:\n if pred(value):\n return value\n value=ctl.wait_for_message(self._variable_change_tag,timeout=ctd.time_left())\n finally:\n with self._params_exp_lock:\n self._params_exp[split_name].remove(ctl)\n if not self._params_exp[split_name]:\n del self._params_exp[split_name]", "async def request_status_var_timeout(\n self, failed: bool = False, var: lcn_defs.Var | None = None\n ) -> None:\n assert var is not None\n # Detect if we can send immediately or if we have to wait for a\n # \"typeless\" response first\n has_type_in_response = lcn_defs.Var.has_type_in_response(\n var, self.addr_conn.software_serial\n )\n if not has_type_in_response:\n # Use the chance to remove a failed \"typeless variable\" request\n try:\n await asyncio.wait_for(self.last_var_lock.acquire(), timeout=3.0)\n except asyncio.TimeoutError:\n pass\n self.last_requested_var_without_type_in_response = var\n\n # Send variable request\n await self.addr_conn.send_command(\n False,\n PckGenerator.request_var_status(var, self.addr_conn.software_serial),\n )", "def poll(self) -> Tuple[np.ndarray]:\n try:\n v = self.controller.get(self.pvname)\n\n except TimeoutError:\n print(f\"No process variable found for {self.pvname}\")\n v = DEFAULT_SCALAR_VALUE\n\n return v", "def wait_for_change(self, predicate, timeout, max_age=WOERMS_UPDATE_PERIOD):\n age, original_value = self.process_predicate(predicate)\n self.wait(predicate != original_value, timeout)", "def wait(self, timeout_in_us=None, name=None):\n with ops.name_scope(name, \"wait_for_notification\",\n [self._handle]) as name:\n if timeout_in_us is None:\n timeout_in_us = -1\n timeout_in_us = ops.convert_to_tensor(\n timeout_in_us, dtype=dtypes.int64, name=\"timeout_in_us\")\n return gen_resource_variable_ops.wait_for_notification(\n self._handle, timeout_in_us=timeout_in_us, name=name)", "def update_variable(value):\n return value", "def poll(self) -> Tuple[np.ndarray]:\n t = time.time()\n try:\n v = self.controller.get(self.pvname)\n\n except TimeoutError:\n print(f\"No process variable found for {self.pvname}\")\n v = DEFAULT_SCALAR_VALUE[self.pvname]\n\n self.time = np.append(self.time, t)\n self.data = np.append(self.data, v)\n\n return self.time - self.tstart, self.data", "async def update(self, wait_time=0):\n self._last_update = now()\n await self._variables.update(wait_time)", "def watch(self, leader_index, timeout):\n\n self.event.wait(timeout)\n return self.event.isSet()", "def get(self, timeout=None):\n\t\tif timeout is None:\n\t\t\tself.event.wait()\n\t\telse:\n\t\t\tif timeout > 0:\n\t\t\t\tself.event.wait(timeout)\n\t\t\tif not self.event.is_set():\n\t\t\t\traise FutureTimedOut()\n\t\treturn self.value", "def wait_observation_time(self, time: int) -> None:\n var = tk.IntVar()\n self.master.after(time * 1000, var.set, 1)\n print(\"waiting...\")\n self.master.wait_variable(var)", "def get_variable(self, identifier):\n\t\tif not hasattr(self, \"__sdvcache\"):\n\t\t\tself.__sdvcache = {}\n\t\t\tbuffer = str(self.obj.description_variables.variables)\n\t\t\tbuffer = StringIO(buffer)\n\t\t\ttoken = 
buffer.read(1)\n\t\t\twhile token:\n\t\t\t\tif token == \"$\":\n\t\t\t\t\tvariable = self.__read_until(buffer, \"=\")\n\t\t\t\t\tvalue = self.__read_until(buffer, \"\\n\").strip()\n\t\t\t\t\tself.__sdvcache[variable] = value\n\t\t\t\ttoken = buffer.read(1)\n\t\t\n\t\tif identifier not in self.__sdvcache:\n\t\t\traise VariableNotFound(identifier)\n\t\t\n\t\treturn self.__sdvcache[identifier]", "def __getitem__(self, name):\n if name in self._variables:\n result = self._variables[name]\n\n if isinstance(result, Delayed):\n return result.get()\n return result\n\n raise VariableError(name)", "def get_variable_value(variable):\n def pipeline_from_info(variableinfo):\n controller = variableinfo._controller\n version = controller.vistrail.get_version_number(\n 'dat-var-%s' % variable.name)\n return controller.vistrail.getPipeline(version), version\n\n def pipeline_from_generator(variable_gen):\n # Get the original OutputPort module\n orig_controller = variable_gen._generator.controller\n base_pipeline = orig_controller.vistrail.getPipeline('dat-vars')\n if len(base_pipeline.module_list) != 1:\n raise ValueError(\"dat-vars version is invalid\")\n output_port = base_pipeline.module_list[0]\n\n controller = VistrailController(Vistrail())\n # OutputPort\n operations = [('add', output_port)]\n # Rest of the pipeline\n operations += variable_gen._generator.operations\n # Connection\n connection = controller.create_connection(\n variable_gen._output_module,\n variable_gen._outputport_name,\n output_port,\n 'InternalPipe')\n operations.append(('add', connection))\n # Materialize this\n action = create_action(operations)\n controller.add_new_action(action)\n version = controller.perform_action(action)\n controller.change_selected_version(version)\n assert version == controller.current_version == 1\n return controller.current_pipeline, 1\n\n # Obtain 'pipeline' and 'version' from 'variable'\n if isinstance(variable, Variable.VariableInformation):\n # Pipeline already exists\n pipeline, version = pipeline_from_info(variable)\n elif isinstance(variable, Variable):\n if variable._materialized is not None:\n # Pipeline already exists\n pipeline, version = pipeline_from_info(variable._materialized)\n else:\n # Pipeline doesn't exist\n # We need to make one from the operations\n pipeline, version = pipeline_from_generator(variable)\n else:\n raise TypeError\n\n # Setup the interpreter for execution\n interpreter = get_default_interpreter()\n interpreter.clean_non_cacheable_modules()\n interpreter.parent_execs = [None]\n res = interpreter.setup_pipeline(pipeline)\n if len(res[5]) > 0:\n raise ValueError(\"Variable pipeline has errors:\\n%s\" %\n '\\n'.join(me.msg for me in res[5].itervalues()))\n tmp_id_to_module_map = res[0]\n\n # Execute\n res = interpreter.execute_pipeline(\n pipeline,\n res[0], # tmp_id_to_module_map\n res[1], # persistent_to_tmp_id_map\n current_version=version,\n reason=\"getting variable value\")\n if len(res[2]) > 0:\n raise ValueError(\"Error while executing variable pipeline:\\n%s\" %\n '\\n'.join('%s: %s' % (me.module.__class__.__name__,\n me.msg)\n for me in res[2].itervalues()))\n if len(res[4]) > 0:\n # extract messages and previous ModuleSuspended exceptions\n raise ValueError(\"Module got suspended while executing variable \"\n \"pipeline:\\n%s\" %\n '\\n'.join(msg for msg in res[4].itervalues()))\n\n # Get the result\n outputport_desc = get_module_registry().get_descriptor_by_name(\n 'org.vistrails.vistrails.basic', 'OutputPort')\n for module in pipeline.module_list:\n if 
module.module_descriptor is outputport_desc:\n if get_function(module, 'name') == 'value':\n module_obj = tmp_id_to_module_map[module.id]\n result = module_obj.get_output('ExternalPipe')\n break\n else:\n result = None\n\n interpreter.finalize_pipeline(pipeline, *res[:-1])\n interpreter.parent_execs = [None]\n return result", "def get_status(self, vdef):\n _, status = self.loop_var_map[vdef]\n return status", "async def wait_value(self, value_or_predicate, *, held_for=0) -> VT:\n predicate = _ValueWrapper(value_or_predicate)\n while True:\n if not predicate(self._value):\n value = await self._wait_predicate(self._level_results, predicate)\n else:\n value = self._value\n await trio.sleep(0)\n if held_for > 0:\n with trio.move_on_after(held_for):\n await self.wait_value(lambda v: not predicate(v))\n continue\n break\n return value", "def _color_var_changed(self, *args):\n\n if not self._internal_color_change:\n self._variable = self.color_var.get()\n self._update()\n self._internal_color_change = False", "def replace_variable(self, variable):\r\n if variable == 'x':\r\n return self.value\r\n if variable == 't':\r\n return self.timedelta\r\n raise ValueError(\"Invalid variable %s\", variable)", "def wait_on(\n function: Callable,\n desc: str,\n timeout: timeout_type,\n delta: timeout_type = DEFAULT_POLLING_DELTA,\n polling_backoff: timeout_type = DEFAULT_POLLING_BACKOFF,\n sleep_: Optional[Callable] = None,\n):\n sleep = sleep_ or time.sleep\n total_wait = 0.0\n while True:\n if total_wait > timeout:\n raise TimeoutAssertionError(TIMEOUT_MESSAGE_TEMPLATE.format(total_wait, desc))\n value = function()\n if value is not None:\n return value\n total_wait += delta\n sleep(delta)\n delta += polling_backoff", "def monitoredVars():\n return _monitored", "def block_for_action(context, namespace, pod, var_name, expected_value, slumber=5, _max=10):\n\n for i in range(_max):\n\n try:\n actions = get_pod_actions(context, namespace, pod)\n\n var_value = actions[var_name]\n\n logging.debug(f'var_name {var_name} value is: {var_value}, expected value: {expected_value}')\n\n if var_value is not None and var_value == expected_value:\n return True\n else:\n logging.info(f'var_name is {var_name} var_value is {var_value}')\n\n except Exception as e:\n logging.error(\"Error getting action from pod\", e)\n\n logging.debug(f'sleeping {i} of {_max} for {slumber}')\n sleep(slumber)\n\n return False", "def fake_poll_until(retriever, condition=lambda value: value,\n sleep_time=1, time_out=0):\n from trove.common import exception\n slept_time = 0\n while True:\n resource = retriever()\n if condition(resource):\n return resource\n fake_sleep(sleep_time)\n slept_time += sleep_time\n if time_out and slept_time >= time_out:\n raise exception.PollTimeOut()", "def get_variable(self, request, context):\n response = GetVariableResponse()\n value = self._delegator.get_variable(request.component, request.variable)\n response.value = encode(value)\n return response", "def test_condition_value(env):\n timeouts = list([env.timeout(delay, value=delay) for delay in range(3)])\n\n def p(env, timeouts):\n results = yield env.all_of(timeouts)\n assert list(results) == timeouts\n assert list(results.keys()) == timeouts\n assert list(results.values()) == [0, 1, 2]\n assert list(results.items()) == list(zip(timeouts, [0, 1, 2]))\n assert timeouts[0] in results\n assert results[timeouts[0]] == 0\n assert results == results\n assert results == results.todict()\n\n env.process(p(env, timeouts))\n env.run()", "def visit_Variable(self, node):\n 
var_name = node.value\n val = self.VARIABLES.get(var_name)\n if val is None:\n raise NameError(repr(var_name))\n else:\n return val", "def update_variable_datetime(\n self, variable_value=None, commit=False, force=False):\n if variable_value is not None:\n new_value = variable_value\n if isinstance(new_value, (str, unicode)):\n new_value = \\\n parse_date_string('%s UTC' % new_value) \\\n or parse_date_string(new_value)\n if new_value is not None:\n if not force and self.variable_value is not None:\n current_value = parse_date_string(self.variable_value)\n if current_value is not None and new_value < current_value:\n return\n self.upsert(\n new_value.strftime('%Y-%m-%d %H:%M:%S'), commit=commit)", "def update(self, var_id, value):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n\n data = {\n 'value': value\n }\n return self.base_request.request(\n 'service_environment_variable', 'PATCH', params=params, data=data,\n endpoint=self.settings.get('pine_endpoint')\n )", "def get_watch_values(self):\r\n for index, item in enumerate(S.WATCH):\r\n # Reset value for watch expression\r\n S.WATCH[index]['value'] = None\r\n\r\n # Evaluate watch expression when connected to debugger engine\r\n if is_connected():\r\n if item['enabled']:\r\n watch_value = None\r\n try:\r\n S.SESSION.send(dbgp.EVAL, expression=item['expression'])\r\n response = S.SESSION.read()\r\n\r\n watch_value = get_response_properties(response, item['expression'])\r\n except ProtocolConnectionException:\r\n pass\r\n\r\n S.WATCH[index]['value'] = watch_value", "def update(self, var_id, value):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n data = {\n 'value': value\n }\n return self.base_request.request(\n 'device_service_environment_variable', 'PATCH', params=params, data=data,\n endpoint=self.settings.get('pine_endpoint')\n )", "def watchdog_timer(state, wait=3):\n time.sleep(wait)\n if not state['completed']:\n _thread.interrupt_main()", "def update_terraform_variable(**kwargs):\n\n create_value = kwargs[\"dag_run\"].dag_id == TerraformTasks.DAG_ID_CREATE_VM\n token = BaseHook.get_connection(AirflowConns.TERRAFORM).password\n terraform_api = TerraformApi(token)\n\n # Get variable\n workspace_id = get_workspace_id()\n variables = terraform_api.list_workspace_variables(workspace_id)\n vm = None\n vm_var = None\n for var in variables:\n if var.key == TerraformTasks.TERRAFORM_CREATE_VM_KEY:\n vm_var = var\n vm = VirtualMachine.from_hcl(var.value)\n break\n\n # Update vm create value and convert to HCL\n vm.create = create_value\n print(f\"CREATE VALUE!: {vm.create}\")\n vm_var.value = vm.to_hcl()\n\n # Update value\n terraform_api.update_workspace_variable(vm_var, workspace_id)", "def get_test_value(var, nofail=False):\n if 'theano' in sys.modules and isinstance(var, _getT().sharedvar.SharedVariable):\n retval = var.get_value()\n elif 'theano' in sys.modules and isinstance(var, _gettheano().graph.basic.Variable):\n try:\n retval = var.tag.test_value\n except AttributeError:\n if nofail:\n return None\n else:\n raise AttributeError(\"You've attempted to execute a function that \"\n \"requires a test_value for the variable {} to \"\n \"be set, and this value is not set.\".format(var))\n else:\n retval = var\n return retval", "def update(self):\n if self.api is None:\n return\n self.api.update()\n\n if self.var_type == 'Time':\n self.var_state = self.api.result['timeRelease']\n return\n\n result = self.api.result[self.var_period.lower()]\n if self.var_type == 'Sky':\n sky = result['sky']\n self.var_state = sky['name']\n 
self.var_icon = get_sky_icon(sky['code'])\n else:\n temp = result['temperature']\n if self.var_detail == 'Max':\n self.var_state = round(float(temp['tmax']), 1)\n else:\n self.var_state = round(float(temp['tmin']), 1)", "def wait(self, timeout=600):\n s = datetime.datetime.now()\n status = json.loads(self.get())\n while status['status'] != 'COMPLETE':\n status = self.get()\n e = datetime.datetime.now()\n if (e - s).seconds > timeout:\n raise RuntimeError('timeout')\n return status", "def check_timeout(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"check_timeout\")", "def check_timeout(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"check_timeout\")", "def check_timeout(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"check_timeout\")", "def update_variable(value):\n return value + 1", "def get_variable(x, volatile=False):\n tensor = torch.cuda.LongTensor(x) if CUDA else torch.LongTensor(x)\n return autograd.Variable(tensor, volatile=volatile)", "def test_variable_assign(self):\n self.trace('x = 1')\n\n events = self.variable_events\n self.assertEqual(len(events), 1)\n event = events[0]\n self.assertIsInstance(event, TraceAssign)\n self.assertEqual(event.name, 'x')\n self.assertEqual(event.value, 1)", "def get(self, timeout=None):\n self.wait(timeout)\n if not self.ready:\n raise TimeoutError(timeout)\n if self.success:\n return self.value\n else:\n raise self.value", "def wait(v, timeout=None, interval=0.5, intervalfunc=None):\n timeout = timeout or ST.FIND_TIMEOUT\n pos = loop_find(v, timeout=timeout, interval=interval, intervalfunc=intervalfunc)\n return pos", "def set_variable(self, request, context):\n response = SetVariableResponse()\n value = decode(request.value)\n self._delegator.set_variable(request.component, request.variable, value)\n return response", "def wait(self, timeout=60, use_pos=False):\n dmov = self.get_pvobj(\"done_moving\")\n if not use_pos and dmov.isinitialized:\n return dmov.wait_for_value(1, timeout)\n else:\n monpv = self.get_pvobj(\"readback\")\n goal = self.get_par(\"drive\")\n deadband = self.get_par(\"retry_deadband\")\n min = goal - abs(deadband)\n max = goal + abs(deadband)\n return monpv.wait_for_range(min, max, timeout)", "def lookup_variable_value(var, env):\n def env_loop(environment):\n \"\"\"\n calls scan on each frame in the env list\n \"\"\"\n def scan(vars, vals):\n \"\"\"\n scans variables in a frame\n \"\"\"\n if isNull(vars):\n return env_loop(enclosing_env(environment)) # 5-4: env -> environment\n elif isEq(var, car(vars)) == TRUE:\n return car(vals)\n else:\n return scan(cdr(vars), cdr(vals))\n if environment is the_empty_environment:\n raise UnboundLocalError(\"lookup_variable\")\n frame = first_frame(environment)\n return scan(frame_variables(frame), frame_values(frame))\n return env_loop(env)", "def wait_for(self, state, predicate, timeout=None):\n\n self.__lock.acquire()\n try:\n result = self.__state.value == state or predicate()\n if result:\n return result\n end_time = None if timeout is None else monotonic() + timeout\n wait_time = 1\n while not result:\n if end_time is not None:\n wait_time = min(end_time - monotonic(), 1)\n if wait_time <= 0:\n break\n result = self.__lock.wait_for(lambda: self.__state.value == state, wait_time) or predicate()\n return result\n finally:\n self.__lock.release()", "def test_variable_assign_boxed_access(self):\n self.trace('y = x', env={'x': 1})\n\n events = self.variable_events\n self.assertEqual(len(events), 2)\n self.assertIsInstance(events[0], 
TraceAccess)\n self.assertIsInstance(events[1], TraceAssign)\n self.assertEqual(events[1].value_event, events[0])", "def refresh_var_all():\n\n strtm = time.strftime(time_fmt, time.localtime())\n\n time_now = time.time() # int(time.time())\n if verbose:\n print(strtm, \"\\trefresh_var_all\", current_thread().name, \"pid=\", os.getppid())\n\n myisy.load_vars()\n\n for c in list(mac_targets.values()):\n if c.var.value != c.is_active:\n\n if verbose or delta:\n print(strtm, \"\\t>>>>Assert\", \" is_active = \", c.is_active, \"isy_var.value =\", c.var.value, c.var.name)\n\n c.var.value = c.is_active\n # c.var.set_var(c.is_active)\n c.set_var_time = time_now\n\n sys.stdout.flush()", "async def _watch_status(self, job_id, job_paths):\n status_path = job_paths['status.json']\n\n watcher = aionotify.Watcher()\n watcher.watch(status_path, aionotify.Flags.CLOSE_WRITE)\n await watcher.setup(self.loop)\n try:\n while True:\n try:\n await self._read_status(job_id, job_paths)\n await watcher.get_event()\n self.logger.debug(f'Detected status change for job {job_id}')\n except concurrent.futures.CancelledError:\n # Break loop (likely normal exit through task cancellation)\n break\n except Exception: # pylint: disable=broad-except\n self.logger.exception(f'Exception while watching status of job {job_id}')\n finally:\n watcher.unwatch(status_path)\n watcher.close()", "def get_variable(self, svc, var):\n action = \"variableget\"\n path = \"data_request?id=%s&DeviceNum=%d&serviceId=%s&Variable=%s\" \\\n % (action, self.id, svc, var)\n return self.vera.get(path)", "def timeout_change(self, timedelta):\n pass # pylint: disable=unnecessary-pass\n # For backward compatibility only.", "def updateValue(self):\n self.value = self.var.get()", "def _tkvar_changed(self,name):\n self.debug(\"_tkvar_changed(%s)\"%name)\n displayed_value = self._string2object(name,self._tkvars[name]._original_get())\n object_value = self.get_parameter_value(name) #getattr(self._extraPO,name)\n\n # use equality check then identity check because e.g. val\n # starts at 0.5, type 0.8, then type 0.5, need that to be\n # changed is False, but some types cannot be equality compared\n # (can be identity compared).\n # CEBALERT: need to add a unit test to ensure this keeps working.\n # Plus, I need to think about this, because while the above is\n # true for floats, identity tests make more sense for many types\n # (i.e. 
you want to know the object is the same).\n try:\n if displayed_value != object_value:\n changed = True\n else:\n changed = False\n except:\n if displayed_value is not object_value:\n changed = True\n else:\n changed = False\n\n self.debug(\"..._v_c return %s\"%changed)\n return changed", "def wait(self, predicate, timeout, max_age=WOERMS_UPDATE_PERIOD):\n start_time = time.time()\n self._deadline = start_time + timeout\n last_predicate = None\n age = 0\n unevaluated_predicate_str = str(predicate)\n try:\n while time.time() - start_time < timeout:\n age, value = self.process_predicate(predicate)\n pretty_predicate = str(predicate)\n if value and (age <= max_age or max_age is None):\n self._log('= %s' % pretty_predicate)\n return\n if last_predicate is None:\n self._log('Waiting for %s' % unevaluated_predicate_str)\n if pretty_predicate != last_predicate:\n self._log(' %s' % pretty_predicate,\n deadline=self._deadline)\n last_predicate = pretty_predicate\n if age > max_age and max_age is not None:\n self._log('FAIL: %s; max allowable age: %0.3f' %\n (str(predicate), max_age))\n raise StaleScoreboardEntryException, (\n '%s; max age = %0.3f' % (repr(predicate), max_age))\n else:\n self._log('FAIL: %s' % str(predicate))\n raise ScoreboardTimeout, predicate\n finally:\n self._deadline = None", "def wait_fluently(condition: Callable, timeout: TimeoutType, err_msg: str):\n if timeout is None:\n timeout = 0\n start_time = time.time()\n while True:\n res = condition()\n if res:\n return res\n if time.time() - start_time >= timeout:\n raise TimeoutException(err_msg)\n time.sleep(0.3)", "def stopped_check(self, timeout=None):", "def _get_reference_by_variable(self, var):\n if not var[0] == consts.VARIABLE:\n raise Exception('Internal error: Expected a variable, got: \"%r\"' % var)\n res = self._bindings.get(var, var)\n if res == consts.TOPIC_IN_FOCUS:\n res = self.focus\n while res[0] == consts.VARIABLE and self.parent:\n res = self.parent._get_reference_by_variable(res) #pylint: disable-msg=W0212\n if res == consts.TOPIC_IN_FOCUS:\n res = self.focus\n return res", "def wait(self, timeout):\n if not hasattr(self, '_value'):\n try:\n value = self.broker.pop_result(self, timeout=timeout)\n except KeyError:\n return False\n except TaskExpired as err:\n value = err\n self._value = value\n return hasattr(self, '_value')", "def get_wait_timeout(vm_):\n return config.get_cloud_config_value(\n \"wait_for_timeout\", vm_, __opts__, default=15 * 60, search_global=False\n )", "def variable(self):\n return _coconut_tail_call(Var, self.name)", "async def _async_poll_state_variables(\n self, service_name: str, action_names: Union[str, Sequence[str]], **in_args: Any\n ) -> None:\n service = self._service(service_name)\n if not service:\n _LOGGER.debug(\"Can't poll missing service %s\", service_name)\n return\n\n if isinstance(action_names, str):\n action_names = [action_names]\n\n changed_state_variables: List[UpnpStateVariable] = []\n\n for action_name in action_names:\n try:\n action = service.action(action_name)\n except KeyError:\n _LOGGER.debug(\n \"Can't poll missing action %s:%s for state variables\",\n service_name,\n action_name,\n )\n continue\n try:\n result = await action.async_call(**in_args)\n except UpnpResponseError as err:\n _LOGGER.debug(\n \"Failed to call action %s:%s for state variables: %r\",\n service_name,\n action_name,\n err,\n )\n continue\n\n for arg in action.arguments:\n if arg.direction != \"out\":\n continue\n if arg.name not in result:\n continue\n if arg.related_state_variable.value 
== arg.value:\n continue\n\n try:\n arg.related_state_variable.value = arg.value\n except UpnpValueError:\n continue\n changed_state_variables.append(arg.related_state_variable)\n\n if changed_state_variables:\n self._on_event(service, changed_state_variables)", "def pytest_timeout_set_timer(item, settings):", "def wait(self, deadline):\n exitst = ctypes.c_int()\n return libruss.russ_cconn_wait(self._ptr, deadline, ctypes.byref(exitst)), exitst.value", "def planning_variable(variable_type, value_range_provider_refs, nullable=False, graph_type=None,\n strength_comparator_class=None, strength_weight_factory_class=None):\n def planning_variable_function_wrapper(variable_getter_function):\n ensure_init()\n from org.optaplanner.core.api.domain.variable import PlanningVariable as JavaPlanningVariable\n variable_getter_function.__optaplannerPlanningVariable = {\n 'annotationType': JavaPlanningVariable,\n 'valueRangeProviderRefs': value_range_provider_refs,\n 'nullable': nullable,\n 'graphType': graph_type,\n 'strengthComparatorClass': strength_comparator_class,\n 'strengthWeightFactoryClass': strength_weight_factory_class\n }\n variable_getter_function.__return = variable_type.__javaClass\n return variable_getter_function\n return planning_variable_function_wrapper", "def update_waiting(self):\n if not self.inputs[0]:\n self.set_value(False, 0)\n if self.desc_value in self.struct_variables:\n struct = self.struct_variables[self.desc_value]\n if struct[\"structure\"] in (\"list\", \"array\"):\n values = self.struct_variables[self.desc_value][\"values\"]\n if self.get_value(1) in values:\n self.set_value(True, 0)\n elif struct[\"structure\"] in (\"dict\", ):\n values = self.struct_variables[self.desc_value][\"values\"].keys()\n if self.get_value(1) in values:\n self.set_value(True, 0)\n self.state = ACTIVE\n elif self.variant(map(lambda x: x is not None, self.get_value(0, True))):\n self.set_value(False, 0)\n for value in self.get_value(0, True):\n if self.get_value(1) == value:\n self.set_value(True, 0)\n break\n self.state = ACTIVE", "def testOptimizationWithCapturedRefVar(self, dataset_fn):\n variable = variable_scope.get_variable(\n \"v\", initializer=0, use_resource=False)\n assign_op = variable.assign_add(1)\n unoptimized_dataset = dataset_fn(variable)\n\n options = options_lib.Options()\n options.experimental_optimization.apply_default_optimizations = False\n options.experimental_optimization.noop_elimination = True\n options.experimental_optimization.map_and_batch_fusion = True\n options.experimental_optimization.warm_start = False\n optimized_dataset = unoptimized_dataset.with_options(options)\n optimized_it = dataset_ops.make_initializable_iterator(optimized_dataset)\n\n # Check that outputs are the same in the optimized and unoptimized cases,\n # when the variable value is changing.\n unoptimized_it = dataset_ops.make_initializable_iterator(\n unoptimized_dataset)\n with ops.control_dependencies([assign_op]):\n unoptimized_output = unoptimized_it.get_next()\n optimized_output = optimized_it.get_next()\n\n self.evaluate(variable.initializer)\n self.evaluate((unoptimized_it.initializer, optimized_it.initializer))\n while True:\n try:\n unoptimized, optimized = self.evaluate((unoptimized_output,\n optimized_output))\n self.assertEqual(unoptimized, optimized)\n except errors.OutOfRangeError:\n break", "def set_variable(self, name, value, notify=False, notify_tag=\"changed/*\"):\n split_name=tuple(dictionary.normalize_path(name))\n notify_list=[]\n with self._params_val_lock:\n if name in 
self._params_funcs:\n del self._params_funcs[name]\n self._params_val.add_entry(name,value,force=True)\n for exp_name in self._params_exp:\n if exp_name==split_name[:len(exp_name)] or split_name==exp_name[:len(split_name)]:\n notify_list.append((self._params_val[exp_name],self._params_exp[exp_name]))\n for val,lst in notify_list:\n for ctl in lst:\n ctl.send_message(self._variable_change_tag,val)\n if notify:\n notify_tag.replace(\"*\",name)\n self.send_signal(\"any\",notify_tag,value)", "def add_watched_var(cls, step_name, worker_id):\n cls.__variables__.add(\"{}.{}\".format(step_name, worker_id))", "def wait(self, timeout=None):\n if self.counter > 0:\n return self.counter\n\n self._wait(timeout) # return value irrelevant, whether we got it or got a timeout\n return self.counter", "def update_variable_by_id(self, id, request):\n\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.body_wrapper import BodyWrapper\n\t\texcept Exception:\n\t\t\tfrom .body_wrapper import BodyWrapper\n\n\t\tif not isinstance(id, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)\n\t\t\n\t\tif request is not None and not isinstance(request, BodyWrapper):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables/'\n\t\tapi_path = api_path + str(id)\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)\n\t\thandler_instance.set_content_type('application/json')\n\t\thandler_instance.set_request(request)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def _wait_for_vdu_state(self, vdu_id, state, timeout=300):\n state = state.lower()\n current_state = 'unknown'\n while current_state != state:\n rc, vdu_info = self.cal.get_vdu(self.account, vdu_id)\n\n assert rc == RwTypes.RwStatus.SUCCESS\n current_state = vdu_info.state.lower()\n\n if current_state in ['failed']:\n raise ValidationError('VM [{}] entered failed state while waiting for state [{}]'.format(vdu_id, state))\n\n if current_state != state:\n time.sleep(1)\n\n if current_state != state:\n raise TimeoutError('VM [{}] failed to reach state [{}] within timeout [{}]'.format(vdu_id, state, timeout))\n\n return vdu_info", "def get(self, name, **valuefilter):\n if not valuefilter:\n valuefilter = self.valuefilter\n varobj = Variable(name, **valuefilter)\n value = varobj.get(gid=self.gid)\n return value", "def test_variable_assign_boxed_return(self):\n env = self.trace('foo = objects.Foo()')\n\n events = self.variable_events\n event = next(evt for evt in events if isinstance(evt, TraceAssign))\n self.assertEqual(event.name, 'foo')\n self.assertEqual(event.value, env['foo'])\n self.assertIsInstance(event.value_event, TraceReturn)\n self.assertEqual(event.value_event.function, objects.Foo)\n self.assertFalse(event.value_event.multiple_values)", "def state_changed(target, new_value, old_value, initiator):\n\n if (new_value == _WorkState.RUNNING and\n (old_value not in [_WorkState.RUNNING, _WorkState.PAUSED] or\n target.time_started == None)):\n target.time_started = datetime.utcnow()\n 
target.time_finished = None\n\n elif new_value in (_WorkState.DONE, _WorkState.FAILED):\n target.time_finished = datetime.utcnow()", "def change_var(self, var):\n return _coconut_tail_call(self.__class__, var, self.elem.substitute({self.var: var}))", "def wait_for_notification(self, device_id, resource_path, expected_value, timeout=30, assert_errors=False):\n expected_value = str(expected_value)\n for _ in range(timeout):\n for item in self.ws.events['notifications']:\n if item['ep'] == device_id and item['path'] == resource_path and \\\n base64.b64decode(item['payload']).decode('utf8') == expected_value:\n log.info('Expected notification value \"{}\" received at callback'.format(expected_value))\n return item\n sleep(1)\n if assert_errors:\n assert False, 'Failed to receive notification'\n return False", "def wait_condition(cond, timeout=1, sleeptime=.01):\n # NOTE Increasing sleeptime can dramatically increase testsuite runtime\n # It also reduces CPU load significantly\n if timeout is None:\n timeout = 1\n\n if timeout < sleeptime:\n print(\"Warning, timeout cannot be smaller than\", sleeptime)\n timeout = sleeptime\n\n # Max number of attempts until giving up\n tries = int(timeout / sleeptime)\n\n for i in range(tries):\n val = cond()\n\n if val is not None:\n break\n\n sleep(sleeptime)\n\n return val", "def wait_condition(cond, timeout=1, sleeptime=.01):\n # NOTE Increasing sleeptime can dramatically increase testsuite runtime\n # It also reduces CPU load significantly\n if timeout is None:\n timeout = 1\n\n if timeout < sleeptime:\n print(\"Warning, timeout cannot be smaller than\", sleeptime)\n timeout = sleeptime\n\n # Max number of attempts until giving up\n tries = int(timeout / sleeptime)\n\n for i in range(tries):\n val = cond()\n\n if val is not None:\n break\n\n sleep(sleeptime)\n\n return val", "def remove_watched_var(cls, step_name, worker_id):\n key = \"{}.{}\".format(step_name, worker_id)\n if key in cls.__variables__:\n cls.__variables__.remove(key)", "def wait_for_js_variable_truthy(variable):\r\n javascript = \"\"\"\r\n var callback = arguments[arguments.length - 1];\r\n var unloadHandler = function() {{\r\n callback(\"unload\");\r\n }}\r\n addEventListener(\"beforeunload\", unloadHandler);\r\n addEventListener(\"unload\", unloadHandler);\r\n var intervalID = setInterval(function() {{\r\n try {{\r\n if({variable}) {{\r\n clearInterval(intervalID);\r\n removeEventListener(\"beforeunload\", unloadHandler);\r\n removeEventListener(\"unload\", unloadHandler);\r\n callback(true);\r\n }}\r\n }} catch (e) {{}}\r\n }}, 10);\r\n \"\"\".format(variable=variable)\r\n for _ in range(5): # 5 attempts max\r\n try:\r\n result = world.browser.driver.execute_async_script(dedent(javascript))\r\n except WebDriverException as wde:\r\n if \"document unloaded while waiting for result\" in wde.msg:\r\n result = \"unload\"\r\n else:\r\n raise\r\n if result == \"unload\":\r\n # we ran this on the wrong page. 
Wait a bit, and try again, when the\r\n # browser has loaded the next page.\r\n world.wait(1)\r\n continue\r\n else:\r\n return result", "async def read_variable_by_name(self, name: str) -> Optional[Variable]:\n try:\n response = await self._client.get(f\"/variables/name/{name}\")\n return pydantic.parse_obj_as(Variable, response.json())\n except httpx.HTTPStatusError as e:\n if e.response.status_code == status.HTTP_404_NOT_FOUND:\n return None\n else:\n raise", "def _wait_for_state(self, task_id, current, new):\n state = result = None\n start = time.time()\n # 45 seconds is a long time.\n # TODO(maruel): Make task_runner use exponential backoff instead of\n # hardcoded 10s/30s, which makes these tests tremendously slower than\n # necessary.\n # https://crbug.com/825500\n while time.time() - start < 45.:\n result = self.client.task_result(task_id)\n state = result[u'state']\n if state == new:\n break\n self.assertEqual(current, state, result)\n time.sleep(0.01)\n self.assertEqual(new, state, result)\n return result", "def timeout(self) -> Optional[pulumi.Input[float]]:\n return pulumi.get(self, \"timeout\")", "def timeout(self) -> Optional[pulumi.Input[float]]:\n return pulumi.get(self, \"timeout\")", "def process_event(self, event):\n if not self.frozen:\n if event[\"event\"] == self.event:\n if self.what is None or event[\"target\"].startswith(self.what):\n self._varstate = event\n try:\n for key in self.subval:\n self._varstate = self._varstate[key]\n\n if bridgectl.log:\n bridgectl.log.debug(\n \"New value for {} is {}\".format(\n self.name,\n self._varstate))\n except Exception as e:\n if bridgectl.log:\n bridgectl.log.critical(\n \"Failed to process event for rule {}\".format(\n self.name),\n exc_info=(type(e),\n e,\n e.__traceback__))\n pass\n if event['event'] == 'time tick':\n if self.period in event[\"starts\"]:\n self._varstate = self.reset()", "def test_variable_access(self):\n self.trace('x', env={'x': 1})\n\n events = self.variable_events\n self.assertEqual(len(events), 1)\n event = events[0]\n self.assertIsInstance(event, TraceAccess)\n self.assertEqual(event.name, 'x')\n self.assertEqual(event.value, 1)", "def getVariable(self):\n return _libsbml.EventAssignment_getVariable(self)", "def process_event(self, event):\n if not self.frozen:\n if event[\"event\"] == self.event:\n if self.what is None or event[\"target\"].startswith(self.what):\n self._varstate = event\n try:\n for key in self.subval:\n self._varstate = self._varstate[key]\n\n if bridgectl.log:\n bridgectl.log.debug(\n \"New value for {} is {}\".format(\n self.name,\n self._varstate))\n except Exception as e:\n if bridgectl.log:\n bridgectl.log.critical(\n \"Failed to process event for {}\".format(\n self.name),\n exc_info=(type(e),\n e,\n e.__traceback__))\n pass\n if event['event'] == 'time tick':\n if self.period in event[\"starts\"]:\n self._varstate = self.reset()", "def get_notification():\n condition.acquire()\n if not notifications:\n ret = condition.wait(2)\n if not ret:\n condition.release()\n raise TimeoutError(\"Timed out while waiting for notification\")\n\n notice = notifications.pop(0)\n condition.release()\n return notice", "def settimeout(self, value: int) -> None:\n ...", "def wait_on_object_state ( aws_object, target_state, sleep_time = 10, max_wait = 300, failure_state = None ) :\n while aws_object.state != target_state :\n if aws_object.state == failure_state :\n break\n if max_wait <= 0 :\n break\n time.sleep( sleep_time )\n max_wait -= sleep_time\n aws_object.update( )\n\n return 
aws_object.state == target_state", "def _on_value_msg(self, variable_name, recv_msg, t):\n if self._state == \"values\":\n if self.logger.isEnabledFor(logging.DEBUG):\n self.logger.debug(\n f\"Received variable value {recv_msg.value} from {variable_name}\"\n )\n self._handle_value_message(variable_name, recv_msg)\n else:\n if self.logger.isEnabledFor(logging.DEBUG):\n self.logger.debug(\n f\"Postponing variable value {recv_msg.value} from {variable_name}\"\n )\n self.__postponed_value_messages__.append((variable_name, recv_msg))", "def createServerVar(varname, value, notify=False, description=\"A custom cvar.\"):\n sv = es.ServerVar(varname, value)\n \n if notify:\n es.flags(\"add\", \"notify\", varname)\n return sv", "async def getVariable(ctx, var):\n if ctx.message.channel.name != \"staff\":\n await ctx.send(\"You can only fetch variables from the staff channel.\")\n else:\n await ctx.send(\"Attempting to find variable.\")\n try:\n variable = globals()[var]\n await ctx.send(f\"Variable value: `{variable}`\")\n except:\n await ctx.send(f\"Can't find that variable!\")", "def get_variable(self, name, default=None, init=None, init_on_each_run=False):\n if name not in self._variables:\n self.init_variable(name, default, init, init_on_each_run)\n var = self._variables.get(name)\n return var.get('value', default)", "def test_obtain_read_lock_when_variable_unreadable(self):\n\n transaction = Transaction(\"T1\", TransactionType.READ_WRITE, 1)\n instruction = Instruction(\"R(T1, x2)\")\n variable = self.data_manager.variables[\"x2\"]\n variable.readable = False\n self.data_manager.variables[\"x2\"] = variable\n self.assertFalse(self.data_manager.variables[\"x2\"].readable)\n self.assertFalse(self.data_manager.obtain_read_lock(transaction, instruction))", "def get(self, var, verbose=False, timeout=-1):\r\n if isinstance(var, str):\r\n var = [var]\r\n # make sure the variable(s) exist\r\n for variable in var:\r\n if self._eval(\"exist {0}\".format(variable),\r\n verbose=False) == 'ans = 0' and not variable == '_':\r\n raise Oct2PyError('{0} does not exist'.format(variable))\r\n argout_list, save_line = self._reader.setup(len(var), var)\r\n self._eval(save_line, verbose=verbose, timeout=timeout)\r\n return self._reader.extract_file(argout_list)", "def test_condition_waiter(self):\n\n class Holder(object):\n def __init__(self, start_value, max_value):\n self.value = start_value\n self.max_value = max_value\n self.num_calls = 0\n\n def my_check(holder):\n holder.num_calls += 1\n if holder.value == holder.max_value:\n return True\n holder.value += 1\n\n holder = Holder(1, 10)\n event = Event()\n condition = ConditionWaiter(event, my_check, holder)\n condition.start()\n self.assertTrue(event.wait(2))\n condition.stop()\n self.assertEqual(holder.num_calls, 10 - 1 + 1)\n\n holder = Holder(4, 10)\n event = Event()\n condition = ConditionWaiter(event, my_check, holder)\n condition.start()\n self.assertTrue(event.wait(3))\n condition.stop()\n self.assertEqual(holder.num_calls, 10 - 4 + 1)\n\n holder = Holder(1, 10)\n event = Event()\n condition = ConditionWaiter(event, my_check, holder)\n condition.start()\n self.assertFalse(event.wait(0.0001))\n condition.stop()", "def test_update_wait():\n wait = '10 seconds'\n config_info = read_config()\n config_info['wait'] = wait\n open(config_file, 'w').close()\n with open(config_file, 'r+') as conf:\n conf.write(json.dumps(config_info))\n config_info = read_config()\n\n assert config_info['wait'] == wait", "def get_assigned_value(self, var) :\n return 
self.assigned_values.get(var, None)" ]
[ "0.63492364", "0.60993075", "0.55610037", "0.5458114", "0.5403095", "0.52911294", "0.52841663", "0.5238075", "0.50422585", "0.49651515", "0.48968446", "0.48794204", "0.48586887", "0.48463622", "0.4791569", "0.4787551", "0.46988493", "0.46986935", "0.46816027", "0.46677256", "0.46585143", "0.4656395", "0.46349782", "0.4623188", "0.46217912", "0.4621076", "0.4615054", "0.46019378", "0.4588629", "0.45793837", "0.45667425", "0.4564183", "0.45527193", "0.4543405", "0.45371976", "0.45371976", "0.45222306", "0.45150796", "0.45074975", "0.4504623", "0.4502974", "0.45023263", "0.4500432", "0.4495602", "0.44755808", "0.44455144", "0.4427981", "0.44139954", "0.44123033", "0.44002366", "0.43998116", "0.4394105", "0.4394072", "0.43696108", "0.4368123", "0.4362747", "0.43609688", "0.435496", "0.43416804", "0.43388718", "0.43364194", "0.43363333", "0.4335792", "0.4335431", "0.4329247", "0.4328682", "0.4326025", "0.43094525", "0.43082607", "0.4306285", "0.43059704", "0.43000817", "0.42899013", "0.42885044", "0.4263127", "0.42584226", "0.4249465", "0.4249465", "0.42471138", "0.4247073", "0.42452702", "0.42352092", "0.42341113", "0.42341113", "0.42330524", "0.422861", "0.42284507", "0.42200524", "0.42195043", "0.42164528", "0.42125833", "0.42019874", "0.42012355", "0.41988063", "0.41978315", "0.41871545", "0.4186665", "0.41864082", "0.4184394", "0.4178549" ]
0.46302924
23
Creates a variable within the given configuration. You cannot create a variable with a name that is a prefix of an existing variable name, or a name that has an existing variable name as a prefix. To learn more about creating a variable, read the [Setting and Getting Data](/deploymentmanager/runtimeconfigurator/setandgetvariables) documentation.
def CreateVariable(self, request, context):
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, app_id, service_name, env_var_name, value):\n\n if not _is_valid_env_var_name(env_var_name):\n raise exceptions.InvalidParameter('env_var_name', env_var_name)\n\n services = self.service.get_all_by_application(app_id)\n service_id = [i['id'] for i in services if i['service_name'] == service_name]\n\n data = {\n 'name': env_var_name,\n 'value': value,\n 'service': service_id\n }\n return json.loads(self.base_request.request(\n 'service_environment_variable', 'POST', data=data,\n endpoint=self.settings.get('pine_endpoint')\n ).decode('utf-8'))", "def make_variable(self, name=None):\r\n return self.Variable(self, name=name)", "def make_variable(self, name=None):\r\n return self.Variable(self, name=name)", "def define_variable(self, var, value):\n self.namespace[var] = value", "def make_variable(self, name = None):\r\n return self.Variable(self, name = name)", "def createServerVar(varname, value, notify=False, description=\"A custom cvar.\"):\n sv = es.ServerVar(varname, value)\n \n if notify:\n es.flags(\"add\", \"notify\", varname)\n return sv", "def variable_factory(p, variable_name):\n if isinstance(variable_name, (Variable,)):\n return variable_name\n if not hasattr(p, \"variable_mapping\"):\n setattr(p, \"variable_mapping\", {})\n if variable_name not in p.variable_mapping:\n p.variable_mapping[variable_name] = Variable(variable_name)\n return p.variable_mapping[variable_name]", "def update_terraform_variable(**kwargs):\n\n create_value = kwargs[\"dag_run\"].dag_id == TerraformTasks.DAG_ID_CREATE_VM\n token = BaseHook.get_connection(AirflowConns.TERRAFORM).password\n terraform_api = TerraformApi(token)\n\n # Get variable\n workspace_id = get_workspace_id()\n variables = terraform_api.list_workspace_variables(workspace_id)\n vm = None\n vm_var = None\n for var in variables:\n if var.key == TerraformTasks.TERRAFORM_CREATE_VM_KEY:\n vm_var = var\n vm = VirtualMachine.from_hcl(var.value)\n break\n\n # Update vm create value and convert to HCL\n vm.create = create_value\n print(f\"CREATE VALUE!: {vm.create}\")\n vm_var.value = vm.to_hcl()\n\n # Update value\n terraform_api.update_workspace_variable(vm_var, workspace_id)", "def create(self, uuid, service_name, env_var_name, value):\n\n if not _is_valid_env_var_name(env_var_name):\n raise exceptions.InvalidParameter('env_var_name', env_var_name)\n\n device = self.device.get(uuid)\n services = self.service.get_all_by_application(device['belongs_to__application']['__id'])\n service_id = [i['id'] for i in services if i['service_name'] == service_name]\n if service_id:\n service_installs = self.service_install.get_all_by_device(device['id'])\n service_install_id = [i['id'] for i in service_installs if i['installs__service']['__id'] == service_id[0]]\n\n data = {\n 'service_install': service_install_id[0],\n 'name': env_var_name,\n 'value': value\n }\n\n return json.loads(self.base_request.request(\n 'device_service_environment_variable', 'POST', data=data,\n endpoint=self.settings.get('pine_endpoint')\n ).decode('utf-8'))\n else:\n raise exceptions.ServiceNotFound(service_name)", "def set_var(self, var_name, var_data, metadata={}):\n var_name = var_name.strip()\n\n # Get any old metadata\n if var_name in self.variables:\n Old_Var = getattr(self, var_name)\n if hasattr(Old_Var, \"metadata\"):\n md_old = Old_Var.metadata\n for key in md_old:\n if key not in metadata:\n metadata[key] = md_old[key]\n\n # Create a new Variable and set it to self\n Var = inp_types.Variable(var_name, var_data, metadata)\n\n setattr(self, var_name, 
Var)\n\n # Append the variable to the list of variables\n if var_name not in self.variables:\n self.variables.append(var_name)\n\n return Var", "def define_var(self, var, value):\n self.binding[var] = value", "def _create_embedding_variable(self, name, initial_value):\n if name not in self._tls._embed_variables:\n embed_var = tf.Variable(\n initial_value,\n name=name + str(threading.get_ident()),\n shape=(None, None),\n dtype=tf.float32,\n trainable=False,\n )\n self._tls._embed_variables[name] = embed_var\n else:\n embed_var = self._tls._embed_variables[name]\n embed_var.assign(initial_value)\n return embed_var", "def DefineVariable(self, var_name: str, shape: tuple, dtype: type): \n adios_var = self.IO.DefineVariable(var_name, np.zeros(shape, dtype),\n shape, len(shape) * [0], shape, adios2.ConstantDims)\n self.var_dict.update({var_name: (adios_var, shape, dtype)})\n return(adios_var)", "def add_variable(self, name, domain):\n name = str(name)\n vnode = VariableNode(name, domain)\n if name in self.vs:\n raise RuntimeError(\"Variable '{0}' already defined\".format(name))\n self.vs[name] = vnode\n return vnode", "def variable(initializer=None, shape=None, dtype=None, name=None, **kwargs):\n return get_var(name, shape=shape, dtype=dtype, initializer=initializer, **kwargs)", "def Variable(name):\n placeholder_node = placeholder_op()\n placeholder_node.name = name\n return placeholder_node", "def _create(self, variables):\n required_vars = ['container']\n variables_dict = self._get_vars(variables, required=required_vars)\n\n container_name = variables_dict.pop('container')\n container_data = self._create_container(container_name=container_name)\n\n if not container_data:\n container_data = self.swift.head_container(container_name)\n\n return self._facts(facts=[container_data])", "def create_variables(self, request):\n\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.body_wrapper import BodyWrapper\n\t\texcept Exception:\n\t\t\tfrom .body_wrapper import BodyWrapper\n\n\t\tif request is not None and not isinstance(request, BodyWrapper):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables'\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_POST)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_CREATE)\n\t\thandler_instance.set_content_type('application/json')\n\t\thandler_instance.set_request(request)\n\t\thandler_instance.set_mandatory_checker(True)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def register_var(tiling_var, val):\n globals()[tiling_var] = val", "def _find_or_create_variable(self, cname, vname, source):\n try:\n var = self.model.get_variable_by_name(cname, source.name)\n raise KeyError()\n except KeyError:\n # Have we created it already?\n try:\n var = self.model.get_variable_by_name(cname, vname)\n except KeyError:\n # Create it and add to model\n units = source.component.get_units_by_name(source.units)\n var = self.add_variable(cname, vname, units)\n return var", "def read_variables(self, dataset):\n if 'variables' in self.configs:\n for variable_name, variable_dict in self.configs['variables'].items():\n if 
variable_name not in dataset.variables:\n temp_var = dataset.createVariable(variable_name, self.configs['variables'][variable_name]['data_type'])\n temp_var[:] = self.configs['variables'][variable_name]['value']\n \n for key, value in variable_dict.items():\n if (key != 'data_type') and (key != 'value'):\n setattr(temp_var, key, value)", "def add_variable(self, name, var):\n self.variables.append(_3ds_named_variable(name, var))", "def add_variable(self, name, var):\n self.variables.append(_3ds_named_variable(name, var))", "def set_variable_value():\n\n mp_rp_conf_file = 'entitlement-tests/CCI/ReportPortal/mp_rp_conf.json'\n \n # 1. Set project name which is just the test product name with upper case letter\n cmd = \"sed -i -e 's/PROJECT_NAME/{0}/g' {1}\".format(test_product.upper(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 2. Set launch name\n # Launch name examples - Errata-49798_RHEL7_Server_x86_64_Full_ProdCDN; Errata-53717_RHEL8_x86_64_Full_ProdCDN\n cmd = \"sed -i -e 's/LAUNCH_NAME/{0}/g' {1}\".format(get_launch_name(), mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 3. Set variables value in description of launch\n # a) Set Errata url in description of launch\n errata_url = \"[{0}](https:\\/\\/errata.devel.redhat.com\\/advisory\\/{1})\".format(errata_id, errata_id)\n cmd = \"sed -i -e 's/ERRATA_URL/{0}/g' {1}\".format(errata_url, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # b) Set jenkins job url in description of launch\n build_id = build_url.strip('/').split('/')[-1]\n build_url_str = \"[{0}]({1})\".format(build_id, build_url.replace(\"/\",\"\\/\"))\n \n cmd = \"sed -i -e 's/BUILD_URL/{0}/g' {1}\".format(build_url_str, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)\n \n # 4. 
Set launch tag\n # Tag examples - OpenStack16; Ceph3; CNV2\n cmd = \"cat product_version.txt\"\n (ret, output) = commands.getstatusoutput(cmd)\n \n cmd = \"sed -i -e 's/LAUNCH_TAG/{0}{1}/g' {2}\".format(test_product, output, mp_rp_conf_file)\n (ret, output) = commands.getstatusoutput(cmd)", "def assign_variable(self, name, value):\n return self.set_variable(name, value)", "def create_variable(name, size, number_of_nodes, type, casadi_type = 'SX'):\n SX_var = SX.sym('SX_'+name, size)\n opc_var = []\n\n ns = 0\n if type == \"STATE\":\n ns = number_of_nodes\n elif type == \"CONTROL\":\n ns = number_of_nodes-1\n elif type == \"FINAL_STATE\":\n ns = 1\n\n if casadi_type is 'MX':\n for i in range(ns):\n opc_var.append(MX.sym(name + str(i), SX_var.size1()))\n elif casadi_type is 'SX':\n for i in range(ns):\n opc_var.append(SX.sym(name + str(i), SX_var.size1()))\n else:\n raise Exception('casadi_type can be only SX or MX')\n\n return SX_var, opc_var", "def define_vars(vars, namespace=None):\n\t# TODO: support namespacing via nested dictionaries\n\tif namespace is None:\n\t\tprefix = \"\"\n\telse:\n\t\tprefix = namespace + \"/\"\n\treturn \"\\\\setvalue{%s}\" % \", \".join([\n\t\t\"%s = %s\" % (prefix + key, value)\n\t\tfor (key, value) in vars.items()\n\t])", "def mk_var(self, name, type_):\n # type: (str, ty.Type) -> expr.Var\n\n var = expr.Var(name, type_)\n self.var_scopes[0].appendleft((name, var))\n return var", "def _var(self,\n name,\n shape,\n collections=None,\n initializer=None,\n trainable=True,\n device='/cpu:0',\n dtype=tf.float32):\n collections = set(collections)\n collections.add(GKeys.GLOBAL_VARIABLES)\n var = tf.contrib.framework.variable(\n name=name,\n shape=shape,\n collections=list(collections),\n initializer=initializer,\n trainable=trainable,\n device=device,\n dtype=dtype\n )\n if GKeys.TRAIN_OP not in collections:\n tf.contrib.framework.add_model_variable(var)\n return var", "def _create_slot_variable(self, layer_name, slot_name, initial_value):\n embed_var = self._get_embedding_variable(layer_name)\n if embed_var is None:\n raise RuntimeError(\n \"Embedding variable for layer %s should be already created.\"\n % (layer_name)\n )\n slot_var = self._init_slot_variable(\n layer_name, embed_var, slot_name, initial_value\n )\n self._update_slot_variable_to_optimizer(slot_name, embed_var, slot_var)\n\n return slot_var", "def addVariable(self, name, value, save = False):\r\n setattr(self, name, value)\r\n if save and name not in self.variables:\r\n self.variables.append(name)", "def variable_creation_on_device(name,\n shape,\n initializer,\n parameter_update_device='-1'):\n with tf.device(parameter_update_device):\n dtype = tf.float32\n var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)\n return var", "def createSensorVariable(self, product, variable_name):\r\n\r\n sensor_variable_dict = {'name': variable_name,\r\n 'dtype': None,\r\n 'vtype': 'sensor',\r\n 'units': None,\r\n 'ndims': None,\r\n 'shape': None}\r\n\r\n sensor_variable = Variable(sensor_variable_dict)\r\n\r\n return sensor_variable", "def createDataVariable(self, product, variable_name):\r\n\r\n data_variable_dict = {'name': variable_name,\r\n 'dtype': None,\r\n 'vtype': 'data',\r\n 'units': None,\r\n 'ndims': None,\r\n 'shape': None,\r\n 'wavelength': None,\r\n 'bandwidth': None,\r\n 'srf': None}\r\n\r\n data_variable = SpectralVariable(data_variable_dict)\r\n\r\n return data_variable", "def Variable(val, dtype = None, name=\"Variable\", shape = None):\r\n placeholder_node = 
placeholder_op()\r\n placeholder_node.name = name\r\n if dtype!=None:\r\n global_variables[placeholder_node] = np.array(val).astype(dtype)\r\n else:\r\n global_variables[placeholder_node] = np.array(val)\r\n return placeholder_node", "def mk_VAR(v):\n if not v.is_var():\n raise InvalidDerivationException(\"mk_VAR\")\n return Thm([], Const(\"_VAR\", TFun(v.T, BoolType))(v))", "def set_custom_variable(self, key, value):\n self.logger.info(\"Set custom variable : %s:%s\" % (key, value))\n\n try:\n if 'custom_variables' not in self._answer_payload:\n self._answer_payload['custom_variables'] = {}\n self._answer_payload['custom_variables'][key] = value\n except Exception as e:\n self.logger.error(\"Error on set custom variables : %s\" % e)", "def copy_var(self, name, val):\n return self.env_copy({name: val})[name]", "def addVariable(self, varName, file, var, axesArgString):\n # If the variable is defined, replace existing variable, else create a new variable\n if self.isVariableDefined(varName):\n item = self.getItem(varName)\n item.setVariable(var)\n item.setFile(file)\n else:\n item = QDefinedVariableItem(file, var, varName)\n self.varList.addItem(item)\n\n # Recording define variable teaching command\n self.recordDefineVariableTeachingCommand(varName, var.id, file, axesArgString)\n\n # emit signal to QVariableView to create a new axisList / tab\n self.emit(QtCore.SIGNAL('setupDefinedVariableAxes'), file, var, varName)", "def get_variable_create(**kwargs) -> bool:\n\n token = BaseHook.get_connection(AirflowConns.TERRAFORM).password\n terraform_api = TerraformApi(token)\n\n # Add variable to workspace\n workspace_id = get_workspace_id()\n variables = terraform_api.list_workspace_variables(workspace_id)\n vm = None\n for var in variables:\n if var.key == TerraformTasks.TERRAFORM_CREATE_VM_KEY:\n vm = VirtualMachine.from_hcl(var.value)\n break\n\n # create_vm dag run and terraform create variable is already true (meaning VM is already on) or\n # destroy_vm dag run and terraform destroy variable is already false (meaning VM is already off)\n vm_is_on = vm.create\n print(f\"DAG RUN: {kwargs['dag_run'].dag_id}\")\n print(f\"VM state: {vm_is_on}\")\n\n if (kwargs[\"dag_run\"].dag_id == TerraformTasks.DAG_ID_CREATE_VM and vm_is_on) or (\n kwargs[\"dag_run\"].dag_id == TerraformTasks.DAG_ID_DESTROY_VM and not vm_is_on\n ):\n logging.info(f'VM is already in this state: {\"on\" if vm_is_on else \"off\"}')\n return False\n\n logging.info(f'Turning vm {\"off\" if vm.create else \"on\"}')\n return True", "def make_var_constant(self, var, value):\n self.remove_definition(var)\n var.clear_dependency_info()\n var.initial_value = unicode(str(value))\n var._set_type(VarTypes.Constant)", "def variable_on_cpu(name, shape, initializer):\n # Use the /cpu:0 device for scoped operations\n with tf.device('/cpu:0'):\n # Create or get apropos variable\n var = tfv1.get_variable(name=name, shape=shape, initializer=initializer)\n return var", "def _create_or_match_memory_variable(var):\n global _MEMORY_VARIABLE_LIST\n var_list = [a[0] for a in _MEMORY_VARIABLE_LIST]\n if var in var_list:\n i = var_list.index(var)\n return _MEMORY_VARIABLE_LIST[i][1]\n else:\n memory_var = tk.StringVar()\n _MEMORY_VARIABLE_LIST.append([var,memory_var])\n return memory_var", "def __setattr__(self, name, value):\n if not name.endswith(\"_\"):\n self.__dict__[name] = value\n else:\n varname = name[:-1]\n if varname in self._varlist:\n self[:, self._varlist.index(varname)] = value\n else:\n self.append_var(varname, value)", "def 
add_variables(ds, variables_dict, dim_sizes_dict):\n\n du = DatasetUtil()\n\n for variable_name in variables_dict.keys():\n\n variable_attrs = variables_dict[variable_name]\n\n # Check variable definition\n TemplateUtil._check_variable_definition(variable_name, variable_attrs)\n\n # Unpack variable attributes\n dtype = variable_attrs[\"dtype\"]\n dim_names = variable_attrs[\"dim\"]\n attributes = deepcopy(variable_attrs[\"attributes\"]) if \"attributes\" in variable_attrs else None\n\n # Determine variable shape from dims\n try:\n dim_sizes = TemplateUtil._return_variable_shape(dim_names, dim_sizes_dict)\n except KeyError:\n raise KeyError(\"Dim Name Error - Variable \" + variable_name + \" defined with dim not in dim_sizes_dict\")\n\n # Create variable and add to dataset\n if dtype == \"flag\":\n flag_meanings = attributes.pop(\"flag_meanings\")\n variable = du.create_flags_variable(dim_sizes, meanings=flag_meanings,\n dim_names=dim_names, attributes=attributes)\n\n else:\n variable = du.create_variable(dim_sizes, dim_names=dim_names,\n dtype=dtype, attributes=attributes)\n\n if \"encoding\" in variable_attrs:\n du.add_encoding(variable, **variable_attrs[\"encoding\"])\n\n ds[variable_name] = variable\n\n return ds", "def make_var(x, dtype=np.float32, cuda=True, volatile=False, requires_grad=False):\n if type(x) != Variable:\n if isinstance(x, np.ndarray): \n x = torch.from_numpy(x.astype(dtype))\n x = Variable(x, volatile=volatile, requires_grad=requires_grad)\n return make_cuda(x) if cuda else x", "def create_write_variable(self, write_variable_name, initial_value = None):\n if write_variable_name is None:\n error_line = ('ERROR in Agent.create_write_variable(), write_variable_name is None')\n raise Exception(error_line)\n shared_variable_for_write_variable = SharedVariable(write_variable_name, writing_agent_name = self.__name__)\n self.write_variables[write_variable_name] = shared_variable_for_write_variable\n # Initialize the shared variable\n shared_variable_for_write_variable.write(initial_value)\n return WriteVariable(self, write_variable_name)", "def _create_production(self, m, comp):\n name = comp.name\n # create pyomo indexer for this component's resources\n res_indexer = pyo.Set(initialize=range(len(m.resource_index_map[comp])))\n setattr(m, f'{name}_res_index_map', res_indexer)\n # production variable depends on resource, time\n # # TODO if transfer function is linear, isn't this technically redundant? Maybe only need one resource ...\n ## Method 1: set variable bounds directly --> not working! 
why??\n #lower, upper, domain = self._get_prod_bounds(comp)\n #prod = pyo.Var(res_indexer, m.T, bounds=(lower, upper)) #within=domain,\n ## Method 2: set capacity as a seperate constraint\n prod = pyo.Var(res_indexer, m.T, initialize=0)\n prod_name = '{c}_production'.format(c=name)\n setattr(m, prod_name, prod)\n return prod_name", "def _create_tkvar(self,PO,name,param_obj):\n # CEBALERT: should probably delete any existing tkvar for name\n self._create_translator(name,param_obj)\n\n tkvar = lookup_by_class(self._param_to_tkvar,type(param_obj))()\n self._tkvars[name] = tkvar\n\n # overwrite Variable's set() with one that will handle\n # transformations to string\n tkvar._original_set = tkvar.set\n tkvar.set = lambda v,x=name: self._tkvar_set(x,v)\n\n tkvar.set(self.get_parameter_value(name,PO))\n tkvar._last_good_val=tkvar.get() # for reverting\n tkvar.trace_variable('w',lambda a,b,c,p_name=name: self._handle_gui_set(p_name))\n # CB: Instead of a trace, could we override the Variable's\n # set() method i.e. trace it ourselves? Or does too much\n # happen in tcl/tk for that to work?\n\n # Override the Variable's get() method to guarantee an\n # out-of-date value is never returned. In cases where the\n # tkinter val is the most recently changed (i.e. when it's\n # edited in the gui, resulting in a trace_variable being\n # called), the _original_get() method is used.\n # CEBALERT: what about other users of the variable? Could they\n # be surprised by the result from get()?\n tkvar._original_get = tkvar.get\n tkvar.get = lambda x=name: self._tkvar_get(x)", "def variable_on_cpu(name, shape, initializer):\n # Use the /cpu:0 device for scoped operations\n with tf.device('/cpu:0'):\n # Create or get apropos variable\n var = tf.get_variable(name=name, shape=shape, initializer=initializer)\n return var", "def set_variable(self, name, value):\n if name not in self._variables:\n logging.warning(\"Pipeline variable '%s' was not initialized\", name)\n self._variables[name].update({'value': value})\n return self", "def build_parameter_container_for_variables(\n variables_configs: Dict[str, Any]\n) -> ParameterContainer:\n variable_config_key: str\n variable_config_value: Any\n parameter_values: Dict[str, Any] = {}\n for variable_config_key, variable_config_value in variables_configs.items():\n variable_config_key = f\"{VARIABLES_KEY}{variable_config_key}\"\n parameter_values[variable_config_key] = variable_config_value\n\n parameter_container: ParameterContainer = ParameterContainer(parameter_nodes=None)\n build_parameter_container(\n parameter_container=parameter_container, parameter_values=parameter_values\n )\n\n return parameter_container", "def global_var(name: str) -> SetupVal:\n return GlobalVarVal(name)", "def define(self, variable_name=None, value=None, **definition_map):\n is_single_definition = bool(variable_name and value)\n is_batch_definition = bool(definition_map)\n\n if is_single_definition and not is_batch_definition:\n self.definition_map[variable_name] = value\n\n return\n elif not is_single_definition and is_batch_definition:\n self.definition_map.update(definition_map)\n\n return\n\n raise ValueError('Cannot define one variable or multiple variables at the same time.')", "def add(variable, value):\n prefixes[variable] = value", "def createValue(self, valueName, valueSetting, valueDescription):\n\t\tself.settings[valueName] = [ valueSetting, valueDescription ]", "def create_variable(df, name, label, label_map, default_value=0):\n output_df = df.copy()\n output_df[name] = 
default_value\n \n label_map[name] = label\n \n return output_df", "def create_new_var_and_update(self, name):\n\n self.create_new_var(name)\n self.list_widget.recreate_ui()", "def new(self, name: str, conditional) -> VariableReference:\n mv = self.map.memory.new_address()\n vr = self.get(name, inherit_from_parent=False)\n vr.set({mv}, conditional)\n return vr", "def add_variable(self, comp, vname, units, **kwargs):\n if not isinstance(comp, cellml_component):\n comp = self.model.get_component_by_name(comp)\n units = self.add_units(units)\n var = cellml_variable.create_new(comp, vname, units.name, **kwargs)\n comp._add_variable(var)\n return var", "def env_variable(self, name: str) -> \"HostVariable\":\n _args = [\n Arg(\"name\", name),\n ]\n _ctx = self._select(\"envVariable\", _args)\n return HostVariable(_ctx)", "def register_variable(self, var: ScalarVariable, nested: bool = True):\n variable_reference = len(self.vars)\n self.vars[variable_reference] = var\n # Set the unique value reference\n var.value_reference = variable_reference\n owner = self\n if var.getter is None and nested and \".\" in var.name:\n split = var.name.split(\".\")\n split.pop(-1)\n for s in split:\n owner = getattr(owner, s)\n if var.getter is None:\n var.getter = lambda: getattr(owner, var.local_name)\n if var.setter is None and hasattr(owner, var.local_name) and var.variability != Fmi2Variability.constant:\n var.setter = lambda v: setattr(owner, var.local_name, v)", "def add_variable(self, name):\n self.all_variables.add(name)", "def putvarname(self,j_,name_):\n if isinstance(name_,unicode):\n name_ = name_.encode(\"utf-8\",errors=\"replace\")\n res = __library__.MSK_XX_putvarname(self.__nativep,j_,name_)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def set_variable(self, request, context):\n response = SetVariableResponse()\n value = decode(request.value)\n self._delegator.set_variable(request.component, request.variable, value)\n return response", "def create(self, config):\n\n assert config[\"name\"] == self.name, \"Given config is not for this template\"\n\n data = self._json_encode(config)\n headers = self._default_headers()\n\n return self._request(\"\",\n ok_status=None,\n data=data,\n headers=headers)", "def add_variable(self, name, domain):\n self.variables.append(name)\n self.domains[name] = list(domain)\n self.constraints[name] = {}", "def set_variable(self, name, value):\n # Note that \":=\" is used so that we can control the behavior for\n # both Makefile and CMake variables similarly.\n self.write_line(name + \" := \" + value)", "def add(self, name, variable_classification):\r\n if name in self.__table__:\r\n logger.error(\"Tried to add duplicate key : \" , name)\r\n \r\n if isinstance(variable_classification, Global_Variable):\r\n self.__table__[name] = variable_classification\r\n elif isinstance(variable_classification, Local_Variable):\r\n self.__table__[name] = variable_classification\r\n elif isinstance(variable_classification, Procedure_Variable):\r\n self.__table__[name] = variable_classification\r\n elif isinstance(variable_classification, Constant_Variable):\r\n self.__table__[name] = variable_classification\r\n elif isinstance(variable_classification, Function_Variable):\r\n self.__table__[name] = variable_classification\r\n\r\n else:\r\n raise RuntimeError(\"Attempt to add bad value to symbol table\")", "def test_create_namespaced_deployment_config(self):\n pass", "def put_node_variable_name(self, name, index):\n # 1 - based indexing!\n assert 
index <= self._f.dimensions[\"num_nod_var\"]\n\n self._f.variables[\"name_nod_var\"][index - 1] = b\"\"\n self._f.variables[\"name_nod_var\"][index - 1, :len(name)] = \\\n [_i.encode() if hasattr(_i, \"encode\") else _i for _i in name]", "def create_variables(xblock_id, connection, updated_variables):\n \n cursor = connection.cursor()\n query = \"INSERT INTO edxapp.qgb_variable (xblock_id, name, type, min_value, max_value, decimal_places) VALUES (%s, %s, %s, %s, %s, %s)\"\n print query\n for variable_name, variable in updated_variables.iteritems():\n updated_variable_data = (xblock_id, variable_name, variable['type'], variable['min_value'], variable['max_value'], variable['decimal_places'])\n cursor.execute(query, updated_variable_data)\n \n cursor.close()", "def variable(value, dtype, name=None, broadcastable=None):\n return tf.Variable(value, dtype=dtype, name=name)", "def fetch_framework_var(attr_name):\n\n scope = fluid.executor.global_scope()\n core_var = scope.find_var(attr_name)\n shape = core_var.get_tensor().shape()\n framework_var = fluid.layers.create_parameter(\n shape=shape, dtype='float32', attr=fluid.ParamAttr(name=attr_name))\n return framework_var", "def createMeteorologicalVariable(self, product, variable_name):\r\n\r\n meteorological_variable_dict = {'name': variable_name,\r\n 'dtype': None,\r\n 'vtype': 'meteorological',\r\n 'units': None,\r\n 'ndims': None,\r\n 'shape': None}\r\n\r\n meteorological_variable = Variable(meteorological_variable_dict)\r\n\r\n return meteorological_variable", "def define_variable(var, val, env):\n frame = first_frame(env)\n def scan(vars, vals):\n if isNull(vars):\n return addBindingToFrame(var, val, frame)\n elif var == car(vars):\n return set_car(vals, val)\n else:\n return scan(cdr(vars), cdr(vals))\n return scan(frame_variables(frame), frame_values(frame))", "def __new_var__(self, var_type=None, name=None) -> str:\n if name is None:\n from time import time\n name = str(int(time()*1e7))\n self.__numVar += 1\n self.__variables.append((name, var_type))\n if var_type == List:\n return name + \" = []\"\n return name", "def model_variable(name, shape=None, dtype=dtypes.float32, initializer=None,\n regularizer=None, trainable=True, collections=None,\n caching_device=None, device=None, partitioner=None,\n custom_getter=None, use_resource=None):\n collections = list(collections or [])\n collections += [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.MODEL_VARIABLES]\n var = variable(name, shape=shape, dtype=dtype,\n initializer=initializer, regularizer=regularizer,\n trainable=trainable, collections=collections,\n caching_device=caching_device, device=device,\n partitioner=partitioner, custom_getter=custom_getter,\n use_resource=use_resource)\n return var", "def add_variable(self, var):\n self.var_list.append(var)\n self.var_dict[var.name] = var", "def set_variables(project, env_spec_name, vars_and_values, prepare_result=None):\n (env_prefix, status) = _prepare_env_prefix(project,\n env_spec_name,\n prepare_result,\n mode=provide.PROVIDE_MODE_DEVELOPMENT)\n if env_prefix is None:\n return status\n\n local_state = LocalStateFile.load_for_directory(project.directory_path)\n var_reqs = dict()\n for req in project.find_requirements(env_spec_name, klass=EnvVarRequirement):\n var_reqs[req.env_var] = req\n present_vars = set(var_reqs.keys())\n errors = []\n local_state_count = 0\n keyring_count = 0\n for varname, value in vars_and_values:\n if varname in present_vars:\n if var_reqs[varname].encrypted:\n # import keyring locally because it's an optional 
dependency\n # that prints a warning when it's needed but not found.\n from anaconda_project.internal import keyring\n\n keyring.set(env_prefix, varname, value)\n keyring_count = keyring_count + 1\n else:\n local_state.set_value(['variables', varname], value)\n local_state_count = local_state_count + 1\n else:\n errors.append(\"Variable %s does not exist in the project.\" % varname)\n\n if errors:\n return SimpleStatus(success=False, description=\"Could not set variables.\", errors=errors)\n else:\n if local_state_count > 0:\n local_state.save()\n if keyring_count == 0:\n description = (\"Values saved in %s.\" % local_state.filename)\n elif local_state_count == 0:\n description = (\"Values saved in the system keychain.\")\n else:\n description = (\"%d values saved in %s, %d values saved in the system keychain.\" %\n (local_state_count, local_state.filename, keyring_count))\n return SimpleStatus(success=True, description=description)", "def assign_from_values(var_names_to_values):\n feed_dict = {}\n assign_ops = []\n\n for var_name in var_names_to_values:\n var_value = var_names_to_values[var_name]\n var = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, var_name)\n if not var:\n raise ValueError('Variable %s wasn\\'t found' % var_name)\n elif len(var) > 1:\n # tf.get_collection is just a filter on the prefix: find the exact match:\n found = False\n for v in var:\n if v.op.name == var_name:\n var = v\n found = True\n break\n\n if not found:\n raise ValueError('Variable %s doesn\\'t uniquely identify a variable' %\n var_name)\n else:\n var = var[0]\n\n # TODO(nsilberman): ensure placeholder and assign are on the same device.\n # Assign a placeholder to the value that will be filled later.\n placeholder_name = 'placeholder/' + var.op.name\n placeholder_value = array_ops.placeholder(\n dtype=var.dtype.base_dtype,\n shape=var.get_shape(),\n name=placeholder_name)\n assign_ops.append(var.assign(placeholder_value))\n\n feed_dict[placeholder_value] = var_value.reshape(var.get_shape())\n\n assign_op = control_flow_ops.group(*assign_ops)\n return assign_op, feed_dict", "def get_make_var(self, name: str) -> str:\n return self.soong_ui([\"--dumpvar-mode\", name], capture_output=True).rstrip(\"\\n\")", "def PseudoIOAPIVariable(parent,name,typecode,dimensions,**kwds):\n\n retval = PseudoNetCDFVariable(parent, name, typecode, dimensions, **kwds)\n\n if not 'units' in kwds:\n warn('IOAPI variables must have units; %s has been initialized with \"None\" units')\n retval.units = 'None'\n \n if not 'long_name' in kwds:\n retval.long_name = name.ljust(16)\n\n if not 'var_desc' in kwds:\n retval.var_desc = name.ljust(80)\n\n return retval", "def defineVariable(self, file, var, axesArgString):\n if self.isVariableDefined(var.id):\n self.warningWidget.showWarning(var.id, file, var, axesArgString)\n else:\n self.addVariable(var.id, file, var, axesArgString)", "def ttvar(name, index=None):\n bvar = boolfunc.var(name, index)\n try:\n var = _VARS[bvar.uniqid]\n except KeyError:\n var = _VARS[bvar.uniqid] = TTVariable(bvar)\n return var", "def create_variable(arr, dtype='float32', device=None, requires_grad=True, backend='autograd'):\n args = {}\n if backend == 'autograd':\n if dtype is not None:\n args['dtype'] = dtype_mapping_dict[dtype]['autograd']\n var = anp.array(arr, **args)\n elif backend == 'pytorch':\n if dtype is not None:\n args['dtype'] = getattr(engine_dict['pytorch'], dtype_mapping_dict[dtype]['pytorch'])\n if device is not None:\n args['device'] = device\n args['requires_grad'] = requires_grad\n var 
= tc.tensor(arr, **args)\n return var", "def createTagValue(creatorID, tagID, objectID, value):\n store = getMainStore()\n return store.add(TagValue(creatorID, tagID, objectID, value))", "def test_existing_value(self):\n var_name = \"PICCOLO_TEST_2\"\n initial_value = \"hello\"\n new_value = \"goodbye\"\n\n os.environ[var_name] = initial_value\n\n with set_env_var(var_name=var_name, temp_value=new_value):\n self.assertEqual(os.environ.get(var_name), new_value)\n\n self.assertEqual(os.environ.get(var_name), initial_value)", "def create_variables(self, source, prior, data, ds):\n\n # Store variable-length data\n if source == \"gbnode\" and prior in self.NODE_LIST:\n node_id = ds.createVariable(\"node_id\", \"i8\", (\"num_nodes\",))\n node_id[:] = data[\"node_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_nodes\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_nodes\",))\n values[:] = data[\"values\"]\n\n elif prior in self.PROB_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"probability\"))\n values[:] = data[\"values\"]\n\n elif prior in self.MONTHS_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"num_months\"))\n values[:] = data[\"values\"]\n\n elif prior in self.DAYS_LIST:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\", \"num_days\"))\n values[:] = data[\"values\"]\n\n value_t = ds.createVariable(\"value_t\", \"f8\", (\"num_reaches\", \"num_days\"))\n value_t[:] = data[\"value_t\"]\n\n else:\n reach_id = ds.createVariable(\"reach_id\", \"i8\", (\"num_reaches\",))\n reach_id[:] = data[\"reach_ids\"]\n\n indexes = ds.createVariable(\"indexes\", \"i4\", (\"num_reaches\",))\n indexes[:] = data[\"indexes\"]\n\n values = ds.createVariable(\"prior_values\", data[\"data_type\"], (\"num_reaches\",))\n values[:] = data[\"values\"]", "def push(self, name, var, timeout=None, verbose=True):\r\n if isinstance(name, str):\r\n name = [name]\r\n var = [var]\r\n\r\n for n, v in zip(name, var):\r\n self.feval(\"assignin\", \"base\", n, v, nout=0, timeout=timeout, verbose=verbose)", "def constructor_env_variables(loader, node) -> Any: # type: ignore\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n (env_var, default_val) = g.split(\"::\")\n value = os.environ.get(env_var, default_val)\n full_value = full_value.replace(f\"${{{g}}}\", value)\n if not full_value:\n full_value = None\n _logger.debug(f\"injected ENV parameter {env_var} resolved to {value}\")\n return full_value\n return value", "def test_variables_id_put(self):\n pass", "def variable_on_cpu(name,\n shape,\n initializer,\n dtype=tf.float32,\n trainable=True):\n with tf.device('/cpu:0'):\n 
var = tf.get_variable(name, shape, initializer=initializer,\n dtype=dtype, trainable=trainable)\n return var", "def newTemp():\n global varSeq\n toRet = 'var'+str(varSeq)\n varSeq += 1\n scopeDict[currScope].insert(toRet,\"temporary\")\n return toRet", "def init_config_vars(config):\r\n global run_id\r\n run_id = config.info.run_id\r\n global unique_op_dir\r\n unique_op_dir = os.path.join(config.info.output_dir, config.info.run_id)\r\n os.makedirs(unique_op_dir, exist_ok=True)", "def AddVariable(self, e):\n if (not self.mainparent.file_loaded):\n msg = \"An input file must be loaded before a variable can be added\"\n ShowMessage(msg, kind='warn')\n return\n if (self.mainparent.namelist is None):\n msg = \"Use the menu to select a namelist, first\"\n ShowMessage(msg, kind='info')\n return\n self.mainparent.statusbar.SetStatusText(\"Adding new variable\", 0)\n\n # get variable name/value from user\n dlg = NewVariableDialog(self.parent, \"Enter New Variable\")\n if (dlg.ShowModal() != wx.ID_OK):\n dlg.Destroy()\n self.mainparent.reset_statusbar()\n return\n\n name, value = dlg.get_values()\n\n var = Variable(name, value) # add variable\n self.mainparent.input_file.namelists[self.mainparent.namelist].add_variable(var)\n\n self.mainparent.statusbar.SetStatusText(\"Added: {}\".format(name), 0)\n\n self.mainparent.nmlpanel.update(unset_namelist=False) # update displayed namelist to include new entry", "def constructor_env_variables(loader, node):\n value = loader.construct_scalar(node)\n match = pattern.findall(value) # to find all env variables in line\n if match:\n full_value = value\n for g in match:\n full_value = full_value.replace(\n f'${{{g}}}', os.environ.get(g, g)\n )\n return full_value\n return value", "def create_new_vararray_core(cfg_set,var):\r\n if cfg_set[\"source_dict\"][var]==\"METADATA\":\r\n return\r\n \r\n t1 = datetime.datetime.now()\r\n if cfg_set[\"verbose\"]: print(\" ... new \"+var+\" array created in:\")\r\n filename = path_creator_vararr(\"orig\",var,cfg_set)\r\n vararr = np.zeros((cfg_set[\"n_integ\"],)+cfg_set[\"xy_ext\"])\r\n \r\n ## Get field of every time step (if map-function cannot be applied)\r\n i = 0\r\n t_delta = np.array(range(cfg_set[\"n_integ\"]))*datetime.timedelta(minutes=cfg_set[\"timestep\"])\r\n if var == \"TRT\":\r\n vararr = get_vararr_TRT_t0(cfg_set[\"t0\"], cfg_set)\r\n else:\r\n for t_d in t_delta:\r\n t_current = cfg_set[\"t0\"] - cfg_set[\"time_change_factor\"]*t_d\r\n vararr_t = get_vararr_t(t_current, var, cfg_set)\r\n vararr[i,:,:] = vararr_t[0,:,:]\r\n i += 1\r\n save_file(filename, data_arr=vararr,var_name=var,cfg_set=cfg_set)\r\n if cfg_set[\"verbose\"]: print(\" \"+filename)\r\n \r\n ## In case verification of displacements should be performed, also initialise skill-score array:\r\n if cfg_set[\"verify_disp\"]:\r\n filename_verif = \"%stmp/%s_%s_%s_verif.npy\" % (cfg_set[\"root_path\"],\r\n cfg_set[\"verif_param\"],str(cfg_set[cfg_set[\"verif_param\"]]), var)\r\n verif_array = np.zeros((1,len(cfg_set[\"scores_list\"]),cfg_set[\"n_integ\"]-1))-9999.\r\n np.save(filename_verif, verif_array)\r\n \r\n t2 = datetime.datetime.now()\r\n if False: print(\" Elapsed time for creation of variable %s: %s\" % (var,str(t2-t1)))", "def __init__(self, variables, name='TPUReplicatedVariable'):\n if not isinstance(variables, abc.Sequence) or not variables or any(\n not isinstance(v, variables_lib.Variable) for v in variables):\n raise TypeError('Argument `variables` should be a non-empty list of '\n f'`variables.Variable`s. 
Received {variables}')\n\n if any(v.dtype != variables[0].dtype for v in variables):\n raise ValueError(\n 'All elements in argument `variables` must have the same dtype. '\n f'Received dtypes: {[v.dtype for v in variables]}')\n\n if any(v.shape != variables[0].shape for v in variables):\n raise ValueError(\n 'All elements in argument `variables` must have the same shape. '\n f'Received shapes: {[v.shape for v in variables]}')\n\n self._vars = variables\n self._name = name\n self._common_name = self._name.split(':')[0]\n self._cached_value = None", "def createMaskVariable(self, product, variable_name):\r\n\r\n mask_variable_dict = {'name': variable_name,\r\n 'dtype': None,\r\n 'vtype': 'mask',\r\n 'units': None,\r\n 'ndims': None,\r\n 'shape': None}\r\n\r\n mask_variable = Variable(mask_variable_dict)\r\n\r\n return mask_variable" ]
[ "0.6057225", "0.5921494", "0.5921494", "0.5910765", "0.58916956", "0.57897204", "0.5695237", "0.5639612", "0.55724275", "0.557233", "0.55559117", "0.5538004", "0.551758", "0.55008006", "0.5477593", "0.54400873", "0.54192346", "0.5404222", "0.54034936", "0.5355584", "0.5355295", "0.53494877", "0.53494877", "0.534791", "0.5336159", "0.53007835", "0.5286047", "0.5283897", "0.52384716", "0.51792324", "0.517256", "0.51724774", "0.5171256", "0.5166801", "0.51514095", "0.514136", "0.51390535", "0.51203156", "0.5119157", "0.5113932", "0.5094211", "0.5092889", "0.5090003", "0.5083346", "0.5081966", "0.5076264", "0.5065009", "0.50552756", "0.50448203", "0.50342673", "0.50307506", "0.5028488", "0.50164956", "0.5015353", "0.50085425", "0.49794275", "0.49790883", "0.49691772", "0.49654278", "0.4961352", "0.49570647", "0.49498385", "0.49460226", "0.4941721", "0.49381158", "0.4933307", "0.49242085", "0.49228993", "0.49162588", "0.4915537", "0.491263", "0.49061376", "0.49007326", "0.48964003", "0.4894725", "0.48796755", "0.48753124", "0.48715818", "0.48655882", "0.4858495", "0.48553625", "0.48505077", "0.4841471", "0.48290196", "0.48284805", "0.4814153", "0.4806066", "0.48020804", "0.47974408", "0.47878957", "0.478383", "0.478356", "0.4782126", "0.47809145", "0.47777364", "0.47736892", "0.47678718", "0.4764886", "0.47637248", "0.47601107" ]
0.48706943
78
Updates an existing variable with a new value.
def UpdateVariable(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_variable(value):\n return value", "def updateValue(self):\n self.value = self.var.get()", "def update_variable(value):\n return value + 1", "def updateVar(self, id, value, type_):\n if id in self.variables:\n symbol = self.variables[id]\n symbol = sym.Symbol(id, value, type_, symbol.row, symbol.column)\n self.variables[id] = symbol\n return True", "def set_variable(self, name, value):\n self.send_to_backend('set', name=name, value=value)\n self.refresh_variable(name)", "def updateVariables(self) -> None:\n ...", "def update_binding(self, variable, value):\n old_value = self.lookup(variable).value\n if old_value is None:\n raise BaseException(\n \"Tried to update a variable that's not in scope!\")\n var_x, var_y = self.lookup(variable).variable.pos\n self.lookup(variable).value = value\n if old_value.moves_with_binding:\n old_value.set_pos(0, 0) # Or better yet, somehow remove it\n if value.moves_with_binding:\n value.set_pos(var_x + 130, var_y)\n self.update()", "def update(self, var_id, value):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n data = {\n 'value': value\n }\n return self.base_request.request(\n 'device_service_environment_variable', 'PATCH', params=params, data=data,\n endpoint=self.settings.get('pine_endpoint')\n )", "def update(self, var_id, value):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n\n data = {\n 'value': value\n }\n return self.base_request.request(\n 'service_environment_variable', 'PATCH', params=params, data=data,\n endpoint=self.settings.get('pine_endpoint')\n )", "def setVariable(self, variable):\n self.variable = variable\n self.updateVariableString()", "def set_value(self, var_name, new_value, tf_session):\n\n if(var_name in self.assign_operator):\n\n tf_session.run(\n self.assign_operator[var_name], {\n self.l_param_input[var_name]: new_value})\n else:\n print(\"Thou shall only assign learning parameters!\")", "def set_var_value(self, var, value):\n if var in self.binding:\n self.binding[var] = value\n elif self.parent is not None:\n self.parent.set_var_value(var, value)\n else:\n raise Environment.Unbound('unbound variable \"%s\"' % var)", "def update(self, x):\n pass", "def assign_variable(self, name, value):\n return self.set_variable(name, value)", "def set_assignment(self, var, value):\n self.variable_to_value[var] = value", "def doEdit(var, value, target):\n currentValue = target.get(var, \"\")\n newValue = Simplifier.simplify(str(value).replace(f\"{{{var}}}\", str(currentValue)))\n target[var] = newValue", "def set(self, var, value):\n cmd = '{0}={1};'.format(var, value)\n out = self.eval(cmd)\n if out.find(\"error\") != -1:\n raise TypeError(\"Error executing code in Matlab\\nCODE:\\n\\t{0}\\nMatlab ERROR:\\n\\t{1}\".format(cmd, out))", "def set_variable(self, name, value):\n if self._scalamagic and (not name.startswith(\"_i\")):\n self.scala_interpreter.bind(name, value)\n else:\n self.log.debug('Not setting variable %s', name)", "def change_var(self, var):\n return _coconut_tail_call(self.__class__, var, self.elem.substitute({self.var: var}))", "async def updated(self, value):\n pass", "def __setitem__(self, item, value):\n self.vars[item] = value", "def set_variable_value(self, var, value):\n \n namespace = self.first_namespace_that_binds_the_var(var)\n if namespace is None:\n raise LookupError(f'cannot set the variable \"{var}\" to the value {value}: '\n 'the variable is not bound in the current environment')\n namespace[var] = value", "def define_var(self, var, value):\n self.binding[var] = value", "def set(self, 
key, value):\n self.data[key] = value\n logger.debug('Setting value \"%s\" for variable \"%s\"', value, key)", "def update(self, **vars):\n for name in vars:\n # Use __setitem__ for all effects\n self[name] = vars[name]", "def update( self, dval ):\n self.val[:] += dval[:]\n return", "def variable(self, val):", "def update_parameter(self, param, val, force=False):\n self._update_dict[param] = val\n if force:\n self._cur_val[param] = None", "def assign(self, V, py):\n V.value = py", "def assign(self, V, py):\n V.value = py", "def assign(self, V, py):\n V.value = py", "def assign(self, V, py):\n V.value = py", "def Update(self, value):\n self.SetValue(self.GetValue() + tf.cast(value, self.dtype))", "def Update(self, value):\n self.SetValue(self.GetValue() + tf.cast(value, self.dtype))", "def assign(self, var, value):\n\t\tself._root = self._insert(self._root, var, value)", "def update_variable(variable, value):\n # check value type, call function 5 to check type\n if not in_alphabet(variable):\n print(f\"Syntax Error.\")\n return\n # check value type, call function 6 to check type\n if is_digit(value):\n lookUpTable[variable] = int(value)\n return\n # check value in dictionary\n if value not in lookUpTable:\n print(f\"{value} is undefined.\")\n return\n # update dictionary\n lookUpTable[variable] = lookUpTable[value]", "def __setitem__(self, name, value):\n if name in self._variables_locked:\n raise Error(\"Attempted to set a variable which is already locked: {0}\".format(name))\n\n if isinstance(value, Delete):\n # Delete the variable\n self._variables.pop(name, None)\n self._variables_default.discard(name)\n elif isinstance(value, Default):\n # Only set if not already set or if the set value was another default\n if name not in self._variables or name in self._variables_default:\n self[name] = value._value\n # direct set below clears default flag, so add it again\n self._variables_default.add(name)\n elif isinstance(value, NoChange):\n pass\n elif isinstance(value, Description):\n self._var_desc[name] = value._desc\n self[name] = value._value\n elif isinstance(value, Locked):\n self[name] = value._value # Set before setting the locked flag\n self._variables_locked.add(name)\n else:\n self._variables[name] = value\n # direct set clears the default flag\n self._variables_default.discard(name)", "async def update(self, wait_time=0):\n self._last_update = now()\n await self._variables.update(wait_time)", "def set(self, key, value):\n if value is not None:\n self.vars[key] = value", "def update():", "def update():", "def set(self, key, value):\n if (\n key in self.variables and\n type(value).__name__ == self.variables[key]['type']\n ):\n self.variables[key]['value'] = value\n else:\n raise ValueError(\"Bad key or wrong variable type\")", "def update(self):\n if self.api is None:\n return\n self.api.update()\n\n if self.var_type == 'Time':\n self.var_state = self.api.result['timeRelease']\n return\n\n result = self.api.result[self.var_period.lower()]\n if self.var_type == 'Sky':\n sky = result['sky']\n self.var_state = sky['name']\n self.var_icon = get_sky_icon(sky['code'])\n else:\n temp = result['temperature']\n if self.var_detail == 'Max':\n self.var_state = round(float(temp['tmax']), 1)\n else:\n self.var_state = round(float(temp['tmin']), 1)", "def update_val(self, val):\n self.in_val = val", "def value(self, value):\n self._update_value(value)", "def define_variable(self, var, value):\n self.namespace[var] = value", "def addVariable(self, name, value, save = False):\r\n setattr(self, name, 
value)\r\n if save and name not in self.variables:\r\n self.variables.append(name)", "def update_variable_by_id(self, id, request):\n\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.body_wrapper import BodyWrapper\n\t\texcept Exception:\n\t\t\tfrom .body_wrapper import BodyWrapper\n\n\t\tif not isinstance(id, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)\n\t\t\n\t\tif request is not None and not isinstance(request, BodyWrapper):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables/'\n\t\tapi_path = api_path + str(id)\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)\n\t\thandler_instance.set_content_type('application/json')\n\t\thandler_instance.set_request(request)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def set_var(self,variable,value):\n self.template=self.template.replace(\"@{}@\".format(variable),value)", "def increment2(cls, var):\r\n var += 1", "def set_variable(self, request, context):\n response = SetVariableResponse()\n value = decode(request.value)\n self._delegator.set_variable(request.component, request.variable, value)\n return response", "def __setitem__(self, key: str, value: Any) -> None:\n self.variables[key] = value", "def update(self, val, feats):\n raise NotImplementedError", "def set_value(self,x):\n self._value = x", "def set_value(self,x):\n self._value = x", "def _update_field_value(browser, field_name, operator, value):\n browser.login('mgr')\n browser.keyword_search(KEYWORD, apply='Update')\n browser.getControl('field').displayValue = [field_name]\n browser.getControl('Next').click()\n assert '' == browser.getControl('new value', index=0).value\n browser.getControl('new value', index=0).value = value\n browser.getControl('operation').displayValue = [operator]\n browser.getControl('Next').click()", "def set_variable(self, variable):\n self.variable = variable\n return self", "def update(self, value):\n # If the value has not already been set, set it.\n if self.value is None:\n self.value = value\n else:\n # Calculate the new value.\n self.value = ((1-self.weight) * self.value + self.weight * value)\n return self.value", "def update_node(node, attribute, value):\n node.set(attribute, value)\n return", "def update(self, v_input):\n\n self.v = v_input", "def setVariable(self, *args):\n return _libsbml.Rule_setVariable(self, *args)", "def assign(self, value):\n self.value = value", "def toggle_variable(self, obj):\n if self.is_variable_present(obj):\n self.remove_variable(obj)\n else:\n self.add_variable(obj)", "def upsert(self, variable_value=None, commit=False):\n statement = UPDATE if self.exists else INSERT\n self.oxdb.execute(\n statement,\n variable_value or datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'),\n self.variable_name, commit=commit)\n self.select()", "def update(self)->None:\n database.cursor.execute(\"UPDATE votes SET value = %s WHERE id = %s\", (\n self.value,\n self.id\n ))\n database.connection.commit()", "def update(self, key, new_value):\n raise 
NotImplementedError", "def update(self, key, new_val):\n\n s = self.get(key)\n s.value = new_val\n s.update()\n return s", "def set_var(self, var_name, var_data, metadata={}):\n var_name = var_name.strip()\n\n # Get any old metadata\n if var_name in self.variables:\n Old_Var = getattr(self, var_name)\n if hasattr(Old_Var, \"metadata\"):\n md_old = Old_Var.metadata\n for key in md_old:\n if key not in metadata:\n metadata[key] = md_old[key]\n\n # Create a new Variable and set it to self\n Var = inp_types.Variable(var_name, var_data, metadata)\n\n setattr(self, var_name, Var)\n\n # Append the variable to the list of variables\n if var_name not in self.variables:\n self.variables.append(var_name)\n\n return Var", "def updateItem(self, value):\n self.value = value\n self.age = 0\n self.freq += 1", "def set_variable(self, name, value, notify=False, notify_tag=\"changed/*\"):\n split_name=tuple(dictionary.normalize_path(name))\n notify_list=[]\n with self._params_val_lock:\n if name in self._params_funcs:\n del self._params_funcs[name]\n self._params_val.add_entry(name,value,force=True)\n for exp_name in self._params_exp:\n if exp_name==split_name[:len(exp_name)] or split_name==exp_name[:len(split_name)]:\n notify_list.append((self._params_val[exp_name],self._params_exp[exp_name]))\n for val,lst in notify_list:\n for ctl in lst:\n ctl.send_message(self._variable_change_tag,val)\n if notify:\n notify_tag.replace(\"*\",name)\n self.send_signal(\"any\",notify_tag,value)", "def update_pv(self, pvname, value) -> None:\n val = value\n pvname = pvname.replace(f\"{self._prefix}:\", \"\")\n\n self._cached_values.update({pvname: val})\n\n # only update if not running\n if not self._running_indicator.value:\n self._in_queue.put({\"protocol\": self.protocol, \"pvs\": self._cached_values})\n self._cached_values = {}", "def setLocal(name, value):", "def update( ):\r\n pass", "def update(self, value: Opt[bytes], wal: bool = True):\n self.value = value\n self.wal = wal", "def update( d, **kw):\n d.update( d, **kw )\n return d", "def update(self, *args, **kw):\n pass", "def test_setitem(self, env: yaenv.Env):\n assert 'NEW_VAR' not in env\n env['NEW_VAR'] = 'new_var'\n assert env['NEW_VAR'] == 'new_var'\n env['NEW_VAR'] = 'newer var'\n assert env['NEW_VAR'] == 'newer var'", "def set_value(attr_name, value, gpu_id):\n place = fluid.CPUPlace() if gpu_id < 0 \\\n else fluid.CUDAPlace(gpu_id)\n var = _fetch_var(attr_name, return_numpy=False)\n var.set(value, place)", "def pupdate(self):\n try:\n tmp = self.path_list[0]\n except IndexError:\n print(\"Empty value for env variable \", self.name)\n return\n\n for p in self.path_list[1:]:\n tmp = tmp + ':' + p\n self.val = tmp", "def _update(self, var, channels, value):\n\n if type(channels) is int:\n channels = [channels]\n\n if var =='frequency':\n for channel in channels:\n self.frequencies[channel] = value\n elif var =='phase':\n for channel in channels:\n self.phases[channel] = value\n elif var == 'amplitude':\n for channel in channels:\n self.amplitudes[channel] = value", "def test_update(self):\r\n\r\n # Simple value assignment.\r\n x = shared(0)\r\n assign = pfunc([], [], updates={x: 3})\r\n assign()\r\n self.assertTrue(x.get_value() == 3)\r\n\r\n # Basic increment function.\r\n x.set_value(0)\r\n inc = pfunc([], [], updates={x: x + 1})\r\n inc()\r\n self.assertTrue(x.get_value() == 1)\r\n\r\n # Increment by a constant value.\r\n x.set_value(-1)\r\n y = shared(2)\r\n inc_by_y = pfunc([], [], updates={x: x + y})\r\n inc_by_y()\r\n self.assertTrue(x.get_value() == 
1)", "def change_value(self,val):\n self.val = val", "def update(*args):", "def set_variable(self, name, value):\n if name not in self._variables:\n logging.warning(\"Pipeline variable '%s' was not initialized\", name)\n self._variables[name].update({'value': value})\n return self", "def update_variable_by_api_name(self, api_name, request):\n\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.body_wrapper import BodyWrapper\n\t\texcept Exception:\n\t\t\tfrom .body_wrapper import BodyWrapper\n\n\t\tif not isinstance(api_name, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: api_name EXPECTED TYPE: str', None, None)\n\t\t\n\t\tif request is not None and not isinstance(request, BodyWrapper):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: request EXPECTED TYPE: BodyWrapper', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables/'\n\t\tapi_path = api_path + str(api_name)\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_PUT)\n\t\thandler_instance.set_category_method(Constants.REQUEST_CATEGORY_UPDATE)\n\t\thandler_instance.set_content_type('application/json')\n\t\thandler_instance.set_request(request)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def setVariable(self, *args):\n return _libsbml.EventAssignment_setVariable(self, *args)", "def create_new_var_and_update(self, name):\n\n self.create_new_var(name)\n self.list_widget.recreate_ui()", "def update(self, value):\n orig = get_nested_default(self._request.session, self._path)\n orig.update(value)\n set_nested(self._request.session, self._path, orig)\n # self._value = get_nested_default(self._session, self._path)\n self.save()", "def __setitem__(self, name, variable):\n vec = h.Vector()\n self._vectors[name] = vec\n vec.record(variable)", "def _update_varinfo(varinfo, data):\n varinfo_data = _get_varinfo(data)\n if \"vartype\" not in varinfo:\n varinfo.update(vartype=varinfo_data['vartype'])\n if \"ndim\" not in varinfo:\n varinfo.update(ndim=varinfo_data['ndim'])\n if \"size\" not in varinfo:\n varinfo.update(size=varinfo_data['size'])\n return varinfo", "def __setitem__(self, name, val):\n\n if name in self.vars:\n l[name].setVal(val)\n else:\n l[name] = YPFVal(name, val)", "def update(self, v, r):\n pass", "def update(self, val):\n self.current_val = val\n self.redraw()", "def run_modification(self, parameter, new_value):\n original_value = self.data[parameter]\n self.data[parameter] = new_value\n\n # Now that we've modified the value, we can make_request with no arguments to send off the request.\n self.make_request()\n\n self.data[parameter] = original_value", "def update_field(\n self, field, value,\n ):\n temp_cursor = user_db.cursor()\n\n sql = \"UPDATE users\"\n sql += \" SET \" + field + \"=\" + str(value)\n\n sql += \" WHERE user_id=\" + str(self.user_id)\n\n temp_cursor.execute(sql)\n user_db.commit()", "def variational_update(self):\n with self.elbo_check('update_p_allele_swap'):\n self.model.update_p_allele_swap()\n\n with self.elbo_check('p_cn'):\n self.model.update_p_cn()\n\n with self.elbo_check('p_breakpoint'):\n self.model.update_p_breakpoint()\n\n with self.elbo_check('p_outlier_total'):\n self.model.update_p_outlier_total()\n\n with 
self.elbo_check('p_outlier_allele'):\n self.model.update_p_outlier_allele()", "def update(self, value):\n log_gui.debug(\"update value of field %s with : %s\", repr(self._name), value)\n wid = self._store_widget\n wid.setProperty(\"python-object\", value)\n wid.emit(self._sig)", "def value(self, value):\n\t\toldvalue = self._value\n\t\tself._value = value\n\t\tif oldvalue != value:\n\t\t\tself.changed()", "def set_variable(self, name, value):\n # Note that \":=\" is used so that we can control the behavior for\n # both Makefile and CMake variables similarly.\n self.write_line(name + \" := \" + value)", "def __setattr__(self, name, value):\n if not name.endswith(\"_\"):\n self.__dict__[name] = value\n else:\n varname = name[:-1]\n if varname in self._varlist:\n self[:, self._varlist.index(varname)] = value\n else:\n self.append_var(varname, value)" ]
[ "0.8306532", "0.770032", "0.7636661", "0.74136305", "0.7011678", "0.68405515", "0.67276084", "0.6489138", "0.6488961", "0.64369726", "0.6304455", "0.6286994", "0.6245881", "0.6235783", "0.62295234", "0.6209103", "0.617981", "0.6172482", "0.61659956", "0.615926", "0.6130616", "0.6101558", "0.609538", "0.6059827", "0.60558254", "0.60521364", "0.60414135", "0.6031967", "0.6015659", "0.6015659", "0.6015659", "0.6015659", "0.60035676", "0.60035676", "0.5998117", "0.5997803", "0.59886056", "0.59813064", "0.5956043", "0.5949246", "0.5949246", "0.59383833", "0.5938283", "0.5902536", "0.58912563", "0.5886472", "0.5885464", "0.5885002", "0.58781636", "0.5869048", "0.586835", "0.5863607", "0.58348066", "0.58293486", "0.58293486", "0.5807619", "0.57820034", "0.57724476", "0.57352006", "0.5733529", "0.571976", "0.5717858", "0.5717711", "0.5715657", "0.57130784", "0.57096", "0.5702205", "0.5700233", "0.5685241", "0.5681195", "0.5675329", "0.5659967", "0.56555486", "0.56497914", "0.564877", "0.5647144", "0.56470287", "0.5641024", "0.56322336", "0.56267804", "0.5618921", "0.56101143", "0.56098986", "0.560589", "0.5591667", "0.5591418", "0.5589953", "0.5583711", "0.5579831", "0.557713", "0.5575696", "0.5566667", "0.5565018", "0.5559273", "0.5554407", "0.55275357", "0.55269396", "0.55214775", "0.5520626", "0.55082315" ]
0.6769582
6
Deletes a variable or multiple variables. If you specify a variable name, then that variable is deleted. If you specify a prefix and `recursive` is true, then all variables with that prefix are deleted. You must set `recursive` to true if you delete variables by prefix.
def DeleteVariable(self, request, context):
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _delete(self, variables):\n required_vars = ['container']\n variables_dict = self._get_vars(variables, required=required_vars)\n\n container_name = variables_dict.pop('container')\n object_name = variables_dict.pop('object', None)\n\n if object_name:\n self.swift.delete_object(container_name, object_name)\n else:\n self.swift.delete_container(container_name)\n\n self.state_change = True", "async def delete_variable_by_name(self, name: str):\n try:\n await self._client.delete(f\"/variables/name/{name}\")\n except httpx.HTTPStatusError as e:\n if e.response.status_code == 404:\n raise prefect.exceptions.ObjectNotFound(http_exc=e) from e\n else:\n raise", "def delete_variable(self, name):\n if name not in self._variables:\n logging.warning(\"Pipeline variable '%s' does not exist\", name)\n else:\n if isinstance(name, str):\n self._variables.pop(name)\n else:\n for var in name:\n self._variables.pop(var)\n return self", "def delete(self, prefix, paths):\n pass", "def removeVariable(self, name, delete = True):\r\n if name in self.variables:\r\n self.variables.remove(name)\r\n if delete and hasattr(self, name):\r\n delattr(self, name)", "def del_variable(self, name):\n return self.delete_variable(name)", "def del_variables(self, variables):\n variables = [variables] if isinstance(variables, str) else set(variables)\n indices = [\n index\n for index, variable in enumerate(self.variables)\n if variable in variables\n ]\n self.variables = np.delete(self.variables, indices, 0)\n self.cardinality = np.delete(self.cardinality, indices, 0)\n self.inhibitor_probability = [\n prob_array\n for index, prob_array in enumerate(self.inhibitor_probability)\n if index not in indices\n ]", "def delete_variable(self, id):\n\n\t\tif not isinstance(id, int):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: id EXPECTED TYPE: int', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables/'\n\t\tapi_path = api_path + str(id)\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)\n\t\thandler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def delete_keys(prefix, *args):\n rc = redis.StrictRedis(host=REDIS_SINGLE_HOST, port=REDIS_PORT, db=0)\n # 如果有多个参数,将多个参数拼接成一个key\n if args:\n for i in args:\n prefix = str(prefix) + str(i)\n keys = rc.keys(\"*\" + prefix + \"*\")\n for key in keys:\n rc.delete(key)", "def _remove_prefix(self, path, prefix):\n expression = f\"_remove_prefix({repr(path)}, {repr(prefix)})\"\n return eval(expression, self.bzl_globals)", "def delete(self):\n self.oxdb.execute(DELETE, self.variable_name, commit=True)\n self._exists = None", "def DeletePrefix(self, prefix):\n # We'll retry both the List and the batch deletion in the event of an\n # error. 
This lets us catch the \"GCS says files are there when they really\n # aren't\" problem.\n self._RunWithRetries(lambda: self._DeletePrefixInternal(prefix),\n self._CommonErrorMatcher)", "def remove_variables(project, env_spec_name, vars_to_remove, prepare_result=None):\n (env_prefix, status) = _prepare_env_prefix(project, env_spec_name, prepare_result, mode=provide.PROVIDE_MODE_CHECK)\n # we allow env_prefix of None, which means the env wasn't created so we won't\n # try to unset any values for the variable.\n if status is not None and not status:\n return status\n\n local_state = LocalStateFile.load_for_directory(project.directory_path)\n for varname in vars_to_remove:\n path_to_variable = _path_to_variable(env_spec_name, varname)\n\n if env_prefix is not None:\n _unset_variable(project, env_spec_name, env_prefix, varname, local_state)\n path_to_variable = _path_to_variable(env_spec_name, varname)\n project.project_file.unset_value(path_to_variable)\n project.project_file.save()\n local_state.save()\n\n return SimpleStatus(success=True, description=\"Variables removed from the project file.\")", "def remove(self, var_id):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n return self.base_request.request(\n 'service_environment_variable', 'DELETE', params=params,\n endpoint=self.settings.get('pine_endpoint')\n )", "def script_delete(ctx: click.Context, name):\n subcommand_script.cmd_delete(ctx.obj, name)", "def delete_variables(self, param_instance=None):\n\n\t\tif param_instance is not None and not isinstance(param_instance, ParameterMap):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: param_instance EXPECTED TYPE: ParameterMap', None, None)\n\t\t\n\t\thandler_instance = CommonAPIHandler()\n\t\tapi_path = ''\n\t\tapi_path = api_path + '/crm/v2/settings/variables'\n\t\thandler_instance.set_api_path(api_path)\n\t\thandler_instance.set_http_method(Constants.REQUEST_METHOD_DELETE)\n\t\thandler_instance.set_category_method(Constants.REQUEST_METHOD_DELETE)\n\t\thandler_instance.set_param(param_instance)\n\t\ttry:\n\t\t\tfrom zcrmsdk.src.com.zoho.crm.api.variables.action_handler import ActionHandler\n\t\texcept Exception:\n\t\t\tfrom .action_handler import ActionHandler\n\t\treturn handler_instance.api_call(ActionHandler.__module__, 'application/json')", "def del_regvar(*args):\n return _ida_frame.del_regvar(*args)", "def delete_znodes(cluster_name, path, recursive=False, del_snapshots=True):\n del_znode_query = del_snapshot_query = None\n if recursive:\n # monkey patch for delete znodes recursively\n target_path = path.rstrip(\"/\") + \"/\"\n del_znode_query = ZdZnode.delete().where(\n (ZdZnode.cluster_name == cluster_name) &\n ((ZdZnode.path.startswith(target_path)) | (ZdSnapshot.path == path))\n )\n del_snapshot_query = ZdSnapshot.delete().where(\n (ZdSnapshot.cluster_name == cluster_name) &\n ((ZdSnapshot.path.startswith(target_path)) | (ZdSnapshot.path == path))\n )\n else:\n del_znode_query = ZdZnode.delete().where(\n (ZdZnode.cluster_name == cluster_name) &\n (ZdZnode.path == path)\n )\n del_snapshot_query = ZdSnapshot.delete().where(\n (ZdSnapshot.cluster_name == cluster_name) &\n (ZdSnapshot.path == path)\n )\n del_znode_query.execute()\n if del_snapshots:\n del_snapshot_query.execute()", "def delete_param(param, verbose=False):\n try:\n if param == GLOBALNS:\n # not allowed to delete the root of the tree as it must always\n # have a value. 
the equivalent command is setting the root to an\n # empty dictionary\n get_param_server().setParam(GLOBALNS, {})\n if verbose:\n print(\"deleted ENTIRE parameter server\")\n else:\n get_param_server().deleteParam(param)\n if verbose:\n print(\"deleted parameter [%s]\"%param)\n except socket.error:\n raise RosParamIOException(\"Unable to communicate with master!\")", "def remove(self, var_id):\n\n params = {\n 'filter': 'id',\n 'eq': var_id\n }\n return self.base_request.request(\n 'device_service_environment_variable', 'DELETE', params=params,\n endpoint=self.settings.get('pine_endpoint')\n )", "def delete(path, recursive=False):\n fs.delete(path, recursive)", "def delRBVPNprefixlist(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n if kwargs['prefix_list_id'] is not None:\n prefix_list_id = kwargs['prefix_list_id']\n else:\n print(\"Please specify the prefix list ID to configure using --prefix-list-id. Use 'pyVMC.py rbvpn-prefix-list show --prefix-lists for a list.'\")\n sys.exit(1)\n response = remove_bgp_prefix_list_json(proxy, session_token, prefix_list_id)\n if response == 200:\n print(f'The BGP prefix list {prefix_list_id} has been deleted.')\n else:\n print(\"The prefix list was not deleted.\")\n sys.exit(1)", "def rm(args):\n args.delete = True\n return remove(args)", "def deleteNode(*args, **kwds):\n nodes = args\n if len(args) < 1:\n nodes = cmds.ls(sl=1)\n \n for node in nodes:\n node_lst = [node]\n if isinstance(node, (list, tuple)):\n node_lst = node\n\n for n in node_lst:\n if cmds.objExists(str(n)):\n cmds.delete(str(n), **kwds)\n else:\n cmds.warning(\"# Don’t exist - \" + node)", "def deepdel(self, key):\n if DEBUG:\n print(repr(self))\n if '.' in key:\n top, rest = key.split('.', 1)\n self[top].deepdel(rest)\n else:\n del self[key]", "def delete(self, key, recurse=None):\n assert not key.startswith('/')\n params = {}\n if recurse:\n params['recurse'] = '1'\n return self.agent.http.delete(\n lambda x: x.code == 200, '/v1/kv/%s' % key, params=params)", "def _del(self) -> None:\n self.variables.pop(prop_name, None)", "def delete_keys_with_prefix(prefix):\n rc = redis.StrictRedis(host=REDIS_SINGLE_HOST, port=REDIS_PORT, db=0)\n keys = rc.keys(\"*\" + prefix + \"*\")\n for key in keys:\n rc.delete(key)", "def removeEditVariable(self, variable: Variable, /) -> None:\n ...", "def removeRuleByVariable(self, *args):\n return _libsbml.Model_removeRuleByVariable(self, *args)", "def delete_nat_rule(**kwargs):\n proxy = kwargs['proxy']\n sessiontoken = kwargs['sessiontoken']\n nat_id = kwargs['objectname']\n tier1_id = kwargs['tier1_id']\n\n result = remove_sddc_nat_json(proxy, sessiontoken, nat_id, tier1_id)\n if result is not None:\n print(\"\\n\")\n params = {'proxy':proxy, 'sessiontoken':sessiontoken, 'objectname':nat_id, 'tier1_id':tier1_id}\n get_nat_rules(**params)\n else:\n print('Something went wrong. 
Please check your syntax and try again.')\n sys.exit(1)", "def delprofile(variable, account):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n\n if not account:\n account = mph.config[\"default_account\"]\n if not unlock_wallet(stm):\n return\n acc = Account(account, morphene_instance=stm)\n json_metadata = Profile(acc[\"json_metadata\"])\n\n for var in variable:\n json_metadata.remove(var)\n\n tx = acc.update_account_profile(json_metadata)\n tx = json.dumps(tx, indent=4)\n print(tx)", "def del_field(key, obj):\n\n val = obj\n subkeys = key.split('.')\n\n for subkey in subkeys[:-1]:\n if subkey not in val:\n return\n\n val = val[subkey]\n\n del val[subkeys[-1]]", "def deleteTagValue(fluiddb, objectId, path):\n return fluiddb.objects[objectId][path].delete()", "def delete(ctx, **_):\n # Delete the resource\n azure_config = ctx.node.properties.get('azure_config')\n if not azure_config.get(\"subscription_id\"):\n azure_config = ctx.node.properties.get('client_config')\n else:\n ctx.logger.warn(\"azure_config is deprecated please use client_config, \"\n \"in later version it will be removed\")\n resource_group_name = utils.get_resource_group(ctx)\n vm_name = ctx.instance.runtime_properties.get('virtual_machine')\n name = ctx.instance.runtime_properties.get('name')\n api_version = \\\n ctx.node.properties.get('api_version', constants.API_VER_COMPUTE)\n vm_extension = VirtualMachineExtension(azure_config, ctx.logger,\n api_version)\n utils.handle_delete(ctx, vm_extension, resource_group_name, name, vm_name)", "def delete(self, path):\n head = path[:-1]\n key = str(path[-1])\n if len(head):\n pth = self._path[:]\n pth.extend(stringify_keys(head))\n del get_nested_default(self._request.session, pth)[key]\n else:\n del get_nested_default(self._request.session, self._path)[key]\n self.save()", "def test_variables_id_delete(self):\n pass", "def delete(self, remote_path, recursive=False, storage_id=None):\n client, remote_path = self._get_storage(remote_path, storage_id=storage_id)\n return client.delete(remote_path, recursive)", "def removevars(self,subset): # 3\n num_ = None\n if num_ is None:\n num_ = len(subset)\n elif num_ != len(subset):\n raise IndexError(\"Inconsistent length of array subset\")\n if num_ is None: num_ = 0\n if subset is None: raise TypeError(\"Invalid type for argument subset\")\n if subset is None:\n subset_ = None\n else:\n try:\n subset_ = memoryview(subset)\n except TypeError:\n try:\n _tmparr_subset = array.array(\"i\",subset)\n except TypeError:\n raise TypeError(\"Argument subset has wrong type\")\n else:\n subset_ = memoryview(_tmparr_subset)\n \n else:\n if subset_.format != \"i\":\n subset_ = memoryview(array.array(\"i\",subset))\n \n res = self.__obj.removevars(num_,subset_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def RemoveVariable(self, e):\n if (not self.mainparent.file_loaded):\n msg = \"An input file must be loaded before a variable can be removed\"\n ShowMessage(msg, kind='warn')\n return\n if (self.mainparent.namelist is None):\n msg = \"Use the menu to select a namelist, first\"\n ShowMessage(msg, kind='info')\n return\n self.mainparent.statusbar.SetStatusText(\"Removing a variable\", 0)\n\n remove_name = AskText(\"Enter variable name to remove\", title=\"Remove Variable\")\n\n if (remove_name is not None):\n self.mainparent.input_file.namelists[self.mainparent.namelist].remove_variable(remove_name)\n\n # redraw stuff\n self.mainparent.statusbar.SetStatusText(\"Removed: 
{}\".format(remove_name), 0)\n self.mainparent.nmlpanel.update(unset_namelist=False) # update displayed namelist to include new entry", "def delete_parameter(request, parameter, **_kwargs):\n pass", "def op_delete(self, args):\n stack_level = 0\n if args != None:\n stack_level = int(args[0])\n self.require_stack(stack_level+1)\n if stack_level == None:\n self.stack.pop()\n else:\n self.stack.pop(-stack_level-1)", "def delete_by_path(data: Dict[str, T], path: Sequence[str]):\n del get_by_path(data, path[:-1])[path[-1]]", "def rm(args):\n try:\n opts, args = getopt(args, \"rRf\", [\"force\", \"recursive\"])\n except GetoptError as e:\n raise errors.PythonError(\"rm: %s\" % e, 1)\n force = False\n recursive = False\n for o, a in opts:\n if o in ('-f', '--force'):\n force = True\n elif o in ('-r', '-R', '--recursive'):\n recursive = True\n for f in args:\n if os.path.isdir(f):\n if not recursive:\n raise errors.PythonError(\"rm: cannot remove '%s': Is a directory\" % f, 1)\n else:\n shutil.rmtree(f, force)\n elif os.path.exists(f):\n try:\n os.unlink(f)\n except:\n if not force:\n raise errors.PythonError(\"rm: failed to remove '%s': %s\" % (f, sys.exc_info()[0]), 1)\n elif not force:\n raise errors.PythonError(\"rm: cannot remove '%s': No such file or directory\" % f, 1)", "def _DeletePrefixInternal(self, prefix):\n def _HandleResponse(unused_request_id, unused_response, exception):\n \"\"\"Handler with format dictated by BatchHttpRequest.\"\"\"\n if exception is not None:\n raise exception\n\n bucket, _ = self._ParseBucketAndPath(prefix)\n # The file names are returned as paths relative to the bucket.\n file_names = self.List(prefix)\n\n # TODO(b/123649389): With a request over 1000 objects we have started\n # getting a BatchError. This seems to be new behavior and is undocumented.\n # Working around it for now.\n max_batch_size = 1000\n if file_names:\n for i in range(0, len(file_names), max_batch_size):\n batch = self._service.new_batch_http_request()\n for name in file_names[i:min(i + max_batch_size, len(file_names))]:\n batch.add(\n self._service.objects().delete(bucket=bucket, object=name),\n callback=_HandleResponse)\n batch.execute()", "def remove_variable(self, obj):\n try:\n index = self.variables.index(obj)\n self.variables.pop(index)\n return True\n except ValueError:\n # the object cannot be removed because it is not present\n logger.warn(\"Variable {0} not present, can't be remove from the list\".format(obj))\n return False", "def delete(obj, path, ignore_missing=False):\n return glom(obj, Delete(path, ignore_missing=ignore_missing))", "def delete_namespaced_project(self, name, **kwargs):\n\n all_params = ['name', 'pretty']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method delete_namespaced_project\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'name' is set\n if ('name' not in params) or (params['name'] is None):\n raise ValueError(\"Missing the required parameter `name` when calling `delete_namespaced_project`\")\n\n resource_path = '/oapi/v1/projects/{name}'.replace('{format}', 'json')\n path_params = {}\n if 'name' in params:\n path_params['name'] = params['name']\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n 
header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'DELETE',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='UnversionedStatus',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def remove_variable(self, standard_name):\n if standard_name in self:\n del self[standard_name]\n # End if", "def rm_path():\n shutil.rmtree(options.input_path)", "def delete_pods(\n name: str = None,\n ns: str = \"default\",\n label_selector: str = None,\n secrets: Secrets = None,\n):\n _log_deprecated(\"delete_pods\", \"terminate_pods\")\n return terminate_pods(\n name_pattern=name, label_selector=label_selector, ns=ns, secrets=secrets\n )", "def __delitem__(self, key: str) -> None:\n del self.variables[key]", "def remove(self, rec=1, force=1):\r\n assert rec, \"svn cannot remove non-recursively\"\r\n if not self.check(versioned=True):\r\n # not added to svn (anymore?), just remove\r\n py.path.local(self).remove()\r\n return\r\n flags = []\r\n if force:\r\n flags.append('--force')\r\n self._svn('remove', *flags)", "def remove_prefix(z, prefix):\n if z.startswith(prefix):\n return re.sub(r\"^{}\".format(prefix), \"\", z)\n else:\n return z", "def delete(constraint,check=True):\n output = db.query(['jobid','fwid','storage_directory'],constraint,order='jobid')\n for jid,fwid,path in output: \n lpad.archive_wf(fwid) # archive firework\n db.updateDB('deleted','jobid',jid,1,tableName='completed') # note deletion in deleted column\n if not check or ask('Do you want to delete %s?'%path): # delete storage directory \n if 'scratch' in path: shutil.rmtree(path)\n elif 'nfs' in path: \n d = subprocess.Popen(['ssh','ksb@suncatls1.slac.stanford.edu', 'rm -r %s'%path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n dout, err = d.communicate()\n else: raise NotImplementedError\n print 'deleted!'", "def clean(params):\n if tf.gfile.IsDirectory(params.ckpt_path):\n tf.gfile.DeleteRecursively(params.ckpt_path)\n\n if tf.gfile.IsDirectory(params.summary_path):\n tf.gfile.DeleteRecursively(params.summary_path)\n\n if tf.gfile.IsDirectory(params.result_path):\n tf.gfile.DeleteRecursively(params.result_path)\n\n if tf.gfile.IsDirectory(params.log_path):\n tf.gfile.DeleteRecursively(params.log_path)\n\n if tf.gfile.IsDirectory(\"__pycache__\"):\n tf.gfile.DeleteRecursively(\"__pycache__\")\n\n if tf.gfile.IsDirectory(params.config_path):\n tf.gfile.DeleteRecursively(params.config_path)", "def delete(dtype, name, rootdir=None):\n # type and the name\n # delete them\n num_deleted = 0\n for dataset in FreezableAPI.datasets(dtype,name,rootdir=rootdir,fullpath=True):\n # delete it\n shutil.rmtree(dataset)\n num_deleted += 1\n return num_deleted", "def visit_Delete(self, node):\n self.generic_visit(node)\n target = get_single_target(node)\n if isinstance(target, ast.Subscript):\n fun = to_attribute(self.operator, 'delitem')\n args = [ target.value, self.index_to_expr(target.slice) ]\n return ast.Expr(to_call(fun, args))\n return node", "def delete_objects(data,\n path = None,\n submode_adjustor = None):\n if debug.description(): # description debugging\n print 
'delete_objects', data, path\n\n if not path:\n raise error.CommandDescriptionError(\"Need path to delete an object\")\n\n data = dict(data)\n bigdb = bigsh.bigdb\n bigdb.canonicalize_values_of_path(path, data)\n\n # if the node type under configuration is a LIST\n # (or LEAF_LIST), this likely wants to add a new\n # item to the list.\n (schema, items_matched) = bigdb.schema_of_path(path, {} )\n if schema == None:\n print 'Missing Schema for', path\n return\n node_type = schema['nodeType']\n if debug.description(): # description debugging\n print 'delete_objects:', path, node_type\n\n if node_type == 'LIST':\n list_nodes = schema['listElementSchemaNode']\n selection = {}\n for key in list_nodes.get('keyNodeNames', []):\n if key in data:\n full_path = '%s/%s' % (path, key)\n selection[full_path] = data[key]\n del data[key]\n # populate for fields which are key's\n for key in list_nodes.get('keyNodeNames', []):\n if not key in selection:\n for row in command.bigsh.mode_stack:\n if 'name' in row and row['name'] == key:\n if 'obj' in row:\n selection[key] = row['obj']\n bigdb.add_mode_stack_paths(selection)\n if submode_adjustor:\n command.submode_adjustor_invoke(submode_adjustor,\n path,\n selection,\n data,\n 'delete')\n\n oper = bigdb.canonicalize_values_for_delete(path,\n data,\n selection,\n list_nodes['childNodes'])\n if oper == 'POST':\n bigdb.post(path, data, selection)\n else:\n # bigdb.delete(path, data, selection) perhaps date <- {}\n bigdb.delete(path, data, selection)\n return\n if node_type == 'LEAF_LIST':\n if debug.description(): # description debugging\n print 'delete_object: leaf-list needs implementation:LEAF_LISTN'\n selection = {}\n bigdb.add_mode_stack_paths(selection)\n leaf_node = schema['leafSchemaNode']\n type_node = leaf_node['typeSchemaNode']\n split_path = path.split('/')\n item_name = split_path[-1]\n item = None\n if item_name in data:\n item = data[item_name]\n elif type_node['name'] in data:\n item = data[type_node['name']]\n del data[type_node['name']]\n if debug.description(): # description debugging\n print 'DATUM', data, 'SELECTUM', selection, 'ITEM', item\n # Currently, 'add/delete' for specific elements isn't\n # directly support in the BigDB REST API's. 
\n split_path = path.split('/')\n base_path = '/'.join(split_path[:-1])\n (schema, result) = bigdb.schema_and_result(base_path, selection)\n collection = result.expect_single_result(failed_result = [])\n item_name = split_path[-1]\n if item_name in collection:\n collection = collection[item_name]\n if debug.description(): # description debugging\n print 'COLLECTION', collection, ' REMOVE ', item\n if item in collection:\n collection = [x for x in collection if x != item]\n bigdb.put(path, collection, selection, 'query')\n return\n raise error.CommandSemanticError('%s \"%s\" '\n 'not currently configured' %\n (item_name, item))\n return\n if node_type == 'CONTAINER':\n container_nodes = schema.get('childNodes')\n\n selection = {}\n bigdb.add_mode_stack_paths(selection)\n\n for (n,v) in data.items():\n oper = bigdb.canonicalize_values_for_delete(path,\n data,\n selection,\n container_nodes)\n if oper == 'PATCH':\n bigdb.patch(path, data, selection)\n else:\n item_path = '%s/%s' % (path, n)\n bigdb.delete(item_path, {}, selection)\n return\n\n bigsh.bigdb.add_mode_stack_paths(data)\n bigsh.bigdb.delete(path, data)", "def drop_component(self, var_name):\n if self.has_component(var_name):\n iv = self._var_names.index(var_name)\n del self._var_names[iv]\n self._vals = np.delete(self._vals, self._vals[iv])", "def delete_variable(self, columns):\n if not isinstance(columns, (list, tuple)):\n columns = [columns]\n for col in columns:\n if isinstance(col, str):\n col = [i for i, v in enumerate(self.list) if v.name == col][0]\n self.list.pop(col)", "def remove_prefix(word, prefixes=[]):\n result = word\n # Convert prefixes to list if user give string\n if not isinstance(prefixes, list):\n prefixes = [prefixes]\n for prefix in prefixes:\n if prefix == word[:len(prefix)]:\n result = word[len(prefix):]\n break\n return result", "def clean_up_variables_and_expressions(xblock_id, connection):\n \n cursor = connection.cursor()\n \n # remove variables\n VARIABLES_REMOVE_QUERY = (\"DELETE FROM edxapp.qgb_variable WHERE xblock_id = '\" + xblock_id + \"'\")\n cursor.execute(VARIABLES_REMOVE_QUERY)\n \n cursor.close()", "def dfs_rm(self, path, recursive=False):\n cmd = \"hdfs dfs -rm \"\n if recursive:\n cmd += \"-r \"\n out, err = self.execute_command(cmd + path, no_exception=True)\n if out.startswith(\"Moved\"):\n return out, err\n else:\n raise RuntimeError(\n \"unable to remove \" +\n path +\n \"\\nOUT\\n\" +\n out +\n \"\\nERR:\\n\" +\n err)", "def delete_key_vault_command(client: KeyVaultClient, args: dict[str, Any], params: dict[str, Any]) -> CommandResults:\n\n vault_name = args['vault_name']\n # subscription_id and resource_group_name arguments can be passed as command arguments or as configuration parameters,\n # if both are passed as arguments, the command arguments will be used.\n subscription_id = get_from_args_or_params(params=params, args=args, key='subscription_id')\n resource_group_name = get_from_args_or_params(params=params,\n args=args, key='resource_group_name')\n\n response = client.delete_key_vault_request(subscription_id=subscription_id,\n resource_group_name=resource_group_name,\n vault_name=vault_name)\n message = \"\"\n if response.get('status_code') == 200:\n message = f'Deleted Key Vault {vault_name} successfully.'\n elif response.get('status_code') == 204:\n message = f'Key Vault {vault_name} does not exists.'\n\n return CommandResults(\n readable_output=message\n )", "def clear(self, prefix=PREFIX):\n for key in self.get_keys():\n # delete files in folder by not actual folder\n if 
key.startswith(prefix) and prefix + \"/\" != key:\n self.delete(key)", "def __del__ ( self ) :\n \n if self.name and self.name in self.__pdf_names :\n self.__pdf_names.remove ( self.name ) \n while self.__local_names :\n a = self.__local_names.pop ()\n if a in self.__var_names :\n self.__var_names.remove ( a )", "def removevars(self,subset_):\n num_ = None\n if num_ is None:\n num_ = len(subset_)\n elif num_ != len(subset_):\n raise IndexError(\"Inconsistent length of array subset\")\n if subset_ is None:\n raise ValueError(\"Argument subset cannot be None\")\n if subset_ is None:\n raise ValueError(\"Argument subset may not be None\")\n if isinstance(subset_, numpy.ndarray) and subset_.dtype is numpy.dtype(numpy.int32) and subset_.flags.contiguous:\n _subset_copyarray = False\n _subset_tmp = ctypes.cast(subset_.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n elif subset_ is not None:\n _subset_copyarray = True\n _subset_np_tmp = numpy.zeros(len(subset_),numpy.dtype(numpy.int32))\n _subset_np_tmp[:] = subset_\n assert _subset_np_tmp.flags.contiguous\n _subset_tmp = ctypes.cast(_subset_np_tmp.ctypes._as_parameter_,ctypes.POINTER(ctypes.c_int32))\n else:\n _subset_copyarray = False\n _subset_tmp = None\n \n res = __library__.MSK_XX_removevars(self.__nativep,num_,_subset_tmp)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)", "def svn_fs_delete(*args):\r\n return _fs.svn_fs_delete(*args)", "def delete(*args, all: bool=True, attribute: Union[AnyStr, List[AnyStr]]=\"\", channels:\n bool=True, constraints: bool=True, constructionHistory: bool=True, controlPoints:\n bool=False, expressions: bool=True, hierarchy: AnyStr=\"\", inputConnectionsAndNodes:\n bool=True, motionPaths: bool=True, shape: bool=True, staticChannels: bool=True,\n timeAnimationCurves: bool=True, unitlessAnimationCurves: bool=True, **kwargs)->None:\n pass", "def _clean_salt_variables(params, variable_prefix=\"__\"):\n list(list(map(params.pop, [k for k in params if k.startswith(variable_prefix)])))\n return params", "def delete(self, package=\"\", uid=\"\", params={}):\n return self.__post('delete-nat-rule', package, uid, params)", "def delete_account(self, account_name, recursive=False):\n self.log.debug(\"Deleting account: \" + account_name)\n params = {\n 'AccountName': account_name,\n 'Recursive': recursive\n }\n self.connection.get_response('DeleteAccount', params)", "def rm(path, recursive=False):\n path = normalized_path_obj(path)\n if recursive:\n shutil.rmtree(path)\n else:\n if path.is_file():\n path.unlink()\n else:\n path.rmdir()", "def delete (root, subkey=\"\"):\n key = registry (root, accept_value=False).get_key (subkey)\n for k in key.iterkeys ():\n k.delete ()\n win32api.RegDeleteKey (key.parent ().pyobject (), key.name)\n return key", "def request_workspace_delete(self, request):\n unique_id = request['uuid']\n# print('###', user_id)\n# print('###', alias)\n# print('###', source_uuid)\n \n uuid_mapping = self._get_uuid_mapping_object(user_id)\n alias = uuid_mapping.get_alias(unique_id)\n self.delete_workspace(unique_id=unique_id)\n \n response = {'alias': alias, \n 'uuid': unique_id}\n \n return response", "def magic_reset(self, parameter_s=''):\n\n ans = raw_input(\n \"Once deleted, variables cannot be recovered. Proceed (y/n)? 
\")\n if not ans.lower() == 'y':\n print 'Nothing done.'\n return\n for i in self.magic_who_ls():\n del(self.locals[i])", "def delete(ribo, name, force):\n\n delete_rnaseq_wrapper(ribo_file = ribo, \n name = name,\n force = force)", "def delete_stored_project():\n client = RequestManager()\n client.set_method(\"DELETE\")\n client.set_endpoint(\"/projects/{0}\".format(STORED_ID['project_id']))\n client.execute_request()", "def delete(self, app_prefix, path):\n return self.handle_request('delete', app_prefix, path)", "def remove_assignment(self, var):\n\n del self.variable_to_value[var]", "def generate_delete(stmt, ref, path):\n path_params = get_input_path_parameters(path)\n delete = {}\n generate_api_header(stmt, delete, 'Delete', path)\n # Input parameters\n if path_params:\n delete['parameters'] = create_parameter_list(path_params)\n # Responses\n response = create_responses(stmt.arg)\n delete['responses'] = response\n return delete", "def delete(**args):\n\tglobal _objstore\n\t_objstore = _objstore or ObjStore()\n\n\t_objstore.delete(args['type'], args['name'])\n\treturn {'message':'ok'}", "def clean(ctx):\n header(clean.__doc__)\n with ctx.cd(ROOT):\n for pattern in CLEAN_PATTERNS:\n info(\"Removing {0}\", pattern)\n ctx.run(\"rm -rf {0}\".format(pattern))", "def delete(self, package='', name='', uid='', params={}):\n return self.__post('delete-nat-section', package, name, uid, params)", "def delete_deployment(request, deployment, **_kwargs):\n pass", "def delete(self):\n pdbox._args.get(\"dryrun\") or shutil.rmtree(self.path)\n pdbox.info(\"Deleted %s/\" % self.path)", "def variable_parser(var_list, prefix):\r\n ret_list = []\r\n for var in var_list:\r\n varname = var.name\r\n varprefix = varname.split('/')[0]\r\n if varprefix == prefix:\r\n ret_list.append(var)\r\n elif prefix in varname:\r\n ret_list.append(var)\r\n return ret_list", "def variable_parser(var_list, prefix):\r\n ret_list = []\r\n for var in var_list:\r\n varname = var.name\r\n varprefix = varname.split('/')[0]\r\n if varprefix == prefix:\r\n ret_list.append(var)\r\n return ret_list", "def delete(args, config):\n print('Deletes a selected HPC fleet with name \"{}\"'.format(args.fleet_name))", "def remove(args):\n files = []\n for path in args.files:\n if os.path.isdir(path):\n ft = filetree(path)\n files.extend(ft.filelist())\n else:\n files.append(path)\n for path in files:\n relpath = os.path.normpath(os.path.relpath(path, args.base))\n if relpath in args.cache:\n del args.cache[args.cache.index(relpath)]\n if args.delete and os.path.exists(path):\n os.remove(path)\n args.update = True\n return", "def test_variable_delete(self):\n self.trace('del x', env={'x': 1})\n\n events = self.variable_events\n self.assertEqual(len(events), 1)\n event = events[0]\n self.assertIsInstance(event, TraceDelete)\n self.assertEqual(event.name, 'x')", "def rm(self, path):\n try:\n basedir, item = os.path.split(path)\n postdata = codecs.encode(json.dumps({ 'baseDir': basedir, 'items': [ item ] }), 'utf-8')\n self._urlopen('/api/fileops/delete', postdata).read()\n except HTTPError as err:\n raise RuntimeError(\"Unable to delete '{}'\".format(path))", "def delete_dataset(dataset_path):\n force_rmtree(dataset_path)", "def delve(o,*k):\n\treturn delve(o[k[0]],*k[1:]) if len(k)>1 else o[k[0]]", "def rm_command(ignore_missing, star_silent, recursive, enable_globs,\n endpoint_plus_path, label, submission_id, dry_run, deadline,\n skip_activation_check, notify,\n meow, heartbeat, polling_interval, timeout):\n endpoint_id, path = 
endpoint_plus_path\n\n client = get_client()\n\n # attempt to activate unless --skip-activation-check is given\n if not skip_activation_check:\n autoactivate(client, endpoint_id, if_expires_in=60)\n\n delete_data = DeleteData(client, endpoint_id,\n label=label,\n recursive=recursive,\n ignore_missing=ignore_missing,\n submission_id=submission_id,\n deadline=deadline,\n skip_activation_check=skip_activation_check,\n interpret_globs=enable_globs,\n **notify)\n\n if not star_silent and enable_globs and path.endswith('*'):\n # not intuitive, but `click.confirm(abort=True)` prints to stdout\n # unnecessarily, which we don't really want...\n # only do this check if stderr is a pty\n if (err_is_terminal() and\n term_is_interactive() and\n not click.confirm(\n 'Are you sure you want to delete all files matching \"{}\"?'\n .format(path), err=True)):\n safeprint('Aborted.', write_to_stderr=True)\n click.get_current_context().exit(1)\n delete_data.add_item(path)\n\n if dry_run:\n formatted_print(delete_data, response_key='DATA',\n fields=[('Path', 'path')])\n # exit safely\n return\n\n # Print task submission to stderr so that `-Fjson` is still correctly\n # respected, as it will be by `task wait`\n res = client.submit_delete(delete_data)\n task_id = res['task_id']\n safeprint('Delete task submitted under ID \"{}\"'.format(task_id),\n write_to_stderr=True)\n\n # do a `task wait` equivalent, including printing and correct exit status\n task_wait_with_io(meow, heartbeat, polling_interval, timeout, task_id,\n client=client)", "def remove_variables(self):\n self.variables = []", "def op_remove(template_data, field_hierarchy_list, value):\n op_trace = []\n\n def match_value(elem, field):\n if isinstance(elem, dict) and elem.get(field) == value:\n op_trace.append('Template {name}, removed {path}: {value}'.format(\n name=template_data['templateName'],\n path='/'.join(field_hierarchy_list),\n value=value))\n return True\n else:\n return False\n\n def remove(json_obj, search_list):\n if len(search_list) == 0:\n return\n\n if isinstance(json_obj, dict):\n for k, v in json_obj.items():\n if k == search_list[0]:\n if len(search_list) > 1:\n remove(v, search_list[1:])\n else:\n remove(v, search_list)\n\n elif isinstance(json_obj, list):\n if len(search_list) == 1:\n json_obj[:] = [elem for elem in json_obj if not match_value(elem, search_list[0])]\n\n for elem in json_obj:\n remove(elem, search_list)\n\n remove(template_data, field_hierarchy_list)\n\n return op_trace", "def delete_expression(DomainName=None, ExpressionName=None):\n pass", "def delete_file(filename, sudo=True):\n LOG.info(\"Deleting file {}\".format(filename))\n cmd = \"rm {}\".format(filename)\n _exec_cmd(cmd=cmd, sudo=sudo, fail_ok=False)" ]
[ "0.6082754", "0.5931479", "0.5894654", "0.57672286", "0.5762076", "0.566055", "0.560499", "0.545678", "0.53137845", "0.5311779", "0.52954566", "0.5270962", "0.52198213", "0.51949286", "0.5131959", "0.51042974", "0.5100813", "0.5093037", "0.5086412", "0.5080377", "0.5057334", "0.50514454", "0.50270927", "0.50254977", "0.49983487", "0.49552593", "0.49287984", "0.4923075", "0.48910415", "0.48854893", "0.48754916", "0.4854954", "0.48542422", "0.48509443", "0.4846274", "0.48251918", "0.48193535", "0.48161644", "0.48148856", "0.4804702", "0.4787996", "0.47748232", "0.47716188", "0.47648618", "0.47382858", "0.4725908", "0.47053066", "0.47031286", "0.46931186", "0.4691612", "0.4690702", "0.46740842", "0.46594188", "0.46540722", "0.46426827", "0.46416816", "0.46415785", "0.46371332", "0.46245158", "0.4624199", "0.4618151", "0.46108186", "0.46031982", "0.4596462", "0.45858085", "0.45789763", "0.45782426", "0.45686305", "0.4544456", "0.45380864", "0.45378503", "0.4537263", "0.4534473", "0.45329192", "0.4528217", "0.45250067", "0.45244974", "0.45224613", "0.45214674", "0.45177543", "0.4514179", "0.4501818", "0.4497144", "0.44934472", "0.44866696", "0.44858122", "0.44850358", "0.4479059", "0.44764864", "0.4469684", "0.44661665", "0.44637108", "0.44621792", "0.44598052", "0.44568783", "0.4456666", "0.4455346", "0.44517544", "0.4450873", "0.44500276" ]
0.5528326
7
List waiters within the given configuration.
def ListWaiters(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def waiters(self):\n waiters = []\n\n for name, item in self._definition.get('waiters', {}).items():\n name = self._get_name('waiter', Waiter.PREFIX + name)\n waiters.append(Waiter(name, item))\n\n return waiters", "def get_list_of_configlets(configlets):\n\n futures_list = []\n results = []\n\n with ThreadPoolExecutor(max_workers=40) as executor:\n for configlet in configlets:\n futures = executor.submit(clnt.api.get_configlet_by_name, configlet)\n futures_list.append(futures)\n\n for future in futures_list:\n try:\n result = future.result(timeout=60)\n results.append(result)\n except Exception:\n results.append(None)\n return results", "def cronWaitingList(self, date):\n match = {\"task_type\": \"crontab\", \"task_day\": date, \"status\": \"waiting\"}\n l = []\n for doc in self.search(match):\n l.append(doc)\n return l", "def get_waiting_jobs(self):\n return []", "def list():\n # Calling config file\n cf = config.ReadFile(config_file)\n user = cf[\"authentication\"][\"user\"]\n\n l = []\n for job in cron:\n l.append(job)\n return l", "def watch_list(self) -> list:\n return []", "def create_checkers(config):\n\n checkers = []\n if 'checkers' in config:\n for checker_name, checker_config in config['checkers'].iteritems():\n if checker_name in __checkers:\n configs = None\n if type(checker_config) == list:\n configs = checker_config\n else:\n configs = [checker_config]\n for config in configs:\n ch = __checkers[checker_name]()\n ch.set_config(config)\n if ch:\n checkers.append(ch)\n return checkers", "def waitables(self):\n return (), (), ()", "def get_config(config):\n return ['-deadlock'] if 'ignore deadlock' in config else []", "def list(self):\n self.background_scheduler.print_jobs()", "def _ls_waiting_jobs(self):\n \n jobs = [j for j in os.listdir(pjoin(self._jobsdir, \"00_waiting\")) if j.endswith(self._job_ext)]\n \n if self._job_filter:\n jobs = [j for j in jobs if self._job_filter(pjoin(self._jobsdir, \"00_waiting\", j), j)]\n \n return jobs", "def wait_on_cluster_conditions(cluster, waiters):\n results = []\n start = datetime.datetime.now()\n while waiters:\n new_waiters = []\n for waiter in waiters:\n type = waiter.get(\"type\")\n name = waiter.get(\"name\")\n timeout = waiter.get(\"timeout\", 1800) # 30 minutes\n expiry = waiter.get(\"expiry\")\n namespace = waiter.get(\"namespace\", \"default\")\n if timeout:\n if not expiry:\n waiter[\"expiry\"] = start + \\\n datetime.timedelta(seconds=timeout)\n if datetime.datetime.now() > waiter[\"expiry\"]:\n waiters = []\n waiter.pop('expiry')\n return None, f\"Waiter: {waiter} expired on cluster: {cluster.id}\" # noqa\n if type == \"ingress\":\n ingress = cluster.ctl.get_ingress(\n name=name, namespace=namespace)\n ips = ingress.get(\"ips\")\n hostnames = ingress.get(\"hostnames\")\n if ips or hostnames:\n waiter.update({\"result\": ingress})\n waiter.pop(\"expiry\", None)\n results.append(waiter)\n else:\n new_waiters.append(waiter)\n waiters = new_waiters\n sleep(5)\n return results, None", "def get_binners(config):\n binners = []\n if config[\"binning\"][\"metabat\"]:\n binners.append(\"metabat\")\n if config[\"binning\"][\"concoct\"]:\n binners.append(\"concoct\")\n if config[\"binning\"][\"maxbin\"]:\n binners.append(\"maxbin\")\n return binners", "def wait_for_workers(self):\r\n stop = False\r\n workers = self.aggregator.get_participants()\r\n\r\n while not stop: \r\n try:\r\n with self.aggregator:\r\n resp = self.aggregator.receive(1)\r\n participant = resp.notification['participant']\r\n workers.append(participant)\r\n 
print('Task %s: participant %s has joined' % (self.task_name, participant))\r\n except Exception as err:\r\n print(\"Task %s: joined %d participants out of %d\" % (self.task_name, len(workers), self.Nworkers))\r\n #print(err)\r\n #print('Check here: error')\r\n #import code\r\n #code.interact(local=locals())\r\n pass\r\n\r\n if len(workers) == self.Nworkers:\r\n stop = True\r\n\r\n workers = self.aggregator.get_participants()\r\n return list(workers.keys())", "def list(self, config_path: str, results_filter: Optional[ObjectType]) -> List[str]:\n ...", "def list(self):\n for item in self._config:\n item.list()", "def PingWorkers(config, wait_time = None):\n if wait_time:\n wait_time = int(wait_time)\n if not config.HasCommandChannels():\n raise ConfigException(\"No URL found for sending command messages. Update \"\n \"your cluster configuration.\")\n for node in Worker.PingWorkers(config.command_sender,\n config.command_response_receiver, wait_time):\n print \" \".join(map(str, node))", "def waiting_clients(self):\n return self.storage.iterkeys()", "def list_configurations(configurationType=None, filters=None, maxResults=None, nextToken=None, orderBy=None):\n pass", "async def list_tasks():", "def _config_list(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n lines = []\n for config in res['configs']:\n line = '* ' if config['current'] else ' '\n\n if ctx.verbose:\n line += config['mtime'] + ' '\n\n line += config['name']\n lines.append(line)\n\n return \"\\n\".join(lines)", "def download_listings(self):\n ExecConfigMethod(\n self.api_session, channel_id=self.channel_id, source=self.source,\n property_name='DownloadListings', function_name='DownloadListings')", "def watch_deployment_config_list(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_deployment_config_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/deploymentconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return 
response", "def list_configurations(MaxResults=None, NextToken=None):\n pass", "def _get_monitor_tasks(self, desired_config):\n create_monitors = list()\n delete_monitors = list()\n update_monitors = list()\n\n for hm_type in ['http', 'https', 'tcp', 'icmp', 'udp']:\n existing = self._bigip.get_monitors(hm_type)\n config_key = \"{}_monitors\".format(hm_type)\n desired = desired_config.get(config_key, dict())\n\n (create_hm, update_hm, delete_hm) = (\n self._get_resource_tasks(existing, desired)[0:3])\n\n create_monitors += create_hm\n update_monitors += update_hm\n delete_monitors += delete_hm\n\n return (create_monitors, update_monitors, delete_monitors)", "def cmd_list_resources(config=DEFAULT_LINUX_PATH):\n config = load_config_file(expand_config_path(config))\n px = connection_proxmox(config[\"proxmox\"])\n try:\n if config[\"pools\"]:\n l, h = list_resources(px, config[\"pools\"])\n return tabulate(l, h)\n else:\n print(\"Dick 'pools' is empty\")\n except KeyError:\n print(\"Missing 'pools' dict in config file\")\n sys.exit(1)", "def wait_children(self, timeout=None):\n self.join_children(timeout=timeout)\n return [x.get() for x in self.children]", "def get_job_listings(self):\r\n\r\n for attempt in range(5):\r\n try:\r\n job_listings = WebDriverWait(self.driver, 8).until(\r\n EC.presence_of_all_elements_located((By.XPATH, '//li[@class=\"jobs-search-results__list-item occludable-update p0 relative ember-view\"]')))\r\n except Exception as e:\r\n print('An error occurred: ', e)\r\n driver.refresh()\r\n else:\r\n job_results = self.driver.find_element_by_xpath('//small[@class=\"display-flex t-12 t-black--light t-normal\"]')\r\n job_results_num = str(job_results.text).split()[0].replace(',', '')\r\n first_page_url = self.driver.current_url\r\n\r\n for job in job_listings:\r\n self.driver.implicitly_wait(5)\r\n mouse = ActionChains(self.driver).move_to_element(job)\r\n mouse.perform()\r\n self.apply_to_job(job)\r\n\r\n if int(job_results_num) > 24:\r\n time.sleep(2)\r\n all_pages = self.driver.find_element_by_xpath('//li[@class=\"artdeco-pagination__indicator artdeco-pagination__indicator--number ember-view\"]')\r\n last_page = all_pages[len(all_pages)-1].text\r\n\r\n last_page_int = int(re.sub(r'[^/d]', '', last_page)) # Replace any character except the blank space with \"\"\r\n get_last_page = self.driver.find_element_by_xpath(\"//button[@aria-label='Page \"+str(total_pages_int)+\"']\")\r\n get_last_page.send_keys(Keys.RETURN)\r\n last_page_url = self.driver.current_url\r\n total_jobs = int(last_page.split('start=', 1)[1])\r\n\r\n # Go through all pages and apply\r\n for page in range(25, last_page_int):\r\n self.driver.get(first_page_url + '&start=' + str(page))\r\n time.sleep(3)\r\n for attempt in range(5):\r\n try:\r\n new_job_listings = WebDriverWait(self.driver, 8).until(\r\n EC.presence_of_all_elements_located((By.XPATH, '//li[@class=\"jobs-search-results__list-item occludable-update p0 relative ember-view\"]')))\r\n except Exception as e:\r\n print('An error occurred: ', e)\r\n driver.refresh()\r\n else:\r\n for new_job in new_job_listings:\r\n self.driver.implicitly_wait(5)\r\n mouse_new = ActionChains(self.driver).move_to_element(new_job)\r\n mouse_new.perform()\r\n self.apply_to_job(new_job)\r\n else:\r\n print('You have applied to all jobs available. 
Closing program...')\r\n time.sleep(3)\r\n self.driver.quit()", "def test_list_config_nodes(self):\n with self.override_role():\n self.config_client.list_config_nodes()", "def list_notebook_instance_lifecycle_configs(NextToken=None, MaxResults=None, SortBy=None, SortOrder=None, NameContains=None, CreationTimeBefore=None, CreationTimeAfter=None, LastModifiedTimeBefore=None, LastModifiedTimeAfter=None):\n pass", "def watch_build_config_list(self, **kwargs):\n\n all_params = ['pretty', 'label_selector', 'field_selector', 'watch', 'resource_version', 'timeout_seconds']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method watch_build_config_list\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1/watch/buildconfigs'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'pretty' in params:\n query_params['pretty'] = params['pretty']\n if 'label_selector' in params:\n query_params['labelSelector'] = params['label_selector']\n if 'field_selector' in params:\n query_params['fieldSelector'] = params['field_selector']\n if 'watch' in params:\n query_params['watch'] = params['watch']\n if 'resource_version' in params:\n query_params['resourceVersion'] = params['resource_version']\n if 'timeout_seconds' in params:\n query_params['timeoutSeconds'] = params['timeout_seconds']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['*/*'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='JsonWatchEvent',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def list(self):\n\n for job_name in self.upstart.get_all_jobs():\n yield self.get_service(job_name)", "def wait(self):\n [p.join() for p in self._downloaders]\n self._ckq.join()\n [p.terminate() for p in self._checkers]\n [p.join() for p in self._checkers]", "def listofParticipants():\n dirs1 = os.listdir(conf.participant_dir)\n for user in dirs1:\n direct=participant_dir + user + '/'\n previous={}\n print \"Checking for user %s\" % user\n for y in os.listdir(direct):\n if os.path.isdir(direct+'/'+y) and y[0] !='.':\n previous[y] = subprocess.check_output(['/usr/bin/git',\n 'log','-1',\n '--oneline',y],\n cwd=direct)\n subprocess.call(['/usr/bin/git', 'reset', '--hard', 'HEAD'], cwd=direct)\n subprocess.call(['/usr/bin/git', 'clean', '-d', '-fx', '\"\"'], cwd=direct)\n subprocess.call(['/usr/bin/git', 'pull', '-s', 'recursive', '-X', 'theirs'], cwd=direct)\n\n for y in os.listdir(direct):\n if os.path.isdir(direct+'/'+y) and y[0] !='.':\n after = subprocess.check_output(['/usr/bin/git',\n 'log','-1',\n '--oneline',y],\n cwd=direct)\n if y not in previous or previous[y] != after:\n yield user,y", "def setConcurrentTasks(self, config):\n self.concurrentTasks = [{'func': self.advanceStatus, 'duration': config.checkStatusDuration}]", "def test_get_list(self):\n result = self.runner.invoke(\n 
cli,\n [\n *CLI_LOG_OPTION,\n \"config\",\n \"get\",\n \"vendor.fetchai.connections.p2p_libp2p.config.entry_peers\",\n ],\n standalone_mode=False,\n )\n assert result.exit_code == 0\n assert result.output == \"[]\\n\"", "def list_configuration(config_file = CONFIG_FILE):\n conf = get_configuration(config_file)\n display_configuration(config_file, 'secret wallet configuration is located', conf)", "def invoke_all_and_wait(self):\n list_promise = []\n for thread in self.__list_thread:\n thread.start()\n list_promise.append(thread)\n for process in list_promise: process.join()", "def list_tasks(ctx):\n ctx.run(\"invoke --list\")", "def gather_configs(self):\n configs = []\n for what in self.order:\n for key in self.plugins[what]:\n mgr = self.plugins[what][key]\n c = mgr.config(what='get')\n if c is not None:\n c.update({\n 'description': mgr.description\n })\n # print(\"Gathering configuration from \", c)\n configs.append(c)\n return configs", "def workers_status(self):\n workers = []\n for agent in self.agents_status():\n workers += agent['workers']\n return workers", "def get_waiter_output(config_id: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n waiter_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetWaiterResult]:\n ...", "def get_all_reservations(config):\n reservations = []\n region_list = regions(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for region in region_list:\n _logger.info(\"Searching %s\", region)\n cnx = region.connect(aws_access_key_id=config.keys.api,\n aws_secret_access_key=config.keys.secret)\n for reservation in cnx.get_all_instances():\n _logger.info(\"Found %s %s\", reservation,\n [str(i.id) for i in reservation.instances])\n reservations.append(reservation)\n return reservations", "def watchers(self) -> dict:\n return self.data.get(\"watchers\", {})", "def antenny_list_configs(self):\n return self.antenny_config.list_configs()", "def x_list():\n\t_loadconfig()", "def list_configurations(ctx):\n config_set = __ensure_configuration_exists(ctx)\n formatter = ConfigSetListFormatter.build(config_set, format='plain')\n out = formatter.format()\n\n click.echo(out)", "def give_workers_list(self):\n return self._workers", "def get_job_arrivals(self):\n return []", "def aliveworkers(workers):\n \n #ping everyone using threads\n threads=[]\n results={}\n output=threading.Lock()\n \n def threadcode(worker):\n worker=worker[:]\n logging.info(\"Pinging %r\" % (worker,))\n results[worker]=sshping(worker)\n logging.info (\"Worker %r is %s.\" % (worker, [\"down\",\"up\"][results[worker]]))\n \n for i,worker in enumerate(workers):\n threads.append(threading.Thread())\n threads[i].run=lambda: threadcode(worker)\n threads[i].start()\n threads[i].join(0.1)\n \n #wait for threads to finish\n for thread in threads:\n thread.join()\n \n aliveworkers=[worker for worker,result in results.items() if result==True]\n return aliveworkers", "def get_list_of_senders(self):\n logger.info(\"Function call: get_senders\")\n return self.__handle_result(self.__send_request('senders'))", "def get_config_names():\r\n return sorted(CONFIGS.keys())", "def test_list(self, containers, docker_ping, available_configurations):\n # https://docs.python.org/3/library/unittest.mock.html#mock-names-and-the-name-attribute\n\n # mock that docker-deamon is running\n docker_ping.return_value = True\n\n # docker deamon returns a list of running node-containers\n container1 = 
MagicMock()\n container1.name = f\"{APPNAME}-iknl-user\"\n containers.list.return_value = [container1]\n\n # returns a list of configurations and failed inports\n def side_effect(system_folders):\n config = MagicMock(available_environments=[\"Application\"])\n config.name = \"iknl\"\n if not system_folders:\n return [[config], []]\n else:\n return [[config], []]\n\n available_configurations.side_effect = side_effect\n\n # invoke CLI method\n runner = CliRunner()\n result = runner.invoke(cli_node_list, [])\n\n # validate exit code\n self.assertEqual(result.exit_code, 0)\n\n # check printed lines\n self.assertEqual(\n result.output,\n \"\\nName Environments Status System/User\\n\"\n \"-------------------------------------------------------------------------------------\\n\"\n \"iknl ['Application'] Offline System \\n\"\n \"iknl ['Application'] Online User \\n\"\n \"-------------------------------------------------------------------------------------\\n\"\n )", "def list_config():\n console = Console()\n _config = loadConfig()\n json_data = richJSON.from_data({**asdict(_config)})\n console.print(Panel(json_data, title=\"SubmarineCliConfig\"))", "def list():\n manager = Actions()\n tasks_list = manager.get_tasks_list()\n console_utils.print_tree(manager, tasks_list)", "def list(self, jobguid=\"\", executionparams=dict()):", "def waiting_procs(self):\n return [p.model_id for p in self.primary_scheduler.queue_nodes.wait_q]", "def get_intervals(self, account):\n buckets = []\n for monitor in self.get_watchauditors(account):\n interval = monitor.watcher.get_interval()\n if not interval in buckets:\n buckets.append(interval)\n return buckets", "def list(self, jobguid=\"\", executionparams=None):", "def run_crawler(self) -> List[JobEventSchema]:\n print(f\"Ready for scraping, current task: {self.tasks}\")\n\n crawling_result = []\n for task in self.tasks:\n result = task.run()\n crawling_result.extend(result)\n return crawling_result", "def list(self):\n\n config = self.get_config()\n client = config['client']\n default_config = config[client]\n\n msg.run('Saved options for client %s' % client)\n msg.inf('Default application (%s)' % default_config.get('defapp'))\n msg.inf('environment (%s)' % default_config['environment'])\n msg.inf('databases prod (%s) test (%s)' %\n (default_config['database'],\n default_config['test_database']))\n msg.inf('Image (%s)' % default_config['image'])\n msg.inf('Nginx (%s) Debug (%s) Verbose (%s)' %\n (default_config['nginx'],\n default_config['debug'],\n default_config['verbose'])\n )\n msg.run('\\nOther clients in this environment')\n clients = [item for item in config if item != 'client']\n\n msg.inf(', '.join(clients))", "def jobs():\n result = []\n out = subprocess.check_output([\"/bin/launchctl\", \"list\"]).decode()\n for row in out.splitlines()[1:]:\n result.append(Job(row))\n return result", "def list(self):\n return self.rpc.call(MsfRpcMethod.JobList)", "def list_agents(self):\n\n agents = self.vip.rpc.call(CONTROL, \"list_agents\").get(timeout=5)\n versions = self.vip.rpc.call(CONTROL, \"agent_versions\").get(timeout=5)\n status_running = self.status_agents()\n uuid_to_status = {}\n # proc_info has a list of [startproc, endprox]\n for a in agents:\n pinfo = None\n is_running = False\n for uuid, name, proc_info in status_running:\n if a['uuid'] == uuid:\n is_running = proc_info[0] > 0 and proc_info[1] == None\n pinfo = proc_info\n break\n\n uuid_to_status[a['uuid']] = {\n 'is_running': is_running,\n 'version': versions[a['uuid']][1],\n 'process_id': None,\n 
'error_code': None,\n 'permissions': {\n 'can_stop': is_running,\n 'can_start': not is_running,\n 'can_restart': True,\n 'can_remove': True\n }\n }\n\n if pinfo:\n uuid_to_status[a['uuid']]['process_id'] = proc_info[0]\n uuid_to_status[a['uuid']]['error_code'] = proc_info[1]\n\n if 'volttroncentral' in a['name'] or \\\n 'vcplatform' in a['name']:\n uuid_to_status[a['uuid']]['permissions']['can_stop'] = False\n uuid_to_status[a['uuid']]['permissions']['can_remove'] = False\n\n # The default agent is stopped health looks like this.\n uuid_to_status[a['uuid']]['health'] = {\n 'status': 'UNKNOWN',\n 'context': None,\n 'last_updated': None\n }\n\n if is_running:\n identity = self.vip.rpc.call(CONTROL, 'agent_vip_identity',\n a['uuid']).get(timeout=30)\n try:\n status = self.vip.rpc.call(identity,\n 'health.get_status').get(\n timeout=5)\n uuid_to_status[a['uuid']]['health'] = status\n except gevent.Timeout:\n _log.error(\"Couldn't get health from {} uuid: {}\".format(\n identity, a['uuid']\n ))\n except Unreachable:\n _log.error(\n \"Couldn't reach agent identity {} uuid: {}\".format(\n identity, a['uuid']\n ))\n for a in agents:\n if a['uuid'] in uuid_to_status.keys():\n _log.debug('UPDATING STATUS OF: {}'.format(a['uuid']))\n a.update(uuid_to_status[a['uuid']])\n return agents", "def brokers_list(**kwargs):\n _validate_components_prepared('brokers_list')\n rabbitmq = _prepare_component_management('rabbitmq', kwargs['verbose'])\n\n brokers = rabbitmq.list_rabbit_nodes()\n complain_about_dead_broker_cluster(brokers)\n output_columns = ('broker_name', 'running', 'alarms')\n output_rows = []\n for node in sorted(brokers['nodes']):\n output_rows.append({\n 'broker_name': node,\n 'running': node in brokers['running_nodes'],\n 'alarms': ', '.join(brokers['alarms'].get(node, [])),\n })\n\n output_table(output_rows, output_columns)", "def run(self) -> None:\n if len(self._waiting) == 0:\n raise ValueError(\"Nothing is waiting\")\n waiters = self._waiting\n self._waiting = []\n for d in waiters:\n d.callback(None)", "def poller_names(self):\n return [i for i in self._config.sections() if i not in ['Local', 'GitHub', 'Logging',\n 'DEFAULT']]", "async def running(self) -> list[dict[str, Any]]:\n data = await self.controller.request(\"get\", \"watering/program\")\n return cast(list[dict[str, Any]], data[\"programs\"])", "def list_hosts():\n task_run(\"/bin/hostname -f\",RING_1_dev__allnodes)", "def get_task_list(self):\n raise NotImplementedError()", "def test_toggle_waitlist_1(self):\n # Create needed objects.\n editor = EditorFactory()\n coordinators = get_coordinators()\n coordinators.user_set.add(editor.user)\n UserProfileFactory(user=editor.user, terms_of_use=True)\n\n partner = PartnerFactory(status=Partner.AVAILABLE)\n\n # Set up request.\n url = reverse('partners:toggle_waitlist', kwargs={'pk': partner.pk})\n\n request = RequestFactory().post(url)\n request.user = editor.user\n\n _ = PartnersToggleWaitlistView.as_view()(request, pk=partner.pk)\n partner.refresh_from_db()\n self.assertEqual(partner.status, Partner.WAITLIST)", "def fetchAllAccounts(config):\n allAccounts = []\n currentStart = 1\n currentLimit = 99\n while currentLimit > 98 :\n currentPull = fetchBatchAccounts(accountsConfig, currentStart, currentLimit)['data']\n allAccounts = allAccounts + currentPull\n currentLimit = int(len(currentPull))\n currentStart = int(currentStart) + int(currentLimit)\n return allAccounts", "def get_notifiers_list() -> List[AbstractNotifier]:\n\n cfg = read_config()\n notifiers = cfg.get('notifier')\n 
notifiers_list = list()\n for notifier in notifiers:\n notifier_fabric = notifiers_fabrics_dict.get(str(notifier))\n if notifier_fabric:\n notifier = notifier_fabric.create_notifier()\n notifiers_list.append(notifier)\n else:\n raise NotifierWasNotRealised\n\n if len(notifiers_list) < 0:\n raise NoNotifiersFound\n return notifiers_list", "def get_list():\n\n print(f\"Корневой каталог: {config_tools.NAME_PATH}\")\n for dirpath, dirnames, filenames in os.walk(config_tools.NAME_PATH):\n # перебрать каталоги\n for dirname in dirnames:\n print(\"Каталог:\", os.path.join(dirpath, dirname))\n # перебрать файлы\n for filename in filenames:\n print(\"Файл:\", os.path.join(dirpath, filename))", "def poller(self):\n\n def watcher(watched_event):\n if watched_event.type and watched_event.path:\n msg = \"child changed, try to get master again. type %s, state %s, path %s.\" % (\n watched_event.type, watched_event.state, watched_event.path)\n logger.info(\"[ %s(%s) ] %s\" % (self.path, \"master\" if self.is_master else \"slave\", msg))\n self.workers = self.get_workers()\n logger.debug(\"poller call register start\")\n self.register_service()\n self.register_leadership()\n logger.debug(\"poller call register end\")\n\n try:\n children = self.zk.get_children(self.SERVICE_PATH, watcher)\n except:\n logger.error(traceback.format_exc())\n return\n logger.debug(\"current worker services are %s\" % children)", "def list(config, username, hostname):\n if (not username and not hostname) or (username and hostname):\n print 'Usage: igor permissions list [OPTIONS]'\n print\n print 'Error: Exactly one of --username or --hostname is required.'\n exit()\n\n if username:\n response = make_api_request('GET', config, '/users/' + username +\n '/machines')\n machines = response.json()['machines']\n for machine in machines:\n print machine['hostname']\n elif hostname:\n response = make_api_request('GET', config, '/machines/' + hostname +\n '/users')\n users = response.json()['users']\n for user in users:\n print user['username']", "def list(self):\n for attrname in dir(self.config):\n if PARAM_PAT.match(attrname):\n yield attrname", "def __sync_bulbs__() -> list:\n\n bulbs = list()\n\n try:\n discovered_bulbs = discover_bulbs(timeout=2)\n except Exception as e:\n raise Exception(str(e))\n\n for bulb in discovered_bulbs:\n ip = bulb['ip']\n port = bulb['port']\n model = bulb['capabilities']['model']\n name = bulb['capabilities']['name']\n name = name if name != '' else ip\n identifier = bulb['capabilities']['id']\n\n found_bulb = Bulb(\n ip=ip,\n port=port,\n model=model\n )\n\n found_bulb.set_name(name)\n properties = found_bulb.get_properties()\n\n bulbs.append({\n 'bulb': found_bulb,\n 'name': name,\n 'model': model,\n 'ip': ip,\n 'metadata':\n {\n 'id': identifier,\n 'ip': ip,\n 'name': name,\n 'model': model,\n 'properties': properties\n }\n })\n\n return bulbs", "def after_launch_remote_worker(msg, config, checklist):\n return []", "def _configure_remote_workers(default_num_clients, remote_executors):\n loop, must_close_loop = _get_event_loop()\n available_executors = [ex for ex in remote_executors if ex.is_ready]\n logging.info('%s TFF workers available out of a total of %s.',\n len(available_executors), len(remote_executors))\n if not available_executors:\n raise execution_context.RetryableError(\n 'No workers are ready; try again to reconnect.')\n try:\n remaining_clients = default_num_clients\n live_workers = []\n for ex_idx, ex in enumerate(available_executors):\n remaining_executors = len(available_executors) - 
ex_idx\n default_num_clients_to_host = remaining_clients // remaining_executors\n remaining_clients -= default_num_clients_to_host\n if default_num_clients_to_host > 0:\n _configure_remote_executor(\n ex, {placements.CLIENTS: default_num_clients_to_host}, loop)\n live_workers.append(ex)\n finally:\n if must_close_loop:\n loop.stop()\n loop.close()\n return [\n _wrap_executor_in_threading_stack(e, can_resolve_references=False)\n for e in live_workers\n ]", "def get_beers_list(self, location_url: str):\n\n data = helpers.beautiful_url(url=location_url, \n cookies=self.cookies, \n javascript=self.javascript) \n\n if self.single_page: \n if self.beer_parent_tags:\n tag, attribute = self.beer_parent_tags\n data = data.find(tag, attribute)\n\n try:\n tag, attribute = self.beers_html_tags\n self.beers = data.find_all(tag, attribute)\n except:\n self.beers = data.find_all\n else: # get a list of all the beer urls\n print(\"multiPage\")\n tag, attribute = self.beer_multi_page_tags\n self.beers = [url['href'] for url in data.find_all(tag, attribute, href=True)]", "def poll(self, timeout: int = 0) -> list:\n return []", "def list_jobs(exproot, **kwargs):\n for jobname, args, results in load_all(exproot):\n print jobname, args, results", "def monitor_nodes(self) -> List[str]:\n return self._monitor_nodes.copy()", "def run(self):\n self.logger.info(\"Starting execution loop...\")\n with ThreadPoolExecutor(\n max_workers=len(self.config) + 10 - (len(self.config) % 10)\n ) as executor:\n for target in self.config:\n executor.submit(self.monitor, target)\n executor.shutdown(wait=True)", "def config_wait_time(config):\n return config['wait_time'] if 'wait_time' in config else default_wait_time", "def _GetThreadsQpsPerLoaderList():\n\n def _FormatThreadQps(thread_qps):\n thread_qps_pair = thread_qps.split(':')\n if len(thread_qps_pair) == 1:\n thread_qps_pair.append(0)\n return [int(val) for val in thread_qps_pair]\n\n return [\n _FormatThreadQps(thread_qps)\n for thread_qps in FLAGS.ycsb_threads_per_client\n ]", "def list_requesters():\n from mephisto.core.local_database import LocalMephistoDB\n from tabulate import tabulate\n\n db = LocalMephistoDB()\n requesters = db.find_requesters()\n dict_requesters = [r.to_dict() for r in requesters]\n click.echo(tabulate(dict_requesters, headers=\"keys\"))", "def get_waiting_jobs(self):\n open_jobs = []\n with closing(self._conn.cursor()) as cursor:\n for row in cursor.execute( \"select job_name, job_version from jobs where job_state in ('\"\n + JobState.WAITING.value + \"','\" + JobState.WAITING_PRED.value + \"','\" + JobState.RUNNING.value +\"')\"):\n open_jobs.append((row[0], row[1]))\n return open_jobs", "def get_finished_runs(self) -> typing.List[typing.Tuple[RunInfo, RunValue]]:\n\n # Proactively see if more configs have finished\n self._extract_completed_runs_from_futures()\n\n results_list = []\n while self.results:\n results_list.append(self.results.pop())\n return results_list", "def ListJobs(self, token=None):\n return aff4.FACTORY.Open(self.CRON_JOBS_PATH, token=token).ListChildren()", "def get_config(self):\n config = set()\n\n while True:\n filenames = self.get_config_files()\n\n for fn in filenames:\n if fn not in self.watch_names:\n filenames.remove(fn)\n if fn in config:\n filenames.remove(fn)\n\n # If we did not find any new config files, exit loop.\n if not filenames:\n break\n\n # Save the config files we found, sleep, then look again.\n config.update(filenames)\n\n # Sleep a bit to allow for settling. 
We loop until no new\n # config files are found.\n time.sleep(1.0)\n\n return config", "def run(self):\n for req, resp in self.servings:\n resp.check_timeout()", "def wait_for_rate_limiters(self) -> None:\n for limiter in self.rate_limiters:\n limiter.check_next_and_wait()", "def cli_list(ctx):\n\n _list_spiders(ctx)", "def wait_for_tasks_to_complete(batch_service_client, job_id_list, timeout):\n timeout_expiration = datetime.datetime.now() + timeout\n\n print(\"Monitoring all tasks for 'Completed' state, timeout in {}...\"\n .format(timeout), end='')\n \n incomplete_jobs = job_id_list\n while datetime.datetime.now() < timeout_expiration:\n print('.', end='')\n sys.stdout.flush()\n\n for job_id in incomplete_jobs:\n tasks = batch_service_client.task.list(job_id)\n incomplete_tasks = [task for task in tasks if\n task.state != batchmodels.TaskState.completed]\n if not incomplete_tasks:\n incomplete_jobs.remove(job_id)\n print('Job '+ job_id + ' completed!')\n \n if not incomplete_jobs: \n return incomplete_jobs\n else:\n time.sleep(3)\n\n print()\n return incomplete_jobs", "def list_uptime_check_configs(project_id: str) -> pagers.ListUptimeCheckConfigsPager:\n client = monitoring_v3.UptimeCheckServiceClient()\n configs = client.list_uptime_check_configs(request={\"parent\": project_id})\n\n for config in configs:\n pprint.pprint(config)\n return configs", "def genJobList():\n nit=10\n reply=[]\n while len(reply)<10: #assume qstat fails if less that 10 jobs on cluster\n reply=chomp(os.popen('qstat|expand|tr -s \\' \\'|cut -d\\' \\' -f 1,2,5').readlines())\n nit+=1\n if nit>10: break\n return reply", "def get_callback_list(hyperparams: Dict[str, Any]) -> List[BaseCallback]:\n\n def get_module_name(callback_name):\n return \".\".join(callback_name.split(\".\")[:-1])\n\n def get_class_name(callback_name):\n return callback_name.split(\".\")[-1]\n\n callbacks = []\n\n if \"callback\" in hyperparams.keys():\n callback_name = hyperparams.get(\"callback\")\n\n if callback_name is None:\n return callbacks\n\n if not isinstance(callback_name, list):\n callback_names = [callback_name]\n else:\n callback_names = callback_name\n\n # Handle multiple wrappers\n for callback_name in callback_names:\n # Handle keyword arguments\n if isinstance(callback_name, dict):\n assert len(callback_name) == 1, (\n \"You have an error in the formatting \"\n f\"of your YAML file near {callback_name}. \"\n \"You should check the indentation.\"\n )\n callback_dict = callback_name\n callback_name = list(callback_dict.keys())[0]\n kwargs = callback_dict[callback_name]\n else:\n kwargs = {}\n callback_module = importlib.import_module(get_module_name(callback_name))\n callback_class = getattr(callback_module, get_class_name(callback_name))\n callbacks.append(callback_class(**kwargs))\n\n return callbacks", "def poll(self):\n self.get_peers()\n self.get_trackers()\n self.get_files()" ]
[ "0.71054786", "0.6426477", "0.586981", "0.57586163", "0.5726431", "0.5626258", "0.55491465", "0.5529439", "0.5473015", "0.54290795", "0.5414866", "0.53946763", "0.53753406", "0.536865", "0.5325333", "0.5261703", "0.5254371", "0.5234681", "0.52307975", "0.51577234", "0.5136059", "0.51083416", "0.5097008", "0.5076341", "0.5075224", "0.50708354", "0.5069733", "0.5036469", "0.5027848", "0.5006232", "0.50048333", "0.50015855", "0.5000917", "0.49932933", "0.4982261", "0.4959151", "0.49569878", "0.49522188", "0.49454615", "0.49450237", "0.49440926", "0.49298993", "0.4929314", "0.49216813", "0.49199456", "0.49188238", "0.49154106", "0.49103367", "0.49082863", "0.48954758", "0.4890463", "0.4884347", "0.4884049", "0.48798776", "0.48743615", "0.48666236", "0.48643303", "0.48570016", "0.48401633", "0.48387933", "0.48349255", "0.48301557", "0.48230332", "0.48164645", "0.4814456", "0.48120448", "0.47947523", "0.478038", "0.47787762", "0.47734323", "0.4762673", "0.47589943", "0.47437924", "0.4734316", "0.47334996", "0.4733411", "0.47317502", "0.47279617", "0.472668", "0.4719142", "0.47150484", "0.47101864", "0.47095627", "0.4706439", "0.47056597", "0.46988422", "0.4697281", "0.4694169", "0.46883056", "0.46882388", "0.4683358", "0.46768746", "0.46759057", "0.46737358", "0.46714887", "0.4656381", "0.46425304", "0.46410078", "0.46389034", "0.46346995" ]
0.6771325
1
Gets information about a single waiter.
def GetWaiter(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter_output(config_id: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[Optional[str]]] = None,\n waiter_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetWaiterResult]:\n ...", "def get_waiter(config_id: Optional[str] = None,\n project: Optional[str] = None,\n waiter_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWaiterResult:\n __args__ = dict()\n __args__['configId'] = config_id\n __args__['project'] = project\n __args__['waiterId'] = waiter_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:runtimeconfig/v1beta1:getWaiter', __args__, opts=opts, typ=GetWaiterResult).value\n\n return AwaitableGetWaiterResult(\n create_time=pulumi.get(__ret__, 'create_time'),\n done=pulumi.get(__ret__, 'done'),\n error=pulumi.get(__ret__, 'error'),\n failure=pulumi.get(__ret__, 'failure'),\n name=pulumi.get(__ret__, 'name'),\n success=pulumi.get(__ret__, 'success'),\n timeout=pulumi.get(__ret__, 'timeout'))", "def wait(self):\n ident = get_ident()\n if ident not in self.events:\n # this is a new client\n # add an entry for it in the self.events dict\n # each entry has two elements, a threading.Event() and a timestamp\n self.events[ident] = [threading.Event(), time.time()]\n return self.events[ident][0].wait()", "def __await__(self):\n return self.waiter.__await__()", "def __await__(self):\n return self.waiter.__await__()", "def wait_for_event(event):\n received = event.wait()\n name = threading.current_thread().getName()\n print \"Waited, got {}, name {}\".format(received, name)", "def info(client):\n\n return client.get_info()", "def wait(self, **kwargs):\n return self.client.api.wait(self.id, **kwargs)", "def get_lock(name):\n return _handler_locks[name]", "def data_wait(self):\n return self.get(timeout=self._timeout)", "def waiterComplete(self, id):\n return self.__insertOrderHistory(id, \"waiterComplete\", {})", "def getNode(self):\r\n try:\r\n output,error = Popen(\"qstat | grep \"+self.jobId, shell=True, stdout=PIPE, stderr=PIPE).communicate()\r\n if self.jobId in output:\r\n return output.split(\"\\t\")[7]\r\n if len(error) > 0:\r\n logging.error(error)\r\n except ValueError:\r\n logging.info(\"Error: waiting for not submitted job...\")", "def test_get_info(self):\n self.addCleanup(self.sdkapi.guest_delete, self.userid)\n\n self.sdkapi.guest_create(self.userid, 1, 1024, disk_list=self.disks)\n self.sdkapi.guest_deploy(self.userid, self.image_name)\n\n # get info in shutdown state\n info_off = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_off['power_state'], 'off')\n self.assertEquals(info_off['mem_kb'], 0)\n self.assertEquals(info_off['cpu_time_us'], 
0)\n\n # get info in active state\n self.sdkapi.guest_start(self.userid)\n self.assertTrue(self.sdkutils.wait_until_guest_in_power_state(\n self.userid, 'on'))\n time.sleep(1)\n info_on = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_on['power_state'], 'on')\n self.assertNotEqual(info_on['cpu_time_us'], 0)\n self.assertNotEqual(info_on['mem_kb'], 0)\n\n # get info in paused state\n self.sdkapi.guest_pause(self.userid)\n info_on = self.sdkapi.guest_get_info(self.userid)\n self.assertEquals(info_on['power_state'], 'on')\n self.assertNotEqual(info_on['cpu_time_us'], 0)\n self.assertNotEqual(info_on['mem_kb'], 0)", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get(self, wait = True):\n return self.current_session.get(wait)", "def get_request(self):\n\t\t#self.__sem.lock()\n\t\ttry:\n\t\t\t\n\t\t\tr = self.get(thread.get_ident(),None)\n\t\t\tif r:\n\t\t\t\treturn r\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\texcept:\n\t\t\traise VDOM_exception(_(\"No request associated with current thread\"))\n\t\t#finally:\n\t\t#\tself.__sem.unlock()", "def __step_waiter(self, step_id):\n\n # don't forget to tip the waiter :)\n step_waiter = self.emr_client.get_waiter('step_complete')\n try:\n step_waiter.wait(ClusterId=self.clusID,\n StepId=step_id[0],\n WaiterConfig={\n 'Delay': 15,\n 'MaxAttempts': 480\n })\n\n except WaiterError as e:\n if 'Max attempts exceeded' in e.message:\n print('EMR Step did not complete in two hours')\n else:\n print(e.message)", "def get_eventhub_info(self):\n self._create_connection()\n eh_name = self.address.path.lstrip('/')\n target = \"amqps://{}/{}\".format(self.address.hostname, eh_name)\n mgmt_client = uamqp.AMQPClient(target, auth=self.auth, debug=self.debug)\n mgmt_client.open(self.connection)\n try:\n mgmt_msg = Message(application_properties={'name': eh_name})\n response = mgmt_client.mgmt_request(\n mgmt_msg,\n constants.READ_OPERATION,\n op_type=b'com.microsoft:eventhub',\n status_code_field=b'status-code',\n description_fields=b'status-description')\n eh_info = response.get_data()\n output = {}\n if eh_info:\n output['name'] = eh_info[b'name'].decode('utf-8')\n output['type'] = eh_info[b'type'].decode('utf-8')\n output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000)\n output['partition_count'] = eh_info[b'partition_count']\n output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']]\n return output\n except:\n raise\n finally:\n mgmt_client.close()", "def waiterConfirm(self, id):\n return self.__insertOrderHistory(id, \"waiterConfirmed\", {})", "async def get_provisioner_info(self) -> Dict:\n provisioner_info = {}\n return provisioner_info", "def waiter(self, interval=None, timeout=None):\n _interval = self.get_wait_interval() if interval is None else interval\n _timeout = self.get_wait_timeout() if timeout is None else timeout\n return Waiter(_interval, _timeout)", "def get_hypervisor_info(self):\n try:\n req = Request(self.compute_url +\n \"/os-hypervisors/detail\" )\n self._upgrade_to_authenticated_request(req)\n resp = urlopen(req)\n content = resp.read().decode('utf-8')\n encoded = json.loads(content)\n resp.close()\n except URLError as e:\n return {}\n except Exception as e:\n raise Exception(\"Unable to process compute reponse: %s\" % e)\n\n return encoded['hypervisors']", "def get(self) -> Info:\n return InfoService.get()", "def cli_wait_for(parser):\n subparser = argparse.ArgumentParser(description='Wait for a 
job',\n parents=[parser])\n subparser.add_argument('-number', required=True, type=int,\n default=None, help='Job number')\n subparser.add_argument('-name', type=str, default=None, help='Job name')\n args = subparser.parse_args()\n\n if not args.name and not args.number:\n print(\"Argument Error: -name or -number is required\")\n subparser.print_help()\n sys.exit(1)\n elif args.name and args.number:\n print(\"Argument Error: Only one of -name and -number can be input\")\n subparser.print_help()\n sys.exit(1)\n\n kwargs = dict({\n 'number': None,\n 'name': None})\n if args.name:\n kwargs.update({\n 'name': args.name})\n if args.number:\n kwargs.update({\n 'number': args.number})\n\n utils.wait_for(config['username'], config['apikey'], args.apiurl, **kwargs)", "def info(self):\n _, data = yield from self.transport.perform_request('GET', '/')\n return data", "def get_information(self):\n try:\n return self._get_information()\n except(AttributeError, KeyError) as e:\n self._logger.error(f\"Error scrapping the tab information: {e}\")", "def waiting(self):\r\n\r\n return self._serial_object.in_waiting", "def get(self):\n \n if self._state == self.State.transfering_no_waiters:\n d = defer.Deferred(self._get_canceller)\n self._get_deferreds.append(d)\n self._state = self.State.transfering_waiters\n return d\n \n elif self._state == self.State.transfering_waiters:\n d = defer.Deferred(self._get_canceller)\n self._get_deferreds.append(d)\n return d\n \n elif self._state == self.State.transfering_waiters_free:\n d = defer.Deferred(self._get_canceller)\n self._get_deferreds.append(d)\n return d\n \n elif self._state == self.State.stored:\n return defer.succeed(self._value)\n \n elif self._state == self.State.freed:\n raise ValueError(\"This value instance should not be used anymore\")\n \n else:\n raise ValueError(\"Invalid state\")", "def get_shift_report_info_waiter(self, sh_reg_id, is_manager_called=False):\n try:\n staff_id = self.db_handler.get_shift_registration_by_shift_reg_id(sh_reg_id)[2]\n is_supervisor = self.is_staff_supervisor_on_shift(sh_reg_id, staff_id)\n msg = ''\n\n if not is_manager_called:\n if is_supervisor:\n msg += f'{emojize(\" :cop:\", use_aliases=True)}Ви були головним на цій зміні!\\n'\n\n check_in, check_out, rating, payment = self.db_handler.get_waiter_personal_info_from_shift_registration(sh_reg_id)\n\n msg += f'{emojize(\" :heavy_plus_sign:\", use_aliases=True)}check-in: {check_in if check_in is not None and check_in !=\"\" else \"Інформація тимчасово відсутня\"}\\n'\\\n f'{emojize(\" :heavy_minus_sign:\", use_aliases=True)}check-out: {check_out if check_out is not None and check_out !=\"\" else \"Інформація тимчасово відсутня\"}\\n' \\\n f'{emojize(\" :hourglass:\", use_aliases=True)}на зміні: {check_out - check_in}\\n'\\\n f'{emojize(\" :chart_with_upwards_trend:\", use_aliases=True)}Рейтинг: {rating if rating is not None and rating !=\"\" else \"Інформація тимчасово відсутня\"}\\n'\\\n f'{emojize(\" :moneybag:\", use_aliases=True)}Нараховано: *{payment if payment is not None and payment !=\"\" else \"Інформація тимчасово відсутня\"}*\\n'\n\n return msg\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def wait_for_event_timeout(event):\n received = event.wait(2)\n name = threading.current_thread().getName()\n print \"Waited with timeout, got {}, name {}\".format(received, name)", "async def _read_event(self, 
timeout: int=None):\n event = None\n try:\n event = await wait_for(self.get_event(), timeout=timeout)\n except TimeoutError:\n pass\n\n return event", "def get_info(self, charger):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_info\",\n \"token\": charger.token(),\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n return response_json", "def get_ticket(self, wid, project, nowait=False):\n\n path = os.path.join(self.prjdir, project)\n q = WorkQueue(path)\n if nowait and not q.isquiet():\n # If we don't want to wait and there is Q, we'll just leave\n return\n\n if not q.add(json.dumps(wid.to_h(), sort_keys=True, indent=4)):\n # Marking the wid to be forgotten ensures it's not sent\n # back to BOSS, and the process blocks\n wid.forget = True\n else:\n wid.result = True", "def get_notification():\n condition.acquire()\n if not notifications:\n ret = condition.wait(2)\n if not ret:\n condition.release()\n raise TimeoutError(\"Timed out while waiting for notification\")\n\n notice = notifications.pop(0)\n condition.release()\n return notice", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arp: Optional[pulumi.Input[bool]] = None,\n control_node_id: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n honeypot_bind_lists: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['HoneypotProbeHoneypotBindListArgs']]]]] = None,\n honeypot_probe_id: Optional[pulumi.Input[str]] = None,\n ping: Optional[pulumi.Input[bool]] = None,\n probe_type: Optional[pulumi.Input[str]] = None,\n probe_version: Optional[pulumi.Input[str]] = None,\n proxy_ip: Optional[pulumi.Input[str]] = None,\n service_ip_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n status: Optional[pulumi.Input[str]] = None,\n uuid: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None) -> 'HoneypotProbe':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _HoneypotProbeState.__new__(_HoneypotProbeState)\n\n __props__.__dict__[\"arp\"] = arp\n __props__.__dict__[\"control_node_id\"] = control_node_id\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"honeypot_bind_lists\"] = honeypot_bind_lists\n __props__.__dict__[\"honeypot_probe_id\"] = honeypot_probe_id\n __props__.__dict__[\"ping\"] = ping\n __props__.__dict__[\"probe_type\"] = probe_type\n __props__.__dict__[\"probe_version\"] = probe_version\n __props__.__dict__[\"proxy_ip\"] = proxy_ip\n __props__.__dict__[\"service_ip_lists\"] = service_ip_lists\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"uuid\"] = uuid\n __props__.__dict__[\"vpc_id\"] = vpc_id\n return HoneypotProbe(resource_name, opts=opts, __props__=__props__)", "def _wait_for_instance_key(self, parent, key):\n samples = TimeoutSampler(\n wait_timeout=30,\n sleep=1,\n func=lambda: getattr(self.instance, parent, None),\n exceptions_dict=NOT_FOUND_ERROR_EXCEPTION_DICT,\n )\n for sample in samples:\n if sample:\n return sample.get(key)", "def get_info(self):\n return None", "def get_health_info(handle, timeout):\n health = dict()\n\n health['stat'] = ceph_mon_command(handle, 'health' , timeout)\n # TODO command not known with ceph_mon_command\n #health['detail'] = ceph_mon_command(handle, 'health 
detail', timeout)\n health['detail'] = shell_command('ceph health detail') + b'\\n'\n health['df'] = ceph_mon_command(handle, 'df' , timeout)\n health['report'] = ceph_mon_command(handle, 'report' , timeout)\n\n return health", "def get_offering_status(nextToken=None):\n pass", "def get(self, block=True, timeout=None):\n return self.q.get(block, timeout)", "def get_probe(self, timeout=None):\r\n\r\n # Send a request to the server.\r\n success, reply = self._wait_for_reply(cb.PROBEPLZ, \\\r\n cb.PROBENR, timeout=timeout)\r\n \r\n # Parse the reply, which looks like this:\r\n # 'probenr_nr=%d'\r\n msg, probenr = reply.split('_')\r\n # Get the number of the probed stimulus.\r\n probenr = int(probenr[probenr.find('=')+1:])\r\n \r\n return probenr", "def ListWaiters(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def get_exchange_info(self):\n return self.request.get(path=\"/info\")", "def coin_info(self):\n res = r.get(self.url + self.coin)\n return self.execute(res)", "def get_waiting_worker(self):\r\n if self.item in (Item.A, Item.B):\r\n for worker in self.workers:\r\n if not worker.working and self.item not in worker.items:\r\n return worker", "def getFirstWorker(self):\n return self.entries[0]", "def get(self):\n args = ledger_name_choices.parse_args(req=None, strict=False)\n return DdlServices.describe_ledger(**args)", "def waiters(self):\n waiters = []\n\n for name, item in self._definition.get('waiters', {}).items():\n name = self._get_name('waiter', Waiter.PREFIX + name)\n waiters.append(Waiter(name, item))\n\n return waiters", "def _get_client_info():\n client = get_distribution('reportportal-client')\n return client.project_name, client.version", "def get(self):\n if not self.finished():\n self.wait()\n return self._result", "def get_info(self): \n return {\n \"ident\": self.ident,\n \"interval\": self._interval,\n \"exception\": self._exception,\n \"execute\": self._execute,\n \"args\": self._args,\n \"kwargs\": self._kwargs}", "def getstatus(self):\n with self.lock:\n return (self.status, self.time_start)", "def get_job(self, identifier: str):\n self._log_operation('Getting job {i}'.format(i=identifier))\n return self._job_queue.get_job_details(identifier)", "def get_info(self, name):\n return self.info[name]", "def get_client(self, name):\n return self.get_clients(as_dict=True).get(name)", "def _get_client(self, requester_name: str) -> Any:\n return self.datastore.get_client_for_requester(requester_name)", "def getInfo(notification):", "def info(self) -> Optional[Dict[str, Any]]:\n return self._state.get(\"info\", None)", "def get_info(self, key: str) -> TaskInfo:\n return self.task_graph.nodes[key][\"info\"]", "def test_wait(self, mocker):\n\n tid = 289466\n site = \"mysite\"\n first_response = self.generate_task_dictionary(\n tid, state=\"waiting\", completed=False\n )\n\n responses = [\n {\"json\": first_response},\n {\"json\": self.generate_task_dictionary(tid)},\n ]\n url = (\n \"https://cloudapi.acquia.com/v1/\"\n \"sites/prod:{site}/tasks/{tid}.json\".format(tid=tid, site=site)\n )\n\n mocker.register_uri(\"GET\", url, responses)\n\n task = self.client.site(site).task(tid).wait()\n self.assertEqual(task[\"id\"], tid)\n self.assertEqual(task[\"state\"], \"done\")", "async def get_thread_info(self) -> Any:\n return await self.AD.threading.get_thread_info()", "def _get_torrent_info_hash(self):\n return self.torrent.create_info_hash()", "async 
def info_timer(self, handle: str) -> Union[tuple, None]:\n return await self.AD.sched.info_timer(handle, self.name)", "async def test_api_supervisor_info(\n hassio_handler, aioclient_mock: AiohttpClientMocker\n) -> None:\n aioclient_mock.get(\n \"http://127.0.0.1/supervisor/info\",\n json={\n \"result\": \"ok\",\n \"data\": {\"supported\": True, \"version\": \"2020.11.1\", \"channel\": \"stable\"},\n },\n )\n\n data = await hassio_handler.get_supervisor_info()\n assert aioclient_mock.call_count == 1\n assert data[\"supported\"]\n assert data[\"version\"] == \"2020.11.1\"\n assert data[\"channel\"] == \"stable\"", "def waiting_messages(self, client):\n return self.storage.get(client, None)", "def get_ticket_info(self):\n return json.dumps(self.__dict__)", "def task(self, name):\n with self.db_lock:\n return self.rcon.hget(self.task_key, name)", "def GetInfo(self, reason=None):\n query = []\n _AppendReason(query, reason)\n return self._SendRequest(HTTP_GET, \"/%s/info\" % GANETI_RAPI_VERSION,\n query, None)", "def message(self):\n self.wait()\n return self._message", "def waitAndGet(self, event_name, timeout=DEFAULT_TIMEOUT):\n if timeout:\n if timeout > MAX_TIMEOUT:\n raise Error(\n self._ad, 'Specified timeout %s is longer than max timeout %s.' %\n (timeout, MAX_TIMEOUT))\n try:\n raw_event = self._callEventWaitAndGet(self._id, event_name, timeout)\n except Exception as e:\n if 'EventSnippetException: timeout.' in str(e):\n raise TimeoutError(\n self._ad, 'Timed out after waiting %ss for event \"%s\" triggered by'\n ' %s (%s).' % (timeout, event_name, self._method_name, self._id))\n raise\n return snippet_event.from_dict(raw_event)", "def get_sleep_timer(self):\n return self.get(COMMAND_UIC, 'GetSleepTimer')", "def get_event(self):\n return self.keys.events.get()", "def get_object(selenium, obj):\n return _get_ui_service(selenium, obj).get_obj_from_info_page(obj)", "def info(self, key = None):\n return self.client.get(self.name).getBodyData(key)", "def info(self):\n resp = self.server.request(\"get\", \"/jobs/%s/%s\" % (self.sessionid,\n self.name))\n return self.server.json_body(resp)", "def get(self) -> Any:\n return self._queue.get()", "async def get_status():", "def waiting(self):\n return self.converters.waiting(self)", "def wait(self):\n headers = self.parse(self.stdin.readline())\n payload = self.parse(self.stdin.read(int(headers.pop('len'))))\n self.log.debug(\"Received %s from supervisor\", headers['eventname'])\n return Event(headers, payload)", "def details(self, identifier):\n return self.client.request_with_method(Methods.GET % (self.name, identifier,))", "def info(self):\r\n return self._get('info', {})", "def getInfo(self):\n return self._info", "def info(self):\n return self._info", "def get_Ethereum_info(wallet: str, info: dict):\r\n\tresp = requests.get('https://api.blockcypher.com/v1/eth/main/addrs/' + wallet)\r\n\tif resp.status_code == 200:\r\n\t\tresponse = json.loads(resp.text)\r\n\t\tinfo['received'] = response['total_received'] / 10**18\r\n\t\tinfo['received_dollars'] = info['received'] * COIN_VALUES['Ethereum']\r\n\t\tinfo['updated'] = datetime.datetime.now().isoformat()\r\n\treturn info", "def get(self, block=True, timeout=None):\n return self.queue.get(block, timeout)" ]
[ "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.732819", "0.7034089", "0.7034089", "0.7034089", "0.58368564", "0.5562174", "0.545833", "0.5453501", "0.5453501", "0.51552576", "0.500022", "0.4947939", "0.49035475", "0.48841918", "0.4875349", "0.4825962", "0.4807038", "0.4788976", "0.47877777", "0.47390124", "0.4737998", "0.4695756", "0.4685639", "0.46742424", "0.46569946", "0.46445638", "0.4630567", "0.4588575", "0.4581137", "0.4562163", "0.4545563", "0.45319268", "0.4531307", "0.45269105", "0.451455", "0.4498437", "0.44955802", "0.44914228", "0.44891006", "0.44867277", "0.44807485", "0.4479121", "0.44681302", "0.44663617", "0.4464845", "0.44604304", "0.44589514", "0.44558623", "0.44513303", "0.44408354", "0.44378853", "0.4431598", "0.44300112", "0.4429392", "0.44257826", "0.4419385", "0.4417579", "0.44163597", "0.44154325", "0.4408601", "0.43974262", "0.43972743", "0.43907362", "0.43831834", "0.43831033", "0.43781188", "0.43777123", "0.43753618", "0.4371543", "0.43702546", "0.43677658", "0.43599233", "0.43575418", "0.43548673", "0.43548217", "0.43546364", "0.43537593", "0.4349463", "0.43465167", "0.43464494", "0.4341324", "0.43402284", "0.4339346", "0.43381315", "0.43347985", "0.43267617", "0.43228635", "0.43220606", "0.43220502" ]
0.51276976
21
Creates a Waiter resource. This operation returns a longrunning Operation resource which can be polled for completion. However, a waiter with the given name will exist (and can be retrieved) prior to the operation completing. If the operation fails, the failed Waiter resource will still exist and must be deleted prior to subsequent creation attempts.
def CreateWaiter(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def wait_operation(\n self,\n ) -> Callable[[operations_pb2.WaitOperationRequest], None]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"delete_operation\" not in self._stubs:\n self._stubs[\"wait_operation\"] = self.grpc_channel.unary_unary(\n \"/google.longrunning.Operations/WaitOperation\",\n request_serializer=operations_pb2.WaitOperationRequest.SerializeToString,\n response_deserializer=None,\n )\n return self._stubs[\"wait_operation\"]", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def WaitOperation(\n self,\n request: google.longrunning.operations_pb2.WaitOperationRequest,\n context: grpc.ServicerContext,\n ) -> google.longrunning.operations_pb2.Operation:", "def wait_to_create(name, namespace, timeout):\n return watch.wait_created_cr(name, namespace,\n timeout=timeout, group=GROUP, plural=PLURAL,\n version=VERSION)", "def waiter(self, interval=None, timeout=None):\n _interval = self.get_wait_interval() if interval is None else interval\n _timeout = self.get_wait_timeout() if timeout is None else timeout\n return Waiter(_interval, _timeout)", "def _make_wait_task(self, name):\n signal_file = os.path.join(self.tmpdir, name)\n open(signal_file, 'wb').close()\n args = [\n '-task-name',\n 'wait',\n '-priority',\n '20',\n '--',\n 'python',\n '-u',\n '-c',\n # Cheezy wait.\n ('import os,time;'\n 'print(\\'hi\\');'\n '[time.sleep(0.1) for _ in range(100000) if os.path.exists(\\'%s\\')];'\n 'print(\\'hi again\\')') % signal_file,\n ]\n wait_task_id = self.client.task_trigger(args)\n # Assert that the 'wait' task has started but not completed, otherwise\n # this defeats the purpose.\n self._wait_for_state(wait_task_id, u'PENDING', u'RUNNING')\n yield\n # Double check.\n result = self.client.task_result(wait_task_id)\n self.assertEqual(u'RUNNING', result[u'state'], result)\n # Unblock the wait_task_id on the bot.\n os.remove(signal_file)\n # Ensure the initial wait task is completed.\n actual_summary, _ = self.client.task_collect(wait_task_id)\n tags = [\n u'authenticated:bot:whitelisted-ip',\n u'pool:default',\n u'priority:20',\n u'realm:none',\n u'service_account:none',\n u'swarming.pool.template:none',\n u'swarming.pool.version:pools_cfg_rev',\n u'user:joe@localhost',\n ]\n performance_stats = actual_summary['shards'][0].pop('performance_stats')\n self.assertPerformanceStatsEmpty(performance_stats)\n self.assertResults(\n self.gen_expected(name=u'wait',\n tags=tags,\n output=re.compile(u'(\\\\S|\\\\s)*hi\\nhi again\\n')),\n actual_summary)", "def wait_operation(\n self,\n request: Optional[operations_pb2.WaitOperationRequest] = None,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Union[float, object] = gapic_v1.method.DEFAULT,\n metadata: 
Sequence[Tuple[str, str]] = (),\n ) -> operations_pb2.Operation:\n # Create or coerce a protobuf request object.\n # The request isn't a proto-plus wrapped type,\n # so it must be constructed via keyword expansion.\n if isinstance(request, dict):\n request = operations_pb2.WaitOperationRequest(**request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method.wrap_method(\n self._transport.wait_operation,\n default_timeout=None,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"name\", request.name),)),\n )\n\n # Send the request.\n response = rpc(\n request,\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )\n\n # Done; return the response.\n return response", "def WaitForOperation(self, operation_ref, message):\n operation_poller = poller.Poller(self.client.instances)\n return waiter.WaitFor(operation_poller, operation_ref, message)", "def wait_for_operation(cls, client, operation_id):\n operation = cls.get(client, operation_id)\n operation.wait()\n return cls.get(client, operation.id)", "async def create_leader_election( redises, resource, node, ttl ):\n return asyncleaderelection.LeaderElection( redises, resource, id = node, ttl = ttl )", "async def create_async(\n self,\n network_access_profile: str,\n unique_name: Union[str, object] = values.unset,\n data_enabled: Union[bool, object] = values.unset,\n data_limit: Union[int, object] = values.unset,\n ip_commands_url: Union[str, object] = values.unset,\n ip_commands_method: Union[str, object] = values.unset,\n sms_commands_enabled: Union[bool, object] = values.unset,\n sms_commands_url: Union[str, object] = values.unset,\n sms_commands_method: Union[str, object] = values.unset,\n ) -> FleetInstance:\n data = values.of(\n {\n \"NetworkAccessProfile\": network_access_profile,\n \"UniqueName\": unique_name,\n \"DataEnabled\": data_enabled,\n \"DataLimit\": data_limit,\n \"IpCommandsUrl\": ip_commands_url,\n \"IpCommandsMethod\": ip_commands_method,\n \"SmsCommandsEnabled\": sms_commands_enabled,\n \"SmsCommandsUrl\": sms_commands_url,\n \"SmsCommandsMethod\": sms_commands_method,\n }\n )\n\n payload = await self._version.create_async(\n method=\"POST\",\n uri=self._uri,\n data=data,\n )\n\n return FleetInstance(self._version, payload)", "def wait_for_operation(\n self,\n operation: dict,\n max_polls: int = MAX_POLLS,\n poll_interval: int = POLL_INTERVAL,\n ) -> dict:\n return None", "def new(\n cls,\n name: str,\n description: str,\n registration_schema: JSON,\n result_schema: JSON,\n database_session: Session) -> 'Service':\n raise NotImplementedError()", "def _create_soap_object(self, name):\n return self.client.factory.create(name)", "def create(self, resource, timeout=None):\n req = ResourceCreateRequest()\n\n if resource is not None:\n req.resource.CopyFrom(\n plumbing.convert_resource_to_plumbing(resource))\n tries = 0\n plumbing_response = None\n while True:\n try:\n plumbing_response = self.stub.Create(\n req,\n metadata=self.parent.get_metadata('Resources.Create', req),\n timeout=timeout)\n except Exception as e:\n if self.parent.shouldRetry(tries, e):\n tries += 1\n self.parent.jitterSleep(tries)\n continue\n raise plumbing.convert_error_to_porcelain(e) from e\n break\n\n resp = models.ResourceCreateResponse()\n resp.meta = plumbing.convert_create_response_metadata_to_porcelain(\n 
plumbing_response.meta)\n resp.resource = plumbing.convert_resource_to_porcelain(\n plumbing_response.resource)\n resp.rate_limit = plumbing.convert_rate_limit_metadata_to_porcelain(\n plumbing_response.rate_limit)\n return resp", "def create_instance(\n self, base_config: dict, labels: dict, wait_for_operation: bool = True\n ) -> Tuple[dict, str]:\n return", "def create_lock() -> Lock:\n return Lock()", "def _create_fake_operation(resource_link, verb, name):\n return {\n 'targetLink': resource_link,\n 'operationType': verb,\n 'name': name,\n 'status': 'DONE',\n 'progress': 100,\n }", "def _MakeCreateRequest(args, messages, resources, project,\n future_reservation_ref):\n future_reservation = util.MakeFutureReservationMessageFromArgs(\n messages, resources, args, future_reservation_ref)\n future_reservation.description = args.description\n future_reservation.namePrefix = args.name_prefix\n\n return messages.ComputeFutureReservationsInsertRequest(\n futureReservation=future_reservation,\n project=project,\n zone=future_reservation_ref.zone)", "def create_lock(self, resource, **kwargs):\n lock = DistLock(resource=resource, created_by_factory=True, **kwargs)\n lock.redis_nodes = self.redis_nodes\n lock.quorum = self.quorum\n lock.factory = self\n return lock", "def lock(self, name, timeout=None, sleep=0.1):\n return Lock(self, name, timeout=timeout, sleep=sleep)", "def _RunCreate(compute_api, args):\n resources = compute_api.resources\n future_reservation_ref = resource_args.GetFutureReservationResourceArg(\n ).ResolveAsResource(\n args,\n resources,\n scope_lister=compute_flags.GetDefaultScopeLister(compute_api.client))\n\n messages = compute_api.client.messages\n project = future_reservation_ref.project\n create_request = _MakeCreateRequest(args, messages, resources, project,\n future_reservation_ref)\n\n service = compute_api.client.apitools_client.futureReservations\n return compute_api.client.MakeRequests([(service, 'Insert', create_request)])", "def create_ble(device_name=None, serial_number=None,\n scan_timeout=None, loop=None):\n impl = JadeInterface.create_ble(device_name, serial_number,\n scan_timeout, loop)\n return JadeAPI(impl)", "def WaitForOperation(self, operation_ref):\n return waiter.WaitFor(\n waiter.CloudOperationPollerNoResources(\n self.client.projects_locations_operations\n ),\n operation_ref,\n 'Waiting for [{0}] to finish'.format(operation_ref.Name()),\n )", "async def wait(self) -> None:\n await checkpoint()\n event = create_event()\n self._waiters.append(event)\n self.release()\n try:\n await event.wait()\n except BaseException:\n if not event.is_set():\n self._waiters.remove(event)\n\n raise\n finally:\n with open_cancel_scope(shield=True):\n await self.acquire()", "def _new_worker(\n self,\n work: WorkType,\n node: DOMNode,\n *,\n name: str | None = \"\",\n group: str = \"default\",\n description: str = \"\",\n exit_on_error: bool = True,\n start: bool = True,\n exclusive: bool = False,\n thread: bool = False,\n ) -> Worker:\n worker: Worker[Any] = Worker(\n node,\n work,\n name=name or getattr(work, \"__name__\", \"\") or \"\",\n group=group,\n description=description or repr(work),\n exit_on_error=exit_on_error,\n thread=thread,\n )\n self.add_worker(worker, start=start, exclusive=exclusive)\n return worker", "def job_create(self, sender, name=None):\n self._require_running()\n name = name or self.DEFAULT_JOB_NAME\n job_id = uuid.uuid4().hex\n assert job_id not in self._jobs\n assert sender is not None\n assert sender.connection\n job = Job(\n job_id,\n name,\n 
self._session_root.joinpath(job_id),\n sender,\n self._loop\n )\n self._jobs[job_id] = job\n self._jobs_by_connection[sender.connection][job_id] = job\n self._log.debug('Created job %s', job)\n return job_id", "def create_service(self, service_name, *args, **kwargs):\n\n creator = self._service_creators.get(service_name, None)\n\n if creator is None:\n return None\n\n return creator(*args, **kwargs)", "def create(self):\n\n # Validate Inputs\n create_dict = {\n \"model_id\": self.model.id,\n }\n\n try:\n # Create Task\n self.spinner.start()\n task_obj = self.dal.task.create(Task(create_dict))\n finally:\n self.spinner.stop()\n return task_obj", "def WaitForOperation(self, operation_ref):\n return waiter.WaitFor(\n waiter.CloudOperationPollerNoResources(\n self.client.projects_locations_operations), operation_ref,\n 'Waiting for [{0}] to finish'.format(operation_ref.Name()))", "def create(self, name, *args, **kwargs):\n if self.can_create(name):\n return self._recipes[name](*args, **kwargs)\n\n return None", "def get_waiter(config_id: Optional[str] = None,\n project: Optional[str] = None,\n waiter_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWaiterResult:\n __args__ = dict()\n __args__['configId'] = config_id\n __args__['project'] = project\n __args__['waiterId'] = waiter_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:runtimeconfig/v1beta1:getWaiter', __args__, opts=opts, typ=GetWaiterResult).value\n\n return AwaitableGetWaiterResult(\n create_time=pulumi.get(__ret__, 'create_time'),\n done=pulumi.get(__ret__, 'done'),\n error=pulumi.get(__ret__, 'error'),\n failure=pulumi.get(__ret__, 'failure'),\n name=pulumi.get(__ret__, 'name'),\n success=pulumi.get(__ret__, 'success'),\n timeout=pulumi.get(__ret__, 'timeout'))", "def create(ctx, iface, resource_config, params, **_):\n\n lb_name = params.get(LB_NAME)\n if not lb_name:\n targs = \\\n utils.find_rels_by_node_type(\n ctx.instance,\n LB_TYPE)\n lb_name = \\\n targs[0].target.instance.runtime_properties[\n EXTERNAL_RESOURCE_ID]\n params.update({LB_NAME: lb_name})\n\n ctx.instance.runtime_properties[LB_NAME] = \\\n lb_name\n\n # Actually create the resource\n iface.create(params)", "def bdev_rbd_create(client, pool_name, rbd_name, block_size, name=None, user=None, config=None, cluster_name=None, uuid=None):\n params = {\n 'pool_name': pool_name,\n 'rbd_name': rbd_name,\n 'block_size': block_size,\n }\n\n if name:\n params['name'] = name\n if user is not None:\n params['user_id'] = user\n if config is not None:\n params['config'] = config\n if cluster_name is not None:\n params['cluster_name'] = cluster_name\n else:\n print(\"WARNING:bdev_rbd_create should be used with specifying -c to have a cluster name after bdev_rbd_register_cluster.\")\n if uuid is not None:\n params['uuid'] = uuid\n\n return client.call('bdev_rbd_create', params)", "def create(self, callback=None):\n\n parms = [{'budget': self.budget,\n 'deployment': {'deploymentId': self.deployment},\n 'description': self.description,\n 'name': self.name,\n 'minimumServers': self.minimum_servers,\n 'maximumServers': self.maximum_servers,\n 'breachIncrement': self.breach_increment,\n 'breachPeriodInMinutes': self.breach_period_in_minutes,\n 'cooldownPeriodInMinutes': self.cooldown_period_in_minutes,\n 'lowerCpuThreshold': self.lower_cpu_threshold,\n 'upperCpuThreshold': self.upper_cpu_threshold,\n 'lowerRamThreshold': self.lower_ram_threshold,\n 
'upperRamThreshold': self.upper_ram_threshold}]\n\n payload = {'addTier':camel_keys(parms)}\n\n response=self.post(data=json.dumps(payload))\n if self.last_error is None:\n self.load()\n return response\n else:\n raise TierCreationException(self.last_error)", "def __await__(self):\n return self.waiter.__await__()", "def __await__(self):\n return self.waiter.__await__()", "def _create_counter(self, name):\n otel_safe_name = _get_otel_safe_name(name)\n\n if _is_up_down_counter(name):\n counter = self.meter.create_up_down_counter(name=otel_safe_name)\n else:\n counter = self.meter.create_counter(name=otel_safe_name)\n\n logging.debug(\"Created %s as type: %s\", otel_safe_name, _type_as_str(counter))\n return counter", "def __init__(__self__, resource_name, opts=None, allocated_capacity=None, command=None, connections=None, default_arguments=None, description=None, execution_property=None, glue_version=None, max_capacity=None, max_retries=None, name=None, number_of_workers=None, role_arn=None, security_configuration=None, tags=None, timeout=None, worker_type=None, __props__=None, __name__=None, __opts__=None):\n if __name__ is not None:\n warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)\n resource_name = __name__\n if __opts__ is not None:\n warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)\n opts = __opts__\n if opts is None:\n opts = pulumi.ResourceOptions()\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n if opts.version is None:\n opts.version = utilities.get_version()\n if opts.id is None:\n if __props__ is not None:\n raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')\n __props__ = dict()\n\n __props__['allocated_capacity'] = allocated_capacity\n if command is None:\n raise TypeError(\"Missing required property 'command'\")\n __props__['command'] = command\n __props__['connections'] = connections\n __props__['default_arguments'] = default_arguments\n __props__['description'] = description\n __props__['execution_property'] = execution_property\n __props__['glue_version'] = glue_version\n __props__['max_capacity'] = max_capacity\n __props__['max_retries'] = max_retries\n __props__['name'] = name\n __props__['number_of_workers'] = number_of_workers\n if role_arn is None:\n raise TypeError(\"Missing required property 'role_arn'\")\n __props__['role_arn'] = role_arn\n __props__['security_configuration'] = security_configuration\n __props__['tags'] = tags\n __props__['timeout'] = timeout\n __props__['worker_type'] = worker_type\n __props__['arn'] = None\n super(Job, __self__).__init__(\n 'aws:glue/job:Job',\n resource_name,\n __props__,\n opts)", "def get_named_lock(self, name):\r\n # Global critical section\r\n self._named_locks_lock.acquire()\r\n if not name in self._named_locks:\r\n self._named_locks[name] = BoundedSemaphore()\r\n self._named_locks_lock.release()\r\n # End global critical section\r\n\r\n self.log.debug(\"Grabbing named lock (%s)\" % name)\r\n self._named_locks[name].acquire()\r\n self.log.debug(\"Got named lock (%s)\" % name)", "async def make(epoller: Epoller, ram: RAM, fd: FileDescriptor) -> AsyncFileDescriptor:\n status = FDStatus(EPOLL.NONE)\n epolled = await epoller.register(\n fd, EPOLL.IN|EPOLL.OUT|EPOLL.RDHUP|EPOLL.PRI|EPOLL.ERR|EPOLL.HUP|EPOLL.ET,\n status.posedge,\n )\n return AsyncFileDescriptor(ram, fd, status, epolled)", "def 
_create_resource_provider(self, uuid, name):\n url = \"/resource_providers\"\n payload = {\n 'uuid': uuid,\n 'name': name,\n }\n resp = self.post(url, payload)\n if resp.status_code == 201:\n msg = _LI(\"Created resource provider record via placement API \"\n \"for resource provider with UUID {0} and name {1}.\")\n msg = msg.format(uuid, name)\n LOG.info(msg)\n return objects.ResourceProvider(\n uuid=uuid,\n name=name,\n generation=1,\n )\n elif resp.status_code == 409:\n # Another thread concurrently created a resource provider with the\n # same UUID. Log a warning and then just return the resource\n # provider object from _get_resource_provider()\n msg = _LI(\"Another thread already created a resource provider \"\n \"with the UUID {0}. Grabbing that record from \"\n \"the placement API.\")\n msg = msg.format(uuid)\n LOG.info(msg)\n return self._get_resource_provider(uuid)\n else:\n msg = _LE(\"Failed to create resource provider record in \"\n \"placement API for UUID %(uuid)s. \"\n \"Got %(status_code)d: %(err_text)s.\")\n args = {\n 'uuid': uuid,\n 'status_code': resp.status_code,\n 'err_text': resp.text,\n }\n LOG.error(msg, args)", "def new_workunit(self, name, labels=None, cmd=''):\r\n parent = self._threadlocal.current_workunit\r\n with self.new_workunit_under_parent(name, parent=parent, labels=labels, cmd=cmd) as workunit:\r\n self._threadlocal.current_workunit = workunit\r\n try:\r\n yield workunit\r\n finally:\r\n self._threadlocal.current_workunit = parent", "def name_create(self, name):\n values = {\n 'name': name,\n }\n return self.create(values).name_get()[0]", "def create(self, name, *args, **kwargs):\n resource_name = self._resource_name(name)\n log.info(\n \"Creating {} '{}'...\".format(self._model_name, resource_name))\n resource = self.collection.create(*args, name=resource_name, **kwargs)\n self._ids.add(resource.id)\n return resource", "def create_ble(device_name=None, serial_number=None,\n scan_timeout=None, loop=None):\n this_module = sys.modules[__name__]\n if not hasattr(this_module, \"JadeBleImpl\"):\n raise JadeError(1, \"BLE support not installed\", None)\n\n impl = JadeBleImpl(device_name or DEFAULT_BLE_DEVICE_NAME,\n serial_number or DEFAULT_BLE_SERIAL_NUMBER,\n scan_timeout or DEFAULT_BLE_SCAN_TIMEOUT,\n loop=loop)\n return JadeInterface(impl)", "def create_lock(self, lock_name):\n path = '/locks/create/%s' % lock_name\n response = self.rest.request(method='post',\n content_type='text/plain', path=path)\n return response.text", "def cli_wait_for(parser):\n subparser = argparse.ArgumentParser(description='Wait for a job',\n parents=[parser])\n subparser.add_argument('-number', required=True, type=int,\n default=None, help='Job number')\n subparser.add_argument('-name', type=str, default=None, help='Job name')\n args = subparser.parse_args()\n\n if not args.name and not args.number:\n print(\"Argument Error: -name or -number is required\")\n subparser.print_help()\n sys.exit(1)\n elif args.name and args.number:\n print(\"Argument Error: Only one of -name and -number can be input\")\n subparser.print_help()\n sys.exit(1)\n\n kwargs = dict({\n 'number': None,\n 'name': None})\n if args.name:\n kwargs.update({\n 'name': args.name})\n if args.number:\n kwargs.update({\n 'number': args.number})\n\n utils.wait_for(config['username'], config['apikey'], args.apiurl, **kwargs)", "def bdev_delay_create(client, base_bdev_name, name, avg_read_latency, p99_read_latency, avg_write_latency, p99_write_latency, uuid=None):\n params = {\n 'base_bdev_name': base_bdev_name,\n 
'name': name,\n 'avg_read_latency': avg_read_latency,\n 'p99_read_latency': p99_read_latency,\n 'avg_write_latency': avg_write_latency,\n 'p99_write_latency': p99_write_latency,\n }\n if uuid:\n params['uuid'] = uuid\n return client.call('bdev_delay_create', params)", "def create(self):\n o = self._create_impl()\n self.logger.debug(f\"created {o}\")\n self._notify(o)", "async def create_async(\n self,\n friendly_name: str,\n configuration: str,\n assignment_callback_url: Union[str, object] = values.unset,\n fallback_assignment_callback_url: Union[str, object] = values.unset,\n task_reservation_timeout: Union[int, object] = values.unset,\n ) -> WorkflowInstance:\n data = values.of(\n {\n \"FriendlyName\": friendly_name,\n \"Configuration\": configuration,\n \"AssignmentCallbackUrl\": assignment_callback_url,\n \"FallbackAssignmentCallbackUrl\": fallback_assignment_callback_url,\n \"TaskReservationTimeout\": task_reservation_timeout,\n }\n )\n\n payload = await self._version.create_async(\n method=\"POST\",\n uri=self._uri,\n data=data,\n )\n\n return WorkflowInstance(\n self._version, payload, workspace_sid=self._solution[\"workspace_sid\"]\n )", "def create_pool(self,\n instance_id: str,\n *,\n name: str = None,\n origins: List['OriginInput'] = None,\n description: str = None,\n enabled: bool = None,\n healthy_origins_threshold: int = None,\n monitor: str = None,\n notification_channel: str = None,\n healthcheck_region: str = None,\n healthcheck_subnets: List[str] = None,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if origins is not None:\n origins = [convert_model(x) for x in origins]\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='create_pool')\n headers.update(sdk_headers)\n\n data = {\n 'name': name,\n 'origins': origins,\n 'description': description,\n 'enabled': enabled,\n 'healthy_origins_threshold': healthy_origins_threshold,\n 'monitor': monitor,\n 'notification_channel': notification_channel,\n 'healthcheck_region': healthcheck_region,\n 'healthcheck_subnets': healthcheck_subnets\n }\n data = {k: v for (k, v) in data.items() if v is not None}\n data = json.dumps(data)\n headers['content-type'] = 'application/json'\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n headers['Accept'] = 'application/json'\n\n url = '/instances/{0}/pools'.format(\n *self.encode_path_vars(instance_id))\n request = self.prepare_request(method='POST',\n url=url,\n headers=headers,\n data=data)\n\n response = self.send(request)\n return response", "def create(self, resource, **data):\n body = ''\n if resource == 'robot/job':\n body = data['body']\n else:\n body = urllib.urlencode(data)\n\n return self.request('/' + resource, 'POST', body=body)", "def create_task(self, name, value):\n pass", "def create(self, name=None, description=None):\n uri = URITemplate(self.baseuri + '/{owner}').expand(\n owner=self.username)\n return self.session.post(uri, json=self._attribs(name, description))", "def new_workunit(self, name, labels=None, cmd=''):\r\n with self.run_tracker.new_workunit(name=name, labels=labels, cmd=cmd) as workunit:\r\n yield workunit", "def new(name, template, version):\n NewCommandExecutor().new(name, template, version)", "def newTask(name, description, assigner, id=None, priority=None, submitter_email=None, whose=None):\n if whose:\n user_id = 
jutdaapi.find_user(whose)\n if not user_id:\n raise ValueError('bad whose assignment: '+str(whose))\n #title = name + ' for: '+assigner.title()\n # that was the old scheme\n title = '('+assigner.title()+') '+name\n\n if priority != None:\n #priority = (int(priority) + 2) / 2\n priority = int(priority)\n RA_queue = 3\n #if assigner != 'no one':\n # description += '<tasktrackermeta assigner=\"'+assigner+'\"/>'\n if isinstance(id, str):\n description += '<tasktrackermeta id=\"'+id+'\"/>'\n ticket_id = jutdaapi.create_ticket(RA_queue, title, description,\n priority=priority, submitter_email=submitter_email)\n # Is there a race condition here? In this kind of database\n # I would assume not.\n time.sleep(1)\n ticket = jutdaapi.get_detailed_ticket(ticket_id)\n t = ticketToTask(ticket)\n return t", "async def create_span(\n self,\n request: Optional[Union[trace.Span, dict]] = None,\n *,\n retry: OptionalRetry = gapic_v1.method.DEFAULT,\n timeout: Union[float, object] = gapic_v1.method.DEFAULT,\n metadata: Sequence[Tuple[str, str]] = (),\n ) -> trace.Span:\n # Create or coerce a protobuf request object.\n request = trace.Span(request)\n\n # Wrap the RPC method; this adds retry and timeout information,\n # and friendly error handling.\n rpc = gapic_v1.method_async.wrap_method(\n self._client._transport.create_span,\n default_retry=retries.Retry(\n initial=0.1,\n maximum=1.0,\n multiplier=1.2,\n predicate=retries.if_exception_type(\n core_exceptions.DeadlineExceeded,\n core_exceptions.ServiceUnavailable,\n ),\n deadline=120.0,\n ),\n default_timeout=120.0,\n client_info=DEFAULT_CLIENT_INFO,\n )\n\n # Certain fields should be provided within the metadata header;\n # add these here.\n metadata = tuple(metadata) + (\n gapic_v1.routing_header.to_grpc_metadata(((\"name\", request.name),)),\n )\n\n # Send the request.\n response = await rpc(\n request,\n retry=retry,\n timeout=timeout,\n metadata=metadata,\n )\n\n # Done; return the response.\n return response", "def create(args):\n print('Creates an HPC fleet with given name \"{}\"'.format(args.fleet_name))", "def create(self, name):\n success, response = self._client_api.gen_request(req_type='post',\n path='/projects/{}/bots'.format(self.project.id),\n json_req={'name': name})\n if success:\n bot = entities.Bot.from_json(_json=response.json(),\n project=self.project,\n bots=self, client_api=self._client_api)\n else:\n raise exceptions.PlatformException(response)\n assert isinstance(bot, entities.Bot)\n return bot", "def __init__(self, cb: CircuitBreaker, name: str) -> None:\n self._breaker: CircuitBreaker = cb\n self._name: str = name", "def _create_gauge(self, name: str, attributes: Attributes = None):\n otel_safe_name = _get_otel_safe_name(name)\n key = _generate_key_name(name, attributes)\n\n gauge = self.meter.create_observable_gauge(\n name=otel_safe_name,\n callbacks=[partial(self.read_gauge, _generate_key_name(name, attributes))],\n )\n self.map[key] = Observation(DEFAULT_GAUGE_VALUE, attributes)\n\n return gauge", "def create_service(service, version, creds=None):\n # Instantiate an Http instance\n http = httplib2.Http()\n\n if creds:\n # Authorize the Http instance with the passed credentials\n creds.authorize(http)\n\n return build(service, version, http=http)", "def create(self, resource_name, data_dict):\n try:\n resource_cls = getattr(self, resource_name).resource\n except AttributeError:\n raise AttributeError(\"No resource named %s is defined.\" % resource_name)\n\n return resource_cls.create(self, data_dict)", "def create_kernel(name: 
str) -> str:\n ...", "def bdev_aio_create(client, filename, name, block_size=None, readonly=False):\n params = {'name': name,\n 'filename': filename}\n\n if block_size:\n params['block_size'] = block_size\n\n if readonly:\n params['readonly'] = readonly\n\n return client.call('bdev_aio_create', params)", "def create_label(self, name: str):\n return create_label(self.api_key, name)", "def create(self, object_name):\n return self.client.factory.create(object_name)", "def add_loading_spinner(\n self,\n name: str,\n width: int = 16,\n height: int = 16,\n stroke_width: int = 2,\n color: Optional[str] = None,\n tooltip: Optional[str] = None,\n value: Optional[float] = None,\n ): # noqa: E501\n pr = flet.ProgressRing(\n width=width,\n height=height,\n stroke_width=stroke_width,\n color=color,\n tooltip=tooltip,\n value=value,\n )\n self._client.add_element(pr, name)\n return pr", "def get(resource_name, id, opts=None, cluster=None, deployment_controller=None, deployment_maximum_percent=None, deployment_minimum_healthy_percent=None, desired_count=None, enable_ecs_managed_tags=None, health_check_grace_period_seconds=None, iam_role=None, launch_type=None, load_balancers=None, name=None, network_configuration=None, ordered_placement_strategies=None, placement_constraints=None, platform_version=None, propagate_tags=None, scheduling_strategy=None, service_registries=None, tags=None, task_definition=None, wait_for_steady_state=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n __props__[\"cluster\"] = cluster\n __props__[\"deployment_controller\"] = deployment_controller\n __props__[\"deployment_maximum_percent\"] = deployment_maximum_percent\n __props__[\"deployment_minimum_healthy_percent\"] = deployment_minimum_healthy_percent\n __props__[\"desired_count\"] = desired_count\n __props__[\"enable_ecs_managed_tags\"] = enable_ecs_managed_tags\n __props__[\"health_check_grace_period_seconds\"] = health_check_grace_period_seconds\n __props__[\"iam_role\"] = iam_role\n __props__[\"launch_type\"] = launch_type\n __props__[\"load_balancers\"] = load_balancers\n __props__[\"name\"] = name\n __props__[\"network_configuration\"] = network_configuration\n __props__[\"ordered_placement_strategies\"] = ordered_placement_strategies\n __props__[\"placement_constraints\"] = placement_constraints\n __props__[\"platform_version\"] = platform_version\n __props__[\"propagate_tags\"] = propagate_tags\n __props__[\"scheduling_strategy\"] = scheduling_strategy\n __props__[\"service_registries\"] = service_registries\n __props__[\"tags\"] = tags\n __props__[\"task_definition\"] = task_definition\n __props__[\"wait_for_steady_state\"] = wait_for_steady_state\n return Service(resource_name, opts=opts, __props__=__props__)", "def create(cls, task_name, cfd_mesh):\n if task_name not in cls._available_tasks:\n raise KeyError(\"Invalid task name: %s\"%task_name)\n tcls = cls._available_tasks[task_name]\n obj = tcls(cfd_mesh)\n return obj", "def create_wait_timer(cls,\n validator_address,\n certificates):\n\n local_mean = cls.compute_local_mean(certificates)\n previous_certificate_id = \\\n certificates[-1].identifier if certificates else NullIdentifier\n\n # Create an enclave timer object and then use it to create a\n # WaitTimer object\n enclave_timer = \\\n cls.poet_enclave.create_wait_timer(\n validator_address=validator_address,\n previous_certificate_id=previous_certificate_id,\n local_mean=local_mean,\n minimum_wait_time=cls.minimum_wait_time)\n timer = 
cls(enclave_timer)\n\n LOGGER.info('wait timer created; %s', timer)\n\n return timer", "def create_service():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file('credentials.json'\n , SCOPES)\n creds = flow.run_local_server(port=9797)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('drive', 'v3', credentials=creds)\n return service", "def acquire(self, timeout=None):\r\n if timeout is None:\r\n resource = self._resources.get()\r\n else:\r\n if isinstance(timeout, Amount):\r\n timeout = timeout.as_(Time.SECONDS)\r\n resource = self._resources.get(True, timeout)\r\n return Resource(self, resource)", "def CreateComponent(self, name, state):\n component = SimpleTestClientComponent()\n component.__dict__.update(state)\n return component", "def __createLock(self):\n lockUrl = self.metaData.getLink(\"lock\")\n assert lockUrl is not None\n\n lockBody = json.dumps({\"lockIntent\" : \"lockedForEdit\"})\n header = self._baseHeader.copy()\n header['Content-type'] = \"application/vnd.huddle.data+json\"\n lockResponse = self._adapter.postRequest(lockUrl, header, lockBody)\n\n return lockResponse", "def create_sync(self, *args, **kwargs):\n\n check = self.create(*args, **kwargs)\n\n check_id = check.response['check']['id']\n progress = check.response['check']['progress']\n\n while progress < 1:\n progress_resp = self.track_progress(check_id)\n progress = progress_resp.response['progress'][str(check_id)]\n sleep(5)\n\n return self.get(check_id)", "def make_waitable(self):\n if not self.is_waitable():\n self._condition = threading.Condition()", "def create_resource(\n service_name: str, config_name: str = None, **resource_args\n):\n session = get_session(config_name)\n return session.resource(service_name, **resource_args)", "def client(self,\n name,\n method=None,\n url=None,\n status_callback_event=None,\n status_callback_method=None,\n status_callback=None,\n **kwargs):\n return self.append(Client(\n name,\n method=method,\n url=url,\n status_callback_event=status_callback_event,\n status_callback_method=status_callback_method,\n status_callback=status_callback,\n **kwargs\n ))", "def create_feature(\n self,\n ) -> Callable[\n [featurestore_service.CreateFeatureRequest], Awaitable[operations_pb2.Operation]\n ]:\n # Generate a \"stub function\" on-the-fly which will actually make\n # the request.\n # gRPC handles serialization and deserialization, so we just need\n # to pass in the functions for each.\n if \"create_feature\" not in self._stubs:\n self._stubs[\"create_feature\"] = self.grpc_channel.unary_unary(\n \"/google.cloud.aiplatform.v1beta1.FeaturestoreService/CreateFeature\",\n request_serializer=featurestore_service.CreateFeatureRequest.serialize,\n response_deserializer=operations_pb2.Operation.FromString,\n )\n return self._stubs[\"create_feature\"]", "def create_pumper():\n return _Kalamazoo()", "def create_service(cls, proto_py_module, service_name):\n\n return cls.create_services(proto_py_module, service_name)", "def 
create_pool(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n pool = conn.load_balancer.create_pool(\n protocol=data['pool']['protocol'],\n lb_algorithm=data['pool']['lb_algorithm'],\n session_persistence=data['pool'].get('session_persistence'),\n listener_id=kwargs['listener_id'],\n loadbalancer_id=kwargs['loadbalancer_id'],\n name=data['pool'].get('name'),\n description=data['pool'].get('description'),\n admin_state_up=data['pool'].get('admin_state_up'),\n tls_enabled=data['pool'].get('tls_enabled'),\n # Replace empty string by None (uses default tls cipher string)\n tls_ciphers=data['pool'].get('tls_ciphers') or None,\n )\n\n if data.get('members'):\n args = (request, kwargs['loadbalancer_id'], add_member)\n kwargs = {'callback_kwargs': {'pool_id': pool.id,\n 'index': 0}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n elif data.get('monitor'):\n args = (request, kwargs['loadbalancer_id'], create_health_monitor)\n kwargs = {'callback_kwargs': {'pool_id': pool.id}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n\n return _get_sdk_object_dict(pool)", "def create_resource():\n return wsgi.Resource(WorkersController())" ]
[ "0.639531", "0.639531", "0.639531", "0.61406636", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.5927985", "0.573098", "0.5721729", "0.55530214", "0.5279856", "0.5229172", "0.5131282", "0.4804883", "0.47770038", "0.47526258", "0.47498882", "0.47398913", "0.47255245", "0.4693124", "0.46864077", "0.46736303", "0.46533248", "0.46416363", "0.4638941", "0.46197423", "0.46191815", "0.46030858", "0.4596107", "0.45907202", "0.45901987", "0.45762482", "0.45747516", "0.45609036", "0.45603177", "0.455279", "0.45347962", "0.45235828", "0.45215327", "0.44887665", "0.4484553", "0.4484553", "0.4456451", "0.4452885", "0.44473433", "0.443637", "0.44244388", "0.44140327", "0.44139427", "0.44087282", "0.44071925", "0.44061193", "0.4399021", "0.43937117", "0.43921584", "0.4386512", "0.43839452", "0.43773666", "0.43723407", "0.4369031", "0.43674484", "0.43653", "0.43635726", "0.4337764", "0.4332337", "0.43310824", "0.4327361", "0.4326378", "0.4323562", "0.43149415", "0.43080306", "0.4306011", "0.42960614", "0.42912024", "0.42902067", "0.4288488", "0.4287352", "0.4286307", "0.4270241", "0.42681152", "0.42653835", "0.42631033", "0.42604452", "0.4258309", "0.42567888", "0.42566887", "0.42561314", "0.42546564", "0.42531762", "0.4249609", "0.42488882" ]
0.57157534
18
Deletes the waiter with the specified name.
def DeleteWaiter(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete(self, name):\n with self._lock:\n self._delete(name)", "def delete(self, name):\n\n pass", "def delete(self, name=None):\n raise NotImplementedError", "def delete(self, name):\n self.connect()\n self._write('DEL %s\\r\\n' % name)\n return self._get_numeric_response()", "def delete(self, name):\n self.backend.delete(name)", "def delete(self, name):\n try:\n self.container.delete_object(name)\n except ResponseError as e:\n reraise(e)", "def delete_file(self, name):\n del self.files[name]", "def remove(self, name):\n path = '%s/%s' % (self.path, name)\n lock = '%s%s' % (path, LOCKED_SUFFIX)\n os.unlink(path)\n os.unlink(lock)", "def bdev_rbd_delete(client, name):\n params = {'name': name}\n return client.call('bdev_rbd_delete', params)", "def delete_lock(self, lock_name):\n path = '/locks/delete/%s' % lock_name\n response = self.rest.request(content_type='text/plain',\n method='delete', path=path)\n return response.ok", "def delete(cls, name):\n\n secret = cls.get_instance(name)\n secret.delete_instance(recursive=True)", "def delete_task(self, name):\n with self.db_lock:\n return self.rcon.hdel(self.task_key, name)", "def delete_file(name):\n subprocess.check_output(cmd_preamble + [\"rm\", name])", "def delete(self, name):\n assert name, \"Must input a valid dataset name.\"\n self.manager.delete_data(name)", "def delete_task(self, name):\n return self.DeleteTask(name, 0)", "def delete(self, block, name):\n self._kvs.delete(self._key(block, name))", "def bdev_delay_delete(client, name):\n params = {'name': name}\n return client.call('bdev_delay_delete', params)", "def bdev_aio_delete(client, name):\n params = {'name': name}\n return client.call('bdev_aio_delete', params)", "def delete(self, *, name: types.TSeedName) -> None:\n if not (self._base_path / self._get_file_name(name)).exists():\n raise exceptions.SeedNotFoundError(f\"could not find seed {name}\")\n (self._base_path / self._get_file_name(name)).unlink()", "def rm(self, name: str) -> None:\n path = self.get_path(name)\n if os.path.exists(path):\n os.remove(path)", "def deleteUser(self,name):\n raise BorkedDeleteUser", "def _del(self, name):\n raise NotImplementedError", "def delete_node(self, name):\n\n name = self._validate_name(name)\n if name in self.nodes:\n del self.nodes[name]", "def delete_fleet(Name=None):\n pass", "def destroy(self, name):\n self._assert_space()\n\n service_instance = self._get_service_instance(name)\n if service_instance:\n lastop = service_instance.last_operation\n if 'delete' == lastop['type']:\n return service_instance\n return self._cc \\\n .service_instances(service_instance.guid) \\\n .set_query(accepts_incomplete='true') \\\n .delete()\n return None", "def bdev_uring_delete(client, name):\n params = {'name': name}\n return client.call('bdev_uring_delete', params)", "def delete(self, name, project=None):\n qlist = self._list(project)\n key = self._queue(project, name)\n self._db.delete(key)\n self._db.zremrangebyscore(qlist, -1, 1)", "def delete_bucket(self, name):\n return", "def delete_entry(self, name):\n try:\n self.__datacatalog.delete_entry(name=name)\n self.__log_entry_operation('deleted', entry_name=name)\n except Exception as e:\n logging.info(\n 'An exception ocurred while attempting to'\n ' delete Entry: %s', name)\n logging.debug(str(e))", "def delete_committer(self, name: str) -> None:\n for index, committer in enumerate(self._info[\"committers\"]):\n if committer[\"name\"] == name:\n del self._info[\"committers\"][index]\n return\n raise ValueError(f\"Committer {name} is not 
on the committer list\")", "def bdev_iscsi_delete(client, name):\n params = {'name': name}\n return client.call('bdev_iscsi_delete', params)", "def delete(self, name):\n err = C.git_remote_delete(self._repo._repo, to_bytes(name))\n check_error(err)", "def _delete_host_by_name(self, name):\n host_rs = self.client.search(\"hosts\", name=name)\n if hasattr(host_rs, \"hits\") and host_rs.total != 0:\n host = host_rs.hits[0]\n host.delete()", "def ex_destroy_storage_service(self, name):\n\n response = self._perform_storage_service_delete(self._get_storage_service_path(name))\n self.raise_for_response(response, 200)\n\n return True", "def delete(self, name):\n global items\n items = _Helper.all_item_except_searching_for(name)\n return {\"message\": f\"Item {name} deleted successfully\"}, 204", "def delete(task_name):\n tasks.delete_one({'name': task_name})", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def get_waiter(self, waiter_name: str = None) -> Waiter:\n pass", "def delete(self, wait=False):\n \n self.client.delete(self.path)\n helpers.maybe_wait_until_deleted(self, wait, self.path)\n \n return True", "def delete_bookmark(self, name):\n eh = SimpleErrorHandler()\n \n self._client.execute('bookmark', name, d=True, eh=eh)\n\n return bool(eh)", "def bdev_xnvme_delete(client, name):\n params = {'name': name}\n return client.call('bdev_xnvme_delete', params)", "def key_delete(self, name=None):\n raise NotImplementedError", "def bdev_crypto_delete(client, name):\n params = {'name': name}\n return client.call('bdev_crypto_delete', params)", "def delete(self):\n self.manager.delete(self.name)", "def delete(self):\n self.manager.delete(self.name)", "def remove(self, name: str) -> None:\n del self.components[name]", "def delete(self, name):\n path = self.directory / f\"{name}.yaml\"\n if path.exists():\n path.unlink()", "def remove(self, name):\n raise NotImplementedError", "def delete_user(self, instance, name):\n return instance.delete_user(name)", "def delete(self, name):\n if name in self._dict:\n self._dict.pop(name)\n self.save()\n else:\n raise PoseError(\"%s is not in database\" % _name)", "def delete(id, name):\n kargs={'host': c.cfg['host'], \"api_version\": c.cfg['api_version'], \"url_path\": \"/tasks\"}\n\n if name != None:\n click.echo(\"remove task by name is not supported yet\")\n sys.exit(1)\n try:\n dict_resp= estask.Task(kargs).delete_task_by_id(id)\n except Exception as e:\n sys.exit(\"failed to delete task: %s\" %str(e))\n\n if 'status' in dict_resp and dict_resp['status'].lower() != 'success':\n sys.exit(\"Fail: %s\"%str(dict_resp))\n\n try:\n click.echo(\"Success: %s\" %(str(dict_resp[\"message\"])))\n except Exception as e:\n sys.exit(\"Fail: %s %s\" %(str(e), str(dict_resp)))", "def bdev_daos_delete(client, name):\n params = {'name': name}\n return client.call('bdev_daos_delete', params)", "def delete_table(self, name: str) -> None:", "def vm_diskdelete(args):\n name = args.name\n diskname = args.diskname\n pool = args.pool\n config = Kconfig(client=args.client, debug=args.debug, region=args.region, zone=args.zone, namespace=args.namespace)\n k = config.k\n if diskname is None:\n common.pprint(\"Missing diskname. 
Leaving...\", color='red')\n os._exit(1)\n common.pprint(\"Deleting disk %s\" % diskname)\n k.delete_disk(name=name, diskname=diskname, pool=pool)\n return", "def remove(name):", "def delete_segment(self, name: str) -> None:\n self._status.check_authority_for_draft()\n\n delete_data: Dict[str, Any] = {\"segmentName\": name}\n delete_data.update(self._status.get_status_info())\n\n self._client.open_api_do(\"DELETE\", \"segments\", self.dataset_id, json=delete_data)", "def delete_instance(self, instance_name, check=True):\n page_instances = self.page_instances()\n\n with page_instances.table_instances.row(\n name=instance_name).dropdown_menu as menu:\n menu.button_toggle.click()\n menu.item_delete.click()\n\n page_instances.form_confirm.submit()\n\n if check:\n self.close_notification('success')\n page_instances.table_instances.row(\n name=instance_name).wait_for_absence(EVENT_TIMEOUT)", "def delete(self, name):\n raise NotImplementedError(\n \"subclasses of Storage must provide a delete() method\"\n )", "def bdev_zone_block_delete(client, name):\n params = {'name': name}\n return client.call('bdev_zone_block_delete', params)", "def bdev_malloc_delete(client, name):\n params = {'name': name}\n return client.call('bdev_malloc_delete', params)", "def bdev_error_delete(client, name):\n params = {'name': name}\n return client.call('bdev_error_delete', params)", "def remove_resource(self, name):\n self._NDL_API('removeresource', { 'vm': name, }, None)", "def delete_credential(name: str):\n # first load any existing credentials\n try:\n creds = load_auth()\n except FileNotFoundError:\n # if no auth file exists we can just treat that as there being no credentials\n creds = []\n\n if '@' in name:\n username, hostname = name.split('@')\n else:\n username = name\n hostname = None\n\n # next, try to figure out which one we're supposed to remove\n matches = []\n match_indices = []\n\n for idx, cred in enumerate(creds):\n # the username must match\n if cred.username != username:\n continue\n # if specified, the hostname must match\n if hostname is not None and cred.hostname != hostname:\n continue\n\n matches.append(cred)\n match_indices.append(idx)\n\n if len(matches) == 0:\n err = f\"No matching credential found with username '{username}'\"\n if hostname is not None:\n err += f\" with hostname '{hostname}'\"\n raise RuntimeError(err)\n elif len(matches) > 1:\n raise RuntimeError(_construct_ambiguous_deletion_message(username, hostname, matches))\n\n # At this point we should have exactly one match, which we can delete\n del creds[match_indices[0]]\n write_auth_data(configure.get_config_path(\"auth\"), creds)\n prune_outdated_auth()", "def remove_wallet(self, name):\n cmd = \"\"\" DELETE FROM %s WHERE %s = '%s' \"\"\" %(TABLE_WALLETS,\n COL_WALLETS_NAME,\n name)\n self.__dbcursor.execute(cmd)", "def delete(self, name):\n try:\n del self._d_features[name]\n return(True)\n except:\n return(False)", "def release_named_lock(self, name):\r\n self.log.debug(\"Releasing named lock (%s)\" % name)\r\n self._named_locks[name].release()", "def deleteCredential(self, credentialName):\n try:\n utility.execLog(\"Deleting Credential: %s\" % credentialName)\n self.browserObject, status, result = self.selectCredential(credentialName)\n if not status:\n return self.browserObject, False, result\n # Checking for Default Credentials - 'Delete' will be Disabled\n disabled = self.handleEvent(EC.presence_of_element_located((By.ID, self.CredentialsObjects('deleteCredentials'))), action=\"GET_ATTRIBUTE_VALUE\", 
attributeName=\"disabled\")\n if \"true\" in disabled:\n return self.browserObject, False, \"Unable to Delete Default Credential: %s\" % credentialName\n # Clicking on Delete\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CredentialsObjects('deleteCredentials'))), action=\"CLICK\")\n utility.execLog(\"Checking for Confirm Box...\")\n try:\n currentTitle = self.handleEvent(EC.element_to_be_clickable((By.XPATH, self.CommonObjects('GetFormTitle'))), action=\"GET_TEXT\")\n except:\n return self.browserObject, False, \"Unable to Load Confirm Box To Delete Credential\"\n if \"Confirm\" in currentTitle:\n utility.execLog(\"Confirm Box Loaded...Confirming to Delete Credential: '%s'\" % credentialName)\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CommonObjects('ConfirmYes'))), action=\"CLICK\")\n else:\n utility.execLog(\"Failed to Verify Confirm Delete Box :: Actual --> '%s' :: Expected --> '%s'\" % (currentTitle, \"Confirm\"))\n return self.browserObject, False, \"Failed to Verify Confirm Delete Box :: Actual --> '%s' :: Expected --> '%s'\" % (currentTitle, \"Confirm\")\n # Checking for Error Deleting a Credential\n try:\n errorRedBox = self.handleEvent(EC.visibility_of_element_located((By.XPATH, self.CommonObjects('RedBoxError'))), wait_time=10)\n if errorRedBox:\n errorMessage = self.handleEvent(EC.element_to_be_clickable((By.XPATH, self.CommonObjects('RedBoxErrorMessages'))),action=\"GET_TEXT\")\n return self.browserObject, False, \"Failed to Delete Credential :: '%s' :: Error -> %s\" % (credentialName, errorMessage)\n except:\n # Refresh Table\n self.handleEvent(EC.element_to_be_clickable((By.ID, self.CredentialsObjects('credentialsRefresh'))), action=\"CLICK\")\n time.sleep(3)\n # VALIDATION: Selecting deleted Credential\n self.browserObject, status, result = self.selectCredential(credentialName)\n if status:\n return self.browserObject, False, \"Failed to Delete Credential :: '%s' :: Error -> %s\" % (credentialName, \"Validation Error\")\n else:\n return self.browserObject, True, \"Successfully Deleted Credential: '%s'\" % credentialName\n except Exception as e:\n return self.browserObject, False, \"Exception while Deleting Credential :: '%s' :: Error -> %s\" % (credentialName, str(e) + format_exc())", "def delete(self, name, user):\n connection = self.connect()\n cursor = connection.cursor()\n cursor.execute(self.sql[\"delete\"], {\"name\": name, \"user\": user})\n if cursor.rowcount < 1:\n raise DoesNotExistException(\n \"Could not find an applicable saved roll with that name.\"\n )\n connection.commit()", "def fusion_api_delete_hypervisor_manager(self, name=None, uri=None, api=None, headers=None):\n return self.hypervisor_mgr.delete(name=name, uri=uri, api=api, headers=headers)", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def get_waiter(waiter_name=None):\n pass", "def delete(self, actor_id, name):\n (myself, check) = auth.init_actingweb(appreq=self,\n actor_id=actor_id,\n path='callbacks',\n config=self.config)\n if not myself or check.response[\"code\"] != 200:\n return\n path = name.split('/')\n if path[0] == 
'subscriptions':\n peerid = path[1]\n subid = path[2]\n if not check.check_authorisation(path='callbacks', subpath='subscriptions', method='DELETE', peerid=peerid):\n self.response.set_status(403, 'Forbidden')\n return\n sub = myself.get_subscription_obj(peerid=peerid, subid=subid, callback=True)\n if sub:\n sub.delete()\n self.response.set_status(204, 'Deleted')\n return\n self.response.set_status(404, 'Not found')\n return\n if not check.check_authorisation(path='callbacks', subpath=name, method='DELETE'):\n self.response.set_status(403, 'Forbidden')\n return\n if not self.on_aw.delete_callbacks(name=name):\n self.response.set_status(403, 'Forbidden')", "def delete(self, name=None):\n Console.ok(f\"DELETE: Using {Registry.PROTOCOL_NAME} Protocol\")\n return self.protocol.delete(name)", "def delete(self, name):\n\n for i in self.bots:\n if i.name == name:\n i.exit()\n self.remove(i)\n i.cfg['enable'] = 0\n i.cfg.save()\n logging.debug('%s disabled' % i.name)\n return 1", "def delete_compute_target_by_name(ws, name):\n ws.compute_targets[name].delete()", "def delete(args, config):\n print('Deletes a selected HPC fleet with name \"{}\"'.format(args.fleet_name))", "def bdev_passthru_delete(client, name):\n params = {'name': name}\n return client.call('bdev_passthru_delete', params)", "def deleteInstrumentFromName(self, name):\n matching_instruments = list(filter(lambda x: x.name == name,\n self.instruments))\n assert len(matching_instruments) == 1\n del self.instruments[name]", "def remove(name, send_events=True, moving=False):", "def destroy_by_name(name, driver):\n\n matches = [node for node in list_nodes(driver) if node.name == name]\n if len(matches) == 0:\n logger.warn('no node named %s' % name)\n return False\n else:\n return all([node.destroy() for node in matches])", "def delete(self, policy_name):\n path = self.vault.normalize(\"/sys/policies/acl/\" + policy_name)\n address = self.vault.vault_adress + \"/v1\" + path\n # Actually run vault\n logging.info(\"Deleting the policy: %s\", address)\n self.vault.requests_request(\"DELETE\", address, headers=self.vault.token_header)", "def bdev_ocf_delete(client, name):\n params = {'name': name}\n\n return client.call('bdev_ocf_delete', params)", "def delete(self, name):\n instance = self.get_one_instance('name', name)\n\n if type(instance) != self.Component:\n set_session_var('errors', str(instance))\n return None\n\n res = delete_in_db(instance)\n\n if res != 'deleted':\n set_session_var('errors', str(res))\n else:\n set_session_var('success', res)\n\n return True", "def cleanup(name, client=None):\n credential_specs_path = _get_path(client)\n path = os.path.join(credential_specs_path, name + '.json')\n fs.rm_safe(path)", "def remove_curve(self, name):\n self._curve_reg.__delitem__(name)", "def delete(self, name):\n if (self.model_dir / (str(name) + '.pkl')).exists():\n (self.model_dir / (str(name) + '.pkl')).unlink()", "def delete_address(self) -> object:\n self.delete_button.click()\n\n return DeletionModal(self).wait_for_component_to_be_present()", "def deleteCookie(self, name):\n cmdId = self.executeCommand(Command.DELETE_COOKIE, {'name': name})\n return cmdId", "def delete_entry_group(self, name):\n self.__datacatalog.delete_entry_group(name=name)" ]
[ "0.7185444", "0.69208735", "0.6794032", "0.67044383", "0.6475918", "0.62396824", "0.6199465", "0.6158003", "0.6086726", "0.60831106", "0.60684264", "0.60101855", "0.60017025", "0.597735", "0.596643", "0.5948519", "0.59348965", "0.59313756", "0.59124154", "0.5899736", "0.5895692", "0.58922225", "0.5871571", "0.58480674", "0.5847167", "0.5808935", "0.57869", "0.57858294", "0.5757995", "0.5726876", "0.57246035", "0.5713897", "0.5704362", "0.57038057", "0.56769353", "0.56511366", "0.5649638", "0.5649638", "0.5649638", "0.5645322", "0.5634927", "0.5608875", "0.55923223", "0.559212", "0.55810654", "0.55810654", "0.5577549", "0.55483246", "0.55438656", "0.5543558", "0.55406487", "0.5530472", "0.5507307", "0.5504308", "0.5485997", "0.54851854", "0.54825574", "0.5482239", "0.54625815", "0.54536444", "0.5449016", "0.5446388", "0.5436994", "0.543292", "0.5425384", "0.54116774", "0.53991085", "0.53971004", "0.5389598", "0.53739375", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.5373543", "0.53695595", "0.5350985", "0.53329074", "0.5331413", "0.53298354", "0.5319456", "0.53133273", "0.53101015", "0.53090596", "0.5279174", "0.527413", "0.5271426", "0.52711874", "0.526287", "0.52544117", "0.5250783", "0.52442455", "0.5240304" ]
0.57201225
31
Save seed into temp file.
def saveseed(self, seed):
    savefile = gettempdir() + '/last_test_seed_fate.tmp'
    if args.verbose:
        print('Saving run into ' + savefile)
    with open(savefile, 'w') as f:
        f.write(str(seed))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_seed(self, seed: np.ndarray):\n print(\"Reconstructed trace saved as seed to \", CONFIG_DIR)\n np.savetxt(CONFIG_DIR / self.name_seed, seed.view(float).reshape(-1, 2))", "def insert_test( hash, random, seq ):\n try:\n with open(os.path.join(SEED_DIRECTORY, \"%s_%s\" % (hash, 0)), \"w+\") as f:\n record_used('seeds', hash)\n pickle.dump({'hash': hash, 'random': random, 'seq': seq }, f)\n except IOError:\n if not os.environ.get('CALIENDO_TEST_SUITE', None):\n logger.warning( \"Failed to open %s\" % hash)", "def local_seed(self) -> str:\n assert self.definition.settings.sp_root_dir\n seed_file = self.definition.settings.sp_root_dir.joinpath(\"seed.txt\")\n if not seed_file.exists():\n seed = str(encode_hex(bytes(random.randint(0, 255) for _ in range(20))))\n seed_file.write_text(seed)\n else:\n seed = seed_file.read_text().strip()\n return seed", "def seed():", "async def save(self, job, options=None):\n if options is None:\n options = {}\n\n if not options.get('secretseed'):\n bundle = False\n filename = '/data/freenas-v1.db'\n else:\n bundle = True\n filename = tempfile.mkstemp()[1]\n os.chmod(filename, 0o600)\n with tarfile.open(filename, 'w') as tar:\n tar.add('/data/freenas-v1.db', arcname='freenas-v1.db')\n tar.add('/data/pwenc_secret', arcname='pwenc_secret')\n\n def read_write():\n with open(filename, 'rb') as f:\n f2 = os.fdopen(job.write_fd, 'wb')\n while True:\n read = f.read(1024)\n if read == b'':\n break\n f2.write(read)\n f2.close()\n await self.middleware.run_in_thread(read_write)\n\n if bundle:\n os.remove(filename)", "def save(self, filename='test'):\n file = open(filename+'.txt','w')\n pickle.dump(self, file)\n file.close()", "def make_temp_file():\n global TEST_DATA_PATH\n TEST_DATA_PATH = tempfile.mkstemp()", "def save_tmp_file(self, data):\n with open(self.tmp_file, 'wb') as f:\n f.write(data)", "def _save(self):\n\t\t\n\t\tdirectory = self.Output_path\n\n\t\t# replace with \n\t\t# file_name = hermes.mk_themis_file_name(themis_obj = self)\n\t\tfile_name = f'Themis_{self.CELL_ID[\"experiment\"]}_u{self.CELL_ID[\"unit\"]}_c{self.CELL_ID[\"cell\"]}_r{self.CELL_ID[\"run\"]}.pkl'\n\n\t\tsave_path = directory / file_name\n\n\t\t# Atomic saving (helpful?)\n\t\ttemp_path = save_path.with_suffix(save_path.suffix + '.tmp')\n\t\t\n\t\tself.SavePath = save_path\n\n\t\t\n\t\twith open(temp_path, 'wb') as f:\n\t\t\tpickle.dump(self, f)\n\n\t\ttemp_path.rename(save_path)\n\n\t\tprint(f'Saved {self.RUN_KEY} as {save_path}')", "def save(self):\n\t\tself.CONFIG.save()\n\t\tself.temp_files.save()", "def save(self):\n\n pattern = '{}_{}_{}ep.pt' if self.checkpoint_filename_pattern is None else self.checkpoint_filename_pattern\n filename = pattern.format('sherlock1', time.strftime(\"%Y-%m-%d_%H-%M-%S\"),\n self.monitors['loss_train'].num_epochs)\n full_filename = self.full_path(filename)\n c = {\n 'state_dict': self.net.state_dict(),\n 'optimizer': self.optimizer.state_dict(),\n 'monitors': self.monitors,\n 'parent': self.parent,\n 'args': vars(args) # convert args to dict\n }\n torch.save(c, full_filename)\n if not args.tuning and args.delete and self.last_checkpoint is not None:\n os.remove(self.last_checkpoint)\n self.last_checkpoint = full_filename\n return filename", "def temporary(cls):\n fh, path = tempfile.mkstemp(suffix='.hdf5')\n os.close(fh)\n self = cls(path, 'w')\n self.path = path\n return self", "def persist(self, filepath):\n joblib.dump('hello-steppy', filepath)", "def seed():\n pass", "def seed():\n pass", "def save(self):\n if self.hasChanged:\n filePath = 
self.path\n tempPath = filePath+'.tmp'\n fileDir = os.path.split(filePath)[0]\n if not os.path.exists(fileDir): os.makedirs(fileDir)\n cPickle.dump(self.data,open(tempPath,'w'))\n renameFile(tempPath,filePath,True)\n self.hasChanged = False", "def save_checkpoint(self, filename='checkpoint.pth'):\n torch.save(self.state_dict(), filename)", "def _save(self, filename = str(int(time()))):\n if filename:\n with open(filename, 'w') as f:\n f.write('null')\n self.prompt_time = 0\n exit()", "def save(self, path):\n individual = self.population.fittest_individual()\n order = [int(l) for l in individual.label_order]\n fitness = individual.fitness\n data = {'name': self.ds.name,\n 'num_labels': len(order),\n 'order': order,\n 'fitness': fitness\n }\n with open(path, 'w') as f:\n json.dump(data, f)", "def gen_int(filename):\n random.seed()\n random.randint(-100,100)\n with open(filename, \"w\") as f:\n for i in range(1000):\n f.write(str(random.randint(-100,100)))\n f.write(\" \")\n # f.write(\"hello\")", "def temp_dump(self, session_id):\n f = open(pathlib.Path(basedir).joinpath('static', 'temp', session_id, 'hero_pickle_storage.json'), 'w')\n stored_info = jsonpickle.encode(self)\n f.write(stored_info)\n f.close()", "def saveCheckpoint(self):\n time_stamp = time.strftime('%Y%m%d%H%M%S', time.gmtime())\n state_filename = os.path.join(self.saving_dir, 'checkpoint.' + time_stamp + '.pth.tar')\n mem_filename = os.path.join(self.saving_dir, 'memory.' + time_stamp + '.pth.tar')\n state = self.getSavingState()\n memory = {\n 'memory': self.memory\n }\n torch.save(state, state_filename)\n torch.save(memory, mem_filename)", "def save(self):\n memento = self.create_memento()\n import datetime\n f = open(str(datetime.datetime.now()).replace(' ','_')+'.saved_story','w')\n cPickle.dump(memento,f)\n f.close()\n zcanvas.message(\"Saved!\")", "def save(self, filename=\"fitter.pickle\"):\n\n with open(filename, \"wb\") as outfile:\n pickle.dump(self, outfile)", "def _generate_to_tempfile(self, generator):\r\n (output_fd, output_path) = tempfile.mkstemp()\r\n with os.fdopen(output_fd, 'w') as output:\r\n generator.write(output)\r\n return output_path", "def dump_to_tmpfile(obj):\n\n import tempfile\n\n fname = tempfile.mktemp()\n with open(fname, \"w\") as txtfile:\n txtfile.write(str(obj))\n\n print(\"str(obj) was written to {}\".format(fname))\n\n return fname", "def save_checkpoint(state, filename):\n print (\"=> Saving a new best\")\n torch.save(state, filename) # save checkpoint", "def save_training(self):\n\n filename = str(hashlib.sha1(str(self.training_data).encode(\"utf-8\"))\n .hexdigest())\n path = \"./training/\" + filename + \".json\"\n\n data = {\n \"states\": self.states,\n \"transitions\": self.transitions,\n \"matrix\": self.matrix.tolist()\n }\n\n with open(path, \"w\") as outfile:\n json.dump(data, outfile)", "def update_seed_parameters(parameters, samples):\n\n with open(\"../../output/seed.tmp\", \"w\") as f:\n f.write(f\"{parameters[0]+parameters[1]}\\n\")\n f.write(f\"{samples}\")", "def test_to_file(self):\n fd, fp = mkstemp()\n close(fd)\n st = SampleTemplate.create(self.metadata, self.new_study)\n st.to_file(fp)\n self._clean_up_files.append(fp)\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE)\n\n fd, fp = mkstemp()\n close(fd)\n st.to_file(fp, {'2.Sample1', '2.Sample3'})\n self._clean_up_files.append(fp)\n\n with open(fp, 'U') as f:\n obs = f.read()\n self.assertEqual(obs, EXP_SAMPLE_TEMPLATE_FEWER_SAMPLES)", "def delete(seed):\n 
shutil.rmtree(os.path.join(DATA_DIR, seed))", "def save():", "def _save(self):\n # TODO: Use local.punny dump (when written)\n with open(filename, 'w') as f:\n pickle = Pickler(f)\n pickle.dump(self.pungen.puns)", "def save(self, filename):\n\n torch.save(self.state_dict(), filename)", "def save_checkpoint(state, filename='checkpoint.pth.tar'):\n torch.save(state, filename)", "def save(self):\n with open(\"samples.txt\", \"a\") as f:\n f.write(str(self) + \"\\n\")", "def save_checkpoint(state, filename):\n torch.save(state, filename) # save checkpoint", "def saveTS(tournament, fileName):\n fd = open(fileName)\n pickle.dump(tournament, fd)\n TournamentSystem._logger.debug(\"Dumped game state to %s\", fileName)", "def saveTeachersData():\n with open(\"TeacherData.txt\",\"wb\") as teacherData:\n pickle.dump(teacherEntities,teacherData)", "def save_checkpoint(self, filename=None):\n filename = os.path.join(self.args.checkpoint_dir, filename)\n state = {\n 'epoch': self.current_epoch + 1,\n 'iteration': self.current_iter,\n 'state_dict': self.model.state_dict(),\n 'optimizer': self.optimizer.state_dict(),\n 'best_MIou':self.best_MIou\n }\n torch.save(state, filename)", "def save(self):\n try:\n torch.save(self.model.state_dict(), os.path.join(self.save_path, \"save_point.pth\"))\n except:\n print(\"Unable to save the model\")", "def save(self):\n\t\tPath(self.PATH).mkdir(parents=True,exist_ok=True)\n\n\t\twith open(self.account_file, \"wb\") as file:\n\t\t\tpickle.dump(self, file)", "def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n directory = \"runs/%s/\"%(args.name)\n if not os.path.exists(directory):\n os.makedirs(directory)\n filename = directory + filename\n torch.save(state, filename)\n if is_best:\n shutil.copyfile(filename, 'runs/%s/'%(args.name) + 'model_best.pth.tar')", "def save(self, target):\n from six.moves.cPickle import dump\n data = self.serialize()\n with open(target, 'wb') as f:\n dump(data, f)", "def save(self, target):\n from six.moves.cPickle import dump\n data = self.serialize()\n with open(target, 'wb') as f:\n dump(data, f)", "def testSave(self):\n\n # Generate temp file path\n index = os.path.join(tempfile.gettempdir(), \"bm25\")\n os.makedirs(index, exist_ok=True)\n\n model = self.method(\"bm25\")\n model.save(index)\n model.load(index)", "def save_sample_dict(self):\n with open(self._sample_dict_path, 'w+') as fp:\n pickle.dump(self.sample_dict, fp)", "def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n directory = \"runs/%s/\" % (args.name)\n if not os.path.exists(directory):\n os.makedirs(directory)\n filename = directory + filename\n torch.save(state, filename)\n if is_best:\n shutil.copyfile(filename, 'runs/%s/' % (args.name) + 'model_best.pth.tar')", "def generate_test_data(obj, name):\n with open('tests/{}'.format(name), 'wb') as f:\n pickle.dump(obj, f)", "def store_outcome(model_name, dataset, strict, forgiving):\n model_name = model_name.replace('/', '-')\n\n with open(f'outcomes-{model_name}-{dataset}.pkl', 'wb') as file:\n\n pickle.dump((strict, forgiving), file)", "def save_checkpoint(args,state, is_best, filename=\"checkpoint.pth.tar\"):\n directory = \"runs/%s-net/\" % (args.name)\n\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n\n epoch = state['epoch']\n\n filename = directory + filename\n torch.save(state, filename)\n\n if is_best:\n shutil.copyfile(filename, \"runs/%s-net/\" % (args.name) + \"model_best.pth.tar\")\n\n if epoch==0 or epoch==2:\n shutil.copyfile(filename, \"runs/%s-net/\" % 
(args.name) + \"model_epoch_%d.pth.tar\" % epoch )", "def to_file(self, filename):\n\n output_dict = {'random_forest': self.random_forest,\n 'apply_preprocessing': self.apply_preprocessing,\n 'apply_postprocessing': self.apply_postprocessing}\n pickle.dump(output_dict, open(filename, \"wb\"))", "def save_checkpoint(self, name=''):\n self.checkpoint_path.mkdir(exist_ok=True)\n if name:\n path = self.checkpoint_path / f'{name}_{self.epoch}.tar'\n else:\n path = self.checkpoint_path / f'{self.epoch}.tar'\n torch.save(self.get_state(), path)", "def save_checkpoint(state: dict, is_best: bool, filename: str = 'checkpoint.pth.tar', args: Namespace = None):\n directory = f\"runs/{args.name}/\"\n if not os.path.exists(directory):\n os.makedirs(directory)\n filename = directory + filename\n torch.save(state, filename)\n if is_best:\n shutil.copyfile(filename, f'runs/{args.name}/model_best.pth.tar')", "def save(self, fname):\n pass", "def save(self) -> None:\n self.saver.save_model_and_weights(self.model)\n self.saver.save_data_shuffle_indices(\n self.data.eval_shuffler.ds_inds\n )\n self.saver.save_input_scaler(self.data.x.scaler)", "def save(self):\n torch.save(self.state_dict(), self.checkpoint_path)\n with open(self.config_path, 'w') as f:\n print(self, file=f)", "def save(config):\n (fd, name) = tempfile.mkstemp()\n f = os.fdopen(fd, \"a\")\n f.write(yaml.dump(config))\n f.close()\n return name", "def save_generator(self, path=helpers.GEN_PATH_IMG):\n torch.save(self.generator.state_dict(), path)", "def test_to_file(self):\n with TemporaryDirectory() as tmp:\n df_test = make_simple_dataframe()\n Base = BaseDataClass.from_object(df_test)\n fp_save = os.path.join(tmp, \"test_save.csv\")\n Base.to_file(fp_save)\n assert os.path.exists(fp_save)", "def test_write(self):\n temp_file = tempfile.mkstemp()[1]\n try:\n with open(temp_file, \"w+\") as fh:\n self.new_manifest.write(fh)\n tools.eq_(self.new_manifest, load_manifest(temp_file))\n finally:\n os.unlink(temp_file)", "def test_pickle(self):\n X = self.generate_X()\n task = mmRDTR()\n task.fit(X)\n with tempfile.TemporaryFile(mode='w+b') as tf:\n cPickle.dump(task, tf)", "def save_features_to_file(self):\n if not os.path.exists(self.features_save_path):\n os.makedirs(self.features_save_path)\n for s in self.sets:\n self.save_features_to_file_by_set(s)", "def save(self):\n # TODO: save the file", "def write_checkpoint(self):\n self.file_checkpoint_data = open(self.path_checkpoint, \"a+\")\n array_to_write = [str(self.radious), self.type_feature, self.type_filtering, self.h_filterig]\n self.file_checkpoint_data.write(','.join(array_to_write) + \"\\n\")\n self.file_checkpoint_data.flush()", "def finalise(self):\n self.logger.info(\"Saving final versions of model...\")\n self.save_checkpoint(filename='final.pth.tar')", "def save_state(self, training_state: _TrainingState, fname: str):\n with open(fname, \"wb\") as fp:\n pickle.dump(training_state, fp)", "def saveto(file, tmpfile):\n args = {\"file\": file, \"tmpfile\": tmpfile}\n send_command(\"saveto\", args)", "def checkpoint(self):\n self.logger.info('Checkpointing Sampler')\n with open(self.resume_file, \"wb\") as f:\n pickle.dump(self, f)", "def test_005_write_file(self):\n __test = chess_storage.ChessStorage()\n __test_data = list(range(consts.TEST_LIST_LENGHT))\n __dir_game_saves = os.path.dirname(__file__)\n __dir_game_saves = os.path.join(__dir_game_saves, 'games')\n __dir_game_saves = os.path.join(__dir_game_saves, consts.TEST_FILENAME)\n # pylint: disable = protected-access\n __save_test = 
__test._ChessStorage__write_file(__dir_game_saves, __test_data)\n # pylint: enable = protected-access\n self.assertEqual(__save_test, consts.ERROR_CODES[\"SUCCESSFULL\"])", "def checkpoint(self):\n save()", "def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n directory = \"checkoutpoint/%s/\" % args.name\n if not os.path.exists(directory):\n os.makedirs(directory)\n filename = directory + filename\n torch.save(state, filename)\n if is_best:\n shutil.copyfile(filename, 'checkoutpoint/%s/' % args.name + 'model_best.pth.tar')", "def save():\n pass", "def save(self, path=\"./trained_model.checkpoint\"):\n torch.save({\"state_dict\":self.working_q.state_dict}, path)", "def save(self, filename):\n with open(filename, \"w\") as fp:\n dump(self, fp)", "def save_data_pickle(self, save_full=False):\n self.train.to_pickle('../input/train_mod.pkl')\n self.test.to_pickle('../input/test_mod.pkl')\n if save_full:\n self.train_full.to_pickle('../input/train_full_mod.pkl')", "def save_checkpoint(state, is_best, file_path, file_name='checkpoint.pth.tar'):\n\n save_path = file_path + '/' + file_name\n torch.save(state, save_path)\n if is_best:\n shutil.copyfile(save_path, file_path + '/model_best.pth.tar')", "def seed(path):\n return os.path.join(os.path.split(os.path.realpath(__file__))[0], path)", "def save_checkpoint(self, state, is_best, filename='checkpoint.pth.tar'):\n filename = os.path.join(self.experiment_dir, filename)\n torch.save(state, filename)\n if is_best:\n filename_best = os.path.join(self.experiment_dir,'best.pth.tar')\n torch.save(state,filename_best)\n best_pred = state['best_pred']\n with open(os.path.join(self.experiment_dir, 'best_pred.txt'), 'w') as f:\n f.write(str(best_pred))\n if not os.path.exists(os.path.join(self.directory,'best_pred.txt')):\n with open(os.path.join(self.directory,'best_pred.txt'),'w') as f:\n f.write(str(best_pred))\n shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar'))\n else:\n with open(os.path.join(self.directory,'best_pred.txt'),'r') as f:\n max_iou = float(f.readline())\n if best_pred > max_iou:\n with open(os.path.join(self.directory,'best_pred.txt'),'w') as f:\n f.write(str(best_pred))\n shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar'))", "def save(self, p):\n pickle.dump(p, open('save.dat', 'wb'))\n print(\"Game Saved!\")", "def dump(self, path):\n torch.save(self,path)", "def save_checkpoint(self, filename, extra_state):\n self.call_async(0, '_async_save_checkpoint', filename=filename, extra_state=extra_state).gen()", "def save(self, filename):\n with open(filename, 'w') as f:\n pickle.dump((self.mean, self.std_dev), f)", "def test_save_local(self):\n generator = Generator(FakeValencia(), save_local=True)\n job = generator.sample(num_raw_bits=100)\n # Need to manually save since the backend is not an IBMQBackend.\n generator._save_local(100, job.wsr, job.job, job.shots)\n saved_fn = None\n file_prefix = Generator._file_prefix + '_'\n for entry in os.listdir():\n if entry.startswith(file_prefix):\n saved_fn = entry\n break\n self.assertTrue(saved_fn, \"No saved file found.\")\n job.saved_fn = saved_fn\n r_job = Generator.recover(saved_fn, mock.MagicMock())\n job.block_until_ready()\n try:\n self.assertFalse(any(fn.startswith(file_prefix) for fn in os.listdir()))\n except AssertionError:\n os.remove(saved_fn)\n raise\n self.assertEqual(r_job.wsr, job.wsr)\n self.assertEqual(r_job.shots, job.shots)", "def _testfile():\r\n import tempfile\r\n return os.path.join(tempfile.gettempdir(), 
'trash-%s' % os.getpid())", "def save_checkpoint(filename, model, state=None):\n if not state:\n torch.save(model.state_dict(), os.path.join('checkpoints/', filename))\n else:\n _state = {\n 'epoch': state['epoch'],\n 'state_dict': state['state_dict'].state_dict(),\n 'optimizer': state['optimizer'].state_dict()\n }\n\n torch.save(_state, os.path.join('checkpoints/', filename))", "def save_checkpoint(filename, epoch, model, optimizer=None, best_score=0):\n torch.save({\n 'model' : model.state_dict(),\n 'optim' : optimizer.state_dict() if optimizer is not None else None,\n 'epoch' : epoch,\n 'best_score' : best_score\n }, filename)", "def saveStateOfThisRun(self):\n with open('stateFile.json', 'w') as statefile:\n json.dump(self.fileTobeUploaded, statefile, indent=4)", "def tempcontrol_preset_save(self):\n with open(\n self.tempcontrol_presets_path\n + \"{}.json\".format(self.tempcontrol_preset_currentFilename),\n \"w\",\n ) as output:\n output.write(json.dumps(self.tempcontrol_conf))", "def write(self):\n # # Sometimes file is not written properly. So delete and rewrite it\n # os.system('rm {}'.format(snip_dir + '/' + self.name))\n # if 'NUM_TIME_STEPS' not in self.define.keys():\n # warnings.warn('NUM_TIME_STEPS missing in header. Execution may hang!')\n with open(snip_dir + '/' + self.name, 'w') as f:\n f.write('/* Temporary generated file for snip process definitions before compilation */\\n')\n f.write(self.__str__())\n\n # os.system('ls {}'.format(snip_dir + '/' + self.name))", "def save(self, target, withdP=False):\n from six.moves.cPickle import dump\n data = self.serialize(withdP=withdP)\n with open(target, 'wb') as f:\n dump(data, f)", "def save(self, target, withdP=False):\n from six.moves.cPickle import dump\n data = self.serialize(withdP=withdP)\n with open(target, 'wb') as f:\n dump(data, f)", "def _temp_file(self, val):\n fd, fn = tempfile.mkstemp()\n fp = os.fdopen(fd, \"wb\")\n if val:\n if not isinstance(val, bytes):\n fp.write(val.encode(\"utf-8\", \"surrogateescape\"))\n else:\n fp.write(val)\n fp.close()\n return fn", "def save(self, to_path):\n with open(to_path, 'wb') as f:\n torch.save(self.state_dict(), f)", "def save(self, to_path):\n with open(to_path, 'wb') as f:\n torch.save(self.state_dict(), f)", "def save_checkpoint(dir, state, is_best, filename='checkpoint.pth.tar'):\n directory = \"%s/\" % (dir)\n if not os.path.exists(directory):\n os.makedirs(directory)\n filename = directory + filename\n torch.save(state, filename)\n if is_best:\n shutil.copyfile(filename, '%s/' %\n (dir) + 'model_best.pth.tar')", "def saveFeatures(self, filename):\n print(\"Saving features info, spikeset hash\",)\n f = open(filename, 'wb')\n # compute a hash for the spikeset\n b = self.spikes.view(np.uint8)\n hashkey = hashlib.sha1(b).hexdigest()\n print(hashkey, \"to file\", filename, \".\")\n pickle.dump(hashkey, f)\n pickle.dump(self.feature_special, f)", "def save(self, filename):\n\n checkpoint = {'input_size': self.state_size,\n 'output_size': self.action_size,\n 'actor_hidden_layers': [each.out_features for each in self.actor_local.hidden_layers if each._get_name()!='BatchNorm1d'],\n 'actor_state_dict': self.actor_local.state_dict(),\n 'critic_hidden_layers': [each.out_features for each in self.critic_local.hidden_layers if each._get_name()!='BatchNorm1d'],\n 'critic_state_dict': self.critic_local.state_dict()}\n\n torch.save(checkpoint, filename)", "def save(self, path):\n torch.save(self, path)", "def save(self, path):\n torch.save(self, path)" ]
[ "0.71522933", "0.6273968", "0.62532884", "0.6251843", "0.604025", "0.60345274", "0.60236645", "0.6002649", "0.5998057", "0.5997992", "0.5983726", "0.5970545", "0.5964728", "0.5925001", "0.5925001", "0.5902035", "0.5890514", "0.58809185", "0.5874178", "0.58705467", "0.5869121", "0.58676714", "0.58513814", "0.5847211", "0.5844433", "0.5839203", "0.58270246", "0.58026654", "0.5786167", "0.577891", "0.57456505", "0.57167786", "0.57130116", "0.5710445", "0.5700541", "0.5698627", "0.5690066", "0.56749624", "0.5652072", "0.564963", "0.56477666", "0.56445974", "0.56381863", "0.56348556", "0.56348556", "0.56324655", "0.56323785", "0.5631757", "0.5631077", "0.5627727", "0.5603482", "0.559838", "0.5597841", "0.5595995", "0.55944085", "0.55862254", "0.5583963", "0.55836254", "0.55768377", "0.5567296", "0.555817", "0.55571085", "0.5555856", "0.555065", "0.55489355", "0.55438125", "0.5535273", "0.5531796", "0.5517077", "0.5515619", "0.5511071", "0.55061895", "0.5503408", "0.5501056", "0.5493635", "0.5482055", "0.54798967", "0.5477305", "0.5476861", "0.54721147", "0.54648817", "0.5463578", "0.5462103", "0.54614866", "0.5454186", "0.54310465", "0.54295033", "0.5423928", "0.54149973", "0.5414536", "0.5411155", "0.5411155", "0.54088515", "0.540533", "0.540533", "0.5402936", "0.540257", "0.5402111", "0.5402032", "0.5402032" ]
0.8186387
0
Run the test based on given seed.
def run_test(self, seed, commands_per_run):
    if args.verbose:
        print('Sample text:\n' + str(self.document.text))
        print('Starting selection: ' + str(self.document.selection))
    for i in range(commands_per_run):
        userinput = self.document.ui.getinput()
        if args.verbose:
            try:
                name = userinput.__name__
            except AttributeError:
                name = str(userinput)
            print('{}: Input = {}, Mode = {}'.format(i + 1, name, self.document.mode))
        try:
            self.document.mode.processinput(self.document, userinput)
        except:
            print('Current text:\n{}'.format(self.document.text))
            print('Current selection: {}'.format(self.document.selection))
            print('Current pattern: {}'.format(self.document.search_pattern))
            raise
    self.successes += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_test(self, source):\r\n ret = 1\r\n for seed in range(1, 40):\r\n if source.run(temp_params={\"fitness_function\": (lambda x: -np.sum(x)**2+10),\r\n \"population_size\": 10,\r\n \"time_constraint\": 2,\r\n \"axes\": [(0, 5)],\r\n \"seed\": seed}) != \\\r\n source.run(temp_params={\"fitness_function\": (lambda x: -np.sum(x) ** 2 + 10),\r\n \"population_size\": 10,\r\n \"time_constraint\": 2,\r\n \"axes\": [(0, 5)],\r\n \"seed\": seed}):\r\n ret = 0\r\n if ret == 0:\r\n if self.verbosity > 0:\r\n print(\"ERROR: Random seed non functional, results cannot be replicated.\")\r\n return 0\r\n else:\r\n if self.verbosity > 1:\r\n print(\"Random seed functional, results replicable if a seed is used.\")\r\n return 1", "def run(self, seed=None):\n if seed is not None:\n random_seed.set_seed(seed)\n self.reset()", "def seed(seed: int):\n # all sampling is actually happening in the move_cube module\n move_cube.seed(seed)", "def set_seed(self, seed: int):\n self.rsimulator.set_seed(seed)\n # Maybe call new game here?", "def run(seed, ModelClass=Model):\n model = ModelClass(random_seed=seed)\n return model.one_trial(1, 10)", "def seed():", "def seed():\n pass", "def seed():\n pass", "def seed(self, seed=None):\n raise NotImplementedError()", "def seed(self, seed=None):\n raise NotImplementedError()", "def seed(self, seed: Optional[int]) -> None:\n ...", "def seed(self, seed: int) -> None:\n self.game.set_seed(seed)", "def seed(seed: int) -> None:\n ...", "def set_seed(self, seed):\n self.seed = seed", "def test_seed_coverage(self):\n # Define entry point\n ENTRY = 0x40056d\n\n # We start the execution with a random value located at 0x1000.\n lastInput = list()\n worklist = list([{0x1000: 1}])\n\n while worklist:\n # Take the first seed\n seed = worklist[0]\n\n # Symbolize inputs\n self.symbolize_inputs(seed)\n\n # Init context memory\n self.init_ctxt()\n\n # Emulate\n self.seed_emulate(ENTRY)\n\n lastInput += [dict(seed)]\n del worklist[0]\n\n newInputs = self.new_inputs()\n for inputs in newInputs:\n if inputs not in lastInput and inputs not in worklist:\n worklist += [dict(inputs)]\n\n self.assertIn({4096: 101,\n 4097: 108,\n 4098: 105,\n 4099: 116,\n 4100: 101}, lastInput)", "def seed(self, seed=None):\n raise self.gym.seed(seed)", "def Randomize(seed=None):\n random.seed()", "def set_seed(self, seed: int):\n self.__sim.seed(seed)", "def _seed(self, seed):\n self.world.seed(seed)", "def seed(*args, **kwargs): # real signature unknown\n pass", "def setUp(self):\n # record the randomness used in case the test fails:\n rand_seed = int(time.time())\n sr.seed(rand_seed)\n print(\"seed for this test: \" + str(rand_seed))", "def setup_seed(seed):\n torch.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n np.random.seed(seed)\n random.seed(seed)\n torch.backends.cudnn.deterministic = True", "def setUp(self):\n # record the randomness used in case the test fails:\n self.rand_seed = int(time.time())\n sr.seed(self.rand_seed)\n print(\"seed for this test: \" + str(self.rand_seed))", "def set_seed(seed):\n assert (type(seed) == int and seed >= 0)\n return params_func(cmd, \"set_seed\", params=[seed])", "def test_single_game_seed_works(self):\n sim = ss.Simulation(seed=23)\n game1 = sim.single_game()\n sim = ss.Simulation(seed=23)\n game2 = sim.single_game()\n assert game1 == game2, 'Your seed in Simulation class is not working.'", "def seed(self, seed):\n\n random.seed(seed)\n np.random.seed(seed)", "def set_seeds(seed):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n 
torch.cuda.manual_seed(seed)\n torch.backends.cudnn.deterministic = True\n return None", "def set_seed(seed: int):\n torch.manual_seed(seed)\n np.random.seed(seed)\n random.seed(seed)", "def set_seed(seed):\n torch.manual_seed(seed)\n random.seed(seed)\n np.random.seed(seed)", "def random_test(n_nightly_runs: int = 10, seed: Optional[int] = None):\n def convert_test_func(test_func: Callable):\n seeds = _get_seeds(n_nightly_runs=n_nightly_runs, seed=seed)\n\n def fixate_seed_and_yield_test_run(*args, seed, **kwargs):\n old_state = random.getstate()\n random.seed(seed)\n try:\n yield test_func(*args, seed=seed, **kwargs)\n except Exception:\n _print_seed(seed=seed, decorator_name='random_test')\n raise\n finally:\n random.setstate(old_state)\n # We need to use pytest.mark.parametrize rather than running the test in a for loop. If we\n # do the latter, pytest won't re-create the fixtures for each run.\n return pytest.mark.parametrize('seed', seeds)(_convert_function_to_function_or_coroutine(\n caller_func=fixate_seed_and_yield_test_run, callee_func=test_func))\n\n return convert_test_func", "def set_seed(seed: int):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)", "def set_seed(seed: int):\n np.random.seed(seed)\n torch.manual_seed(seed)", "def run_seeds(self, nbrun):\n self._raise_not_supported()", "def seed(self, seed=None):\n if seed is not None:\n self._rng.seed(seed)", "def set_seeds(seed, env=None):\n torch.manual_seed(seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n np.random.seed(seed)\n random.seed(seed)\n if env is not None:\n env.seed(seed)", "def seed_all(seed):\n\n np.random.seed(seed)\n torch.manual_seed(seed)\n random.seed(seed)", "def sample(self, seed=None):\n raise NotImplementedError()", "def set_seed(args):\n random.seed(args.seed)\n np.random.seed(args.seed)\n # Maybe different op seeds(for dropout) for different procs is better. 
By:\n # `paddle.seed(args.seed + paddle.distributed.get_rank())`\n paddle.seed(args.seed)", "def test_game(seed, num_of_iterations, expected_state):\n\n game = GameOfLife(seed, num_of_iterations=num_of_iterations)\n iteration = 0\n while iteration < num_of_iterations:\n game.state_generator().__next__()\n iteration += 1\n return np.array_equal(game.board, expected_state)", "def set_seed(seed):\n\ttorch.manual_seed(seed)\n\ttorch.cuda.manual_seed_all(seed)\n\tnp.random.seed(seed)", "def seed_everything(seed):\n random.seed(seed)\n os.environ[\"PYTHONHASHSEED\"] = str(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False", "def set_seed(seed):\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n random.seed(seed)\n np.random.seed(seed)", "def initialize(self, seed=None):\r\n self.seed(seed)", "def runtest(self):", "def initialize_randomness(seed):", "def seed(self, seed=None):\r\n if seed is None:\r\n seed = self.default_seed\r\n #backport\r\n #seed = self.default_seed if seed is None else seed\r\n seedgen = numpy.random.RandomState(seed)\r\n for old_r, new_r in self.random_streams.random_state_variables:\r\n old_r_seed = seedgen.randint(2 ** 30)\r\n old_r_container = self.memo[old_r].value\r\n if old_r_container.value is None:\r\n #the cast to int here makes it work on 32bit machines,\r\n #not sure why\r\n old_r_container.value = numpy.random.RandomState(\r\n int(old_r_seed))\r\n else:\r\n #the cast to int here makes it work on 32bit machines,\r\n #not sure why\r\n old_r_container.value.seed(int(old_r_seed))", "def seed_samples(self, sample_seed):\n np.random.seed(sample_seed)\n random.seed(sample_seed)\n torch.manual_seed(sample_seed)\n torch.cuda.manual_seed(sample_seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False", "def seed_samples(self, sample_seed):\n np.random.seed(sample_seed)\n random.seed(sample_seed)\n torch.manual_seed(sample_seed)\n torch.cuda.manual_seed(sample_seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False", "def set_seeds(seed: int=42):\n # Set the seed for general torch operations\n torch.manual_seed(seed)\n # Set the seed for CUDA torch operations (ones that happen on the GPU)\n torch.cuda.manual_seed(seed)", "def seed(self, seed=None):\n #restore a previous state\n if seed is not None: self._seed(seed)\n \n #now generate a new seed and reseed\n seed = self.generate_seed()\n self._seed(seed)", "def set_seed(seed: int):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n if torch.cuda.is_available():\n torch.cuda.manual_seed_all(seed)", "def run(self, test, env):\n\n raise NotImplementedError", "def random_seed(seed):\n state = RandomState()\n random.seed(seed) # alter state\n np.random.seed(seed)\n torch.manual_seed(seed)\n yield\n state.set_global()", "def set_seed(cls, seed: Any) -> None:\n cls.rand = Random(seed)", "def set_random_seed(self, seed):\n np.random.seed(seed)\n return", "def set_random_seed(self, seed):\n np.random.seed(seed)\n return", "def set_seeds(seed=42):\n random.seed(seed)\n os.environ['PYTHONHASHSEED'] = str(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False # for faster training, but not deterministic", "def run_random_test(args):\n test_file, outdir = args\n start = 
time.time()\n outfile_path1, outfile_path2 = FileName.get_random_rst_name()\n run_random(test_file, rs, outfile_path1, outfile_path2)\n dt = time.time() - start\n print(\"run_random_test Done. Elapsed time is %.2f seconds.\" % dt)", "def set_random_seed(seed):\n\n # Sets the seed for the inbuilt Python functions.\n random.seed(seed)\n os.environ[\"PYTHONHASHSEED\"] = str(seed)\n\n # Sets the seed for the NumPy library.\n np.random.seed(seed)\n\n # Sets the seed for the PyTorch library.\n torch.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n torch.backends.cudnn.deterministic = True", "def set_all_seeds(seed):\n os.environ['PYTHONHASHSEED'] = str(seed)\n random.seed(seed)\n torch.manual_seed(seed)\n np.random.seed(seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False", "def _use_seed(seed):\n torch_rng_state = torch.random.get_rng_state()\n torch.manual_seed(seed)\n yield\n torch.random.set_rng_state(torch_rng_state)", "def set_seed(seed: int = None):\n\n if seed is not None:\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(0)", "def seed_everything(seed=0):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n os.environ['PYTHONHASHSEED'] = str(seed)\n torch.backends.cudnn.deterministic = cudnn_deterministic", "def set_seed(seed, cudnn=True):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.random.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n # note: the below slows down the code but makes it reproducible\n if (seed is not None) and cudnn:\n torch.backends.cudnn.deterministic = True", "def set_seed(seed, cudnn=True):\n random.seed(seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n torch.random.manual_seed(seed)\n torch.cuda.manual_seed(seed)\n # note: the below slows down the code but makes it reproducible\n if (seed is not None) and cudnn:\n torch.backends.cudnn.deterministic = True", "def seed_all(seed: int = 1930):\n print(\"Using Seed Number {}\".format(seed))\n\n os.environ[\"PYTHONHASHSEED\"] = str(\n seed\n ) # set PYTHONHASHSEED env var at fixed value\n torch.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n torch.cuda.manual_seed(seed) # pytorch (both CPU and CUDA)\n np.random.seed(seed) # for numpy pseudo-random generator\n random.seed(seed) # set fixed value for python built-in pseudo-random generator\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n torch.backends.cudnn.enabled = False", "def setRandomSeed(self, seed):\n return self._set(randomSeed=seed)", "def setRandomSeed(self, seed):\n return self._set(randomSeed=seed)", "def setRandomSeed(self, seed):\n return self._set(randomSeed=seed)", "def setRandomSeed(self, seed):\n return self._set(randomSeed=seed)", "def setRandomSeed(self, seed):\n return self._set(randomSeed=seed)", "def runTests(self):\n \n pass", "def set_random_seed(seed):\n random.seed(seed)\n np.random.seed(seed)\n logging.info(f\"Set simulation random seed to: {seed}\")", "def run_seed(self, mode):\n # Clear data from tables\n # clear_data()\n if mode == MODE_CLEAR:\n return\n # industry = create_industy()\n # structure_type, structure_type1 = create_structure_type()\n # structure, structure2 = create_structure(structure_type)\n # stock = create_stock(industry, structure)\n # create_price_list(stock)\n # create_news(stock, structure, structure2)\n # create_analysis(structure)\n # create_market_indices()\n create_section_group()", "def set_global_seeds(seed):\n \n 
torch.manual_seed(seed)\n np.random.seed(seed)\n random.seed(seed)", "def test_run(self):\n sut = ExperimentEmail()\n train = os.path.join(os.path.dirname(__file__), \"data\", \"sample.csv\")\n val = os.path.join(os.path.dirname(__file__), \"data\", \"sample.csv\")\n outdir = tempfile.mkdtemp()\n\n # Act\n sut.run(train, val, outdir, batch_size=32, epochs=2)", "def SetRandomSeed(seed):\n global option\n option['random_seed'] = seed", "def set_random_seeds(seed: int):\n random.seed(seed)\n np.random.seed(seed)\n torch.random.manual_seed(seed)\n torch.cuda.manual_seed_all(seed)\n torch.backends.cudnn.deterministic = True\n torch.backends.cudnn.benchmark = False\n\n logging.debug(\"SystemLog: Set random seed {}\".format(seed))", "def set_seed(args):\n random.seed(args.seed)\n np.random.seed(args.seed)\n torch.manual_seed(args.seed)\n if args.n_gpu > 0:\n torch.cuda.manual_seed_all(args.seed)", "def set_seed(args):\n random.seed(args.seed)\n np.random.seed(args.seed)\n torch.manual_seed(args.seed)\n if args.n_gpu > 0:\n torch.cuda.manual_seed_all(args.seed)", "def test_run_sim_1():\n rnd = rand.Arrivals(36, 41)\n sim.run_sim(3, 2, 5, 6, 22, rnd)", "def set_seed(self, seed):\n if seed is None:\n warnings.warn(\n \"Initializing player with seed from Axelrod module random number generator. \"\n \"Results may not be seed reproducible.\"\n )\n self._seed = _module_random.random_seed_int()\n else:\n self._seed = seed\n self._random = RandomGenerator(seed=self._seed)", "def eg_ok(n=1):\n\n random.seed(n)", "def set_seed(seed_value=42):\n random.seed(seed_value)\n np.random.seed(seed_value)\n torch.manual_seed(seed_value)\n torch.cuda.manual_seed_all(seed_value)", "def set_seed(seed_value=42):\n random.seed(seed_value)\n np.random.seed(seed_value)\n torch.manual_seed(seed_value)\n torch.cuda.manual_seed_all(seed_value)", "def main():\n ex = Experiment(SEED)\n ex.main()", "def test_call(self, MetricClass, seed):\n m = MetricClass()\n strategy = RandomTrader(seed=seed).run(make_randomwalk(seed=seed))\n result0 = np.array(m.result(strategy)) # from `result` method\n result1 = np.array(m(strategy)) # from __call__\n assert np.equal(result0, result1).all()", "def target_dummy(config: Configuration, seed: int) -> int:\n return seed", "def set_seed(seed=0):\n # Python std lib random seed\n random.seed(seed)\n # Numpy, tensorflow, torch\n torch.manual_seed(seed)\n np.random.seed(seed)\n tf.random.set_seed(seed)\n # Additional seeds potentially required when using a gpu\n # (see https://www.youtube.com/watch?v=TB07_mUMt0U&t=1804s)\n os.environ['TF_CUDNN_DETERMINISTIC'] = 'true'\n os.environ['TF_DETERMINISTIC_OPS'] = 'true'\n os.environ['PYTHONHASHSEED'] = str(seed)", "def run_test(self):\n raise NotImplementedError", "def init_seed(seed=None):\n if seed is None:\n seed = int(time.time())\n\n LOGGER.info(\"Using seed=%d\", seed)\n np.random.seed(seed)\n torch.manual_seed(seed)\n random.seed(seed)", "def set_seed(self,seed):\r\n if seed is None:\r\n warnings.warn(\r\n \"Initializing player with seed from Axelrod module random number generator. 
\"\r\n \"Results may not be seed reproducible.\")\r\n self._seed = _module_random.random_seed_int()\r\n else:\r\n self._seed = seed\r\n self._random = RandomGenerator(seed=self._seed)\r\n self.base._random = self._random\r\n self.trust._random = self._random\r\n self.conviction._random = self._random\r\n \r\n self.generator = torch.Generator()\r\n self.generator.manual_seed(int(seed))", "def calculate(self, seed=1):\n self._split_data(seed=seed)\n test_user_list = list(set(self.test['userId'].unique()))\n user_list = [test_user_list[random.randint(0, len(test_user_list)) - 1]\n for i in range(20)]\n # self.precision(user_list)\n # self.recall(user_list)\n # self.coverage(user_list)\n self.popularity(user_list)", "def set_seed(seed: int = 42) -> None:\n np.random.seed(seed)\n torch.manual_seed(seed)\n if torch.cuda.is_available:\n torch.cuda.manual_seed_all(seed) # type:ignore", "def test_run_sim():\n rnd = rand.Arrivals(31, 40)\n sim.run_sim(2, 1, 3, 4, 24, rnd)", "def run(self, seed='old'):\n if seed == 'old':\n founds, number_found = self.find_in_base()\n param = number_found - self.M_N\n\n if param < 0:\n print \"We have only {0} usable chromosomes in the database, per {1} required.\".format(number_found, self.M_N)\n l, __ = self.evolve_partials(abs(param))\n combined = founds+[l[i].x for i in range(len(l))]\n\n elif param > 0:\n combined = random.sample(founds, self.M_N)\n\n else:\n combined = founds\n\n if seed == 'fresh':\n print \"Evolving fresh chromosomes...\"\n l, __ = self.evolve_partials(self.M_N)\n combined = [l[i].x for i in range(len(l))]\n\n if len(combined) != self.M_N: raise ValueError\n print \"\\nLaunching Multi-Objective evolution...\"\n isl, prob = self.mlt_obj_evo(combined)\n self.writing_finals(isl, prob)", "def _set_random_seed(seed):\r\n if seed is not None and seed > 0:\r\n random.seed(seed)\r\n np.random.seed(seed)\r\n torch.manual_seed(seed)\r\n if torch.cuda.device_count() > 0:\r\n mpu.model_parallel_cuda_manual_seed(seed)\r\n else:\r\n raise ValueError('Seed ({}) should be a positive integer.'.format(seed))", "def check_seed():\n np.random.seed(1000)\n standard = [\n {0: -3.0, 1: -5.0, 'index': 0},\n {0: -6.0, 1: -8.0, 'index': 1},\n {0: 5.0, 1: -1.0, 'index': 2},\n {0: 1.0, 1: -7.0, 'index': 3},\n {0: -2.0, 1: -3.0, 'index': 4},\n {0: 7.0, 1: 3.0, 'index': 5},\n {0: -4.0, 1: -2.0, 'index': 6},\n {0: 2.0, 1: 6.0, 'index': 7}\n ]\n\n this_machine = create_points(8)\n\n flag = True\n for i in range(8) :\n flag &= this_machine[i][0] == standard[i][0] \n flag &= this_machine[i][1] == standard[i][1] \n flag &= this_machine[i][\"index\"] == i\n \n if not flag :\n print(\"\"\"\n The Python installation on this machine is odd: it appears to\n use a non-standard random number generator -- run \n this script on the machines in the Otter lab instead.\n If that fails too, send an email to ag0015@surrey.ac.uk.\n \"\"\")\n print (\"You got these test points:\", this_machine)\n print (\"You should have got:\", standard)\n exit(-1)\n else :\n print (\"Check passed\")", "def reproducible(seed: int = 0) -> None:\n\n os.environ[\"PYTHONHASHSEED\"] = \"0\"\n\n np.random.seed(seed)\n python_random.seed(seed)\n tf.random.set_seed(seed)", "def random_seed(seed_value: int) -> None:\r\n np.random.seed(seed_value) # cpu vars\r\n torch.manual_seed(seed_value) # cpu vars\r\n random.seed(seed_value) # Python\r\n if torch.cuda.is_available():\r\n torch.cuda.manual_seed(seed_value)\r\n torch.cuda.manual_seed_all(seed_value) # gpu vars\r\n torch.backends.cudnn.deterministic = True # 
needed\r\n torch.backends.cudnn.benchmark = False" ]
[ "0.72557", "0.7165194", "0.7157774", "0.7147115", "0.71157515", "0.70374316", "0.6993909", "0.6993909", "0.6942664", "0.6942664", "0.68477833", "0.6824419", "0.6699443", "0.66688615", "0.6521962", "0.64988214", "0.64300764", "0.6428376", "0.64246386", "0.6414504", "0.6401042", "0.63444203", "0.6328711", "0.6321799", "0.6295205", "0.6262371", "0.6242217", "0.6228037", "0.62253064", "0.62150955", "0.61918163", "0.6132823", "0.61272454", "0.6121335", "0.61164594", "0.61155385", "0.610587", "0.6096008", "0.608094", "0.60675573", "0.606155", "0.603269", "0.6022343", "0.60041326", "0.5996328", "0.5992071", "0.5988973", "0.5988973", "0.59879476", "0.598618", "0.59810305", "0.59780484", "0.5977547", "0.5976946", "0.59759057", "0.59759057", "0.5975655", "0.5975503", "0.59616554", "0.5954698", "0.5949733", "0.59404665", "0.593842", "0.5934631", "0.5934631", "0.5927371", "0.5925552", "0.5925552", "0.5925552", "0.5925552", "0.5925552", "0.59245914", "0.59140784", "0.5912674", "0.5908745", "0.590631", "0.5898057", "0.58954936", "0.5894244", "0.5894244", "0.5886135", "0.58579654", "0.58375293", "0.5835049", "0.5835049", "0.583443", "0.58325464", "0.5831778", "0.58314824", "0.58243984", "0.5823467", "0.58223015", "0.5821756", "0.581998", "0.5807668", "0.5807623", "0.579895", "0.5795977", "0.5788861", "0.5772344" ]
0.656294
14
Model to embed books and wikilinks using the functional API. Trained to discern if a tag is present in an article
def anime_embedding_model(anime_index, tag_index, embedding_size=50, classification=False):
    # Both inputs are 1-dimensional
    anime = Input(name='anime', shape=[1])
    tag = Input(name='tag', shape=[1])

    # Embedding the anime (shape will be (None, 1, 50))
    anime_embedding = Embedding(name='anime_embedding', input_dim=len(anime_index), output_dim=embedding_size)(anime)

    # Embedding the tag (shape will be (None, 1, 50))
    tag_embedding = Embedding(name='tag_embedding', input_dim=len(tag_index), output_dim=embedding_size)(tag)

    # Merge the layers with a dot product along the second axis (shape will be (None, 1, 1))
    merged = Dot(name='dot_product', normalize=True, axes=2)([anime_embedding, tag_embedding])

    # Reshape to be a single number (shape will be (None, 1))
    merged = Reshape(target_shape=[1])(merged)

    # If classification, add extra layer and loss function is binary cross entropy
    if classification:
        merged = Dense(1, activation='sigmoid')(merged)
        model = Model(inputs=[anime, tag], outputs=merged)
        model.compile(optimizer='Adam', loss='binary_crossentropy', metrics=['accuracy'])
    # Otherwise loss function is mean squared error
    else:
        model = Model(inputs=[anime, tag], outputs=merged)
        model.compile(optimizer='Adam', loss='mse')

    return model
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, name, isbn, author, tags=None):\n super().__init__(name, isbn, tags)\n self.author = author\n self.resource_type = \"Book\"", "def tagged(request,slug):\n\n tag = get_object_or_404(Tag, slug=slug)\n books = Book.objects.filter(tags=tag)\n \n for book in books:\n book\n\n context = {\n 'tag':tag,\n 'books':books,\n }\n return render(request, 'favorite.html', context)", "def hasEmbedded(self, tag):\n if self.embeddedTags and self.embeddedTags[-1] == tag:\n return True\n else:\n return False", "def Embedded(self) -> bool:", "def is_book_exist(self, book_info):\n for type, link in book_info.links.items():\n try:\n bookfile = BookFile.objects.get( link_hash = md5(link).hexdigest() )\n books = bookfile.book_set.all()\n if books:\n return True, books[0]\n except BookFile.DoesNotExist:\n continue\n try:\n book = Book.objects.get(author__name=book_info.authors, title=book_info.title)\n return True, book\n except Book.DoesNotExist:\n continue\n return False, None", "def test_books(self):\r\n link_re = re.compile(r'<(?P<link>[^>]+)>\\; rel=\\\"(?P<rel>[^\\\"]+)\\\"')\r\n\r\n response = self.get_resource('author-test-book',\r\n data=dict(author=self.author.pk))\r\n self.assertTrue(response.has_header(\"Link\"))\r\n self.assertEquals(\r\n response[\r\n \"Link\"], '<%s?page=2&author=5>; rel=\"next\"' % self.reverse('author-test-book')) # nolint\r\n # Get objects by links on Link header\r\n response = self.client.get(link_re.findall(response['Link'])[0][0])\r\n\r\n links = link_re.findall(response['Link'])\r\n\r\n self.assertEquals(links[0][0], '%s?page=3&author=5' %\r\n self.reverse('author-test-book'))\r\n self.assertEquals(links[0][1], 'next')\r\n\r\n self.assertEquals(\r\n links[1][0], '%s?author=5' % self.reverse('author-test-book'))\r\n self.assertEquals(links[1][1], 'previous')\r\n\r\n response = self.get_resource(\r\n 'author-test-book', data={\r\n 'author': self.author.pk, 'adr-max': 0\r\n })\r\n self.assertFalse(response.has_header(\"Link\"))\r\n\r\n response = self.get_resource(\r\n 'author-test-book',\r\n data={\r\n 'author': self.author.pk, 'adr-max': 'all'\r\n })\r\n self.assertEquals(response.status_code, 200)\r\n self.assertFalse(response.has_header(\"Link\"))", "def test_tag_links_present(self):\n response = self.client.get(self.get_url(self.study.pk))\n for tagged_trait in self.tagged_traits:\n tag_study_url = reverse(\n 'tags:tag:study:list', kwargs={'pk': tagged_trait.tag.pk, 'pk_study': self.study.pk})\n self.assertIn(tag_study_url, str(response.content))", "def __init__(self, tags):\n self.tags = tags", "def tags():", "def __init__(self, author_id=None, author_name=None, body=None, categories_confident=None, categories_id=None, categories_level=None, categories_taxonomy=None, clusters=None, links_permalink=None, entities_id=None, entities_surface_forms_text=None, entities_links_wikipedia=None, entities_links_wikidata=None, entities_title_surface_forms_text=None, entities_body_surface_forms_text=None, id=None, language=None, media_images_content_length_max=None, media_images_content_length_min=None, media_images_count_max=None, media_images_count_min=None, media_images_format=None, media_images_height_max=None, media_images_height_min=None, media_images_width_max=None, media_images_width_min=None, media_videos_count_max=None, media_videos_count_min=None, sentiment_body_polarity=None, sentiment_title_polarity=None, social_shares_count_facebook_max=None, social_shares_count_facebook_min=None, social_shares_count_reddit_max=None, 
social_shares_count_reddit_min=None, source_domain=None, source_id=None, source_links_in_count_max=None, source_links_in_count_min=None, source_locations_city=None, source_locations_country=None, source_locations_state=None, source_rankings_alexa_country=None, source_rankings_alexa_rank_max=None, source_rankings_alexa_rank_min=None, source_scopes_city=None, source_scopes_country=None, source_scopes_level=None, source_scopes_state=None, story_url=None, story_language=None, text=None, title=None, translations_en_body=None, translations_en_text=None, translations_en_title=None, entity=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._author_id = None\n self._author_name = None\n self._body = None\n self._categories_confident = None\n self._categories_id = None\n self._categories_level = None\n self._categories_taxonomy = None\n self._clusters = None\n self._links_permalink = None\n self._entities_id = None\n self._entities_surface_forms_text = None\n self._entities_links_wikipedia = None\n self._entities_links_wikidata = None\n self._entities_title_surface_forms_text = None\n self._entities_body_surface_forms_text = None\n self._id = None\n self._language = None\n self._media_images_content_length_max = None\n self._media_images_content_length_min = None\n self._media_images_count_max = None\n self._media_images_count_min = None\n self._media_images_format = None\n self._media_images_height_max = None\n self._media_images_height_min = None\n self._media_images_width_max = None\n self._media_images_width_min = None\n self._media_videos_count_max = None\n self._media_videos_count_min = None\n self._sentiment_body_polarity = None\n self._sentiment_title_polarity = None\n self._social_shares_count_facebook_max = None\n self._social_shares_count_facebook_min = None\n self._social_shares_count_reddit_max = None\n self._social_shares_count_reddit_min = None\n self._source_domain = None\n self._source_id = None\n self._source_links_in_count_max = None\n self._source_links_in_count_min = None\n self._source_locations_city = None\n self._source_locations_country = None\n self._source_locations_state = None\n self._source_rankings_alexa_country = None\n self._source_rankings_alexa_rank_max = None\n self._source_rankings_alexa_rank_min = None\n self._source_scopes_city = None\n self._source_scopes_country = None\n self._source_scopes_level = None\n self._source_scopes_state = None\n self._story_url = None\n self._story_language = None\n self._text = None\n self._title = None\n self._translations_en_body = None\n self._translations_en_text = None\n self._translations_en_title = None\n self._entity = None\n self.discriminator = None\n\n if author_id is not None:\n self.author_id = author_id\n if author_name is not None:\n self.author_name = author_name\n if body is not None:\n self.body = body\n if categories_confident is not None:\n self.categories_confident = categories_confident\n if categories_id is not None:\n self.categories_id = categories_id\n if categories_level is not None:\n self.categories_level = categories_level\n if categories_taxonomy is not None:\n self.categories_taxonomy = categories_taxonomy\n if clusters is not None:\n self.clusters = clusters\n if links_permalink is not None:\n self.links_permalink = links_permalink\n if entities_id is not None:\n self.entities_id = entities_id\n if entities_surface_forms_text is not 
None:\n self.entities_surface_forms_text = entities_surface_forms_text\n if entities_links_wikipedia is not None:\n self.entities_links_wikipedia = entities_links_wikipedia\n if entities_links_wikidata is not None:\n self.entities_links_wikidata = entities_links_wikidata\n if entities_title_surface_forms_text is not None:\n self.entities_title_surface_forms_text = entities_title_surface_forms_text\n if entities_body_surface_forms_text is not None:\n self.entities_body_surface_forms_text = entities_body_surface_forms_text\n if id is not None:\n self.id = id\n if language is not None:\n self.language = language\n if media_images_content_length_max is not None:\n self.media_images_content_length_max = media_images_content_length_max\n if media_images_content_length_min is not None:\n self.media_images_content_length_min = media_images_content_length_min\n if media_images_count_max is not None:\n self.media_images_count_max = media_images_count_max\n if media_images_count_min is not None:\n self.media_images_count_min = media_images_count_min\n if media_images_format is not None:\n self.media_images_format = media_images_format\n if media_images_height_max is not None:\n self.media_images_height_max = media_images_height_max\n if media_images_height_min is not None:\n self.media_images_height_min = media_images_height_min\n if media_images_width_max is not None:\n self.media_images_width_max = media_images_width_max\n if media_images_width_min is not None:\n self.media_images_width_min = media_images_width_min\n if media_videos_count_max is not None:\n self.media_videos_count_max = media_videos_count_max\n if media_videos_count_min is not None:\n self.media_videos_count_min = media_videos_count_min\n if sentiment_body_polarity is not None:\n self.sentiment_body_polarity = sentiment_body_polarity\n if sentiment_title_polarity is not None:\n self.sentiment_title_polarity = sentiment_title_polarity\n if social_shares_count_facebook_max is not None:\n self.social_shares_count_facebook_max = social_shares_count_facebook_max\n if social_shares_count_facebook_min is not None:\n self.social_shares_count_facebook_min = social_shares_count_facebook_min\n if social_shares_count_reddit_max is not None:\n self.social_shares_count_reddit_max = social_shares_count_reddit_max\n if social_shares_count_reddit_min is not None:\n self.social_shares_count_reddit_min = social_shares_count_reddit_min\n if source_domain is not None:\n self.source_domain = source_domain\n if source_id is not None:\n self.source_id = source_id\n if source_links_in_count_max is not None:\n self.source_links_in_count_max = source_links_in_count_max\n if source_links_in_count_min is not None:\n self.source_links_in_count_min = source_links_in_count_min\n if source_locations_city is not None:\n self.source_locations_city = source_locations_city\n if source_locations_country is not None:\n self.source_locations_country = source_locations_country\n if source_locations_state is not None:\n self.source_locations_state = source_locations_state\n if source_rankings_alexa_country is not None:\n self.source_rankings_alexa_country = source_rankings_alexa_country\n if source_rankings_alexa_rank_max is not None:\n self.source_rankings_alexa_rank_max = source_rankings_alexa_rank_max\n if source_rankings_alexa_rank_min is not None:\n self.source_rankings_alexa_rank_min = source_rankings_alexa_rank_min\n if source_scopes_city is not None:\n self.source_scopes_city = source_scopes_city\n if source_scopes_country is not None:\n self.source_scopes_country 
= source_scopes_country\n if source_scopes_level is not None:\n self.source_scopes_level = source_scopes_level\n if source_scopes_state is not None:\n self.source_scopes_state = source_scopes_state\n if story_url is not None:\n self.story_url = story_url\n if story_language is not None:\n self.story_language = story_language\n if text is not None:\n self.text = text\n if title is not None:\n self.title = title\n if translations_en_body is not None:\n self.translations_en_body = translations_en_body\n if translations_en_text is not None:\n self.translations_en_text = translations_en_text\n if translations_en_title is not None:\n self.translations_en_title = translations_en_title\n if entity is not None:\n self.entity = entity", "def test_tags_on_article(self):\n self.article.tags.add(self.tag1, self.tag2)\n self.assertEqual('Django', str(self.article.tags.all()[0]))", "def nytArticleSearch(tag):\n nytKey=\"7d9f3b88013d99f9f9ab2a8a82545671:19:73424493\" #new york times article search api key\n url= 'http://api.nytimes.com/svc/search/v2/articlesearch.json?q=%s&fq=type_of_material:(\"News\")&api-key=%s'\n url=url%(tag,nytKey)\n request = urllib2.urlopen(url)\n result = request.read()\n r = json.loads(result)\n return r['response']['docs']", "def get_book():\n soup = Soup(CONTENT)\n dotd = soup.find(id='deal-of-the-day')\n image_base = dotd.find(class_='dotd-main-book-image')\n title_base = dotd.find(class_='dotd-title')\n\n title = title_base.find('h2').text.strip()\n description = title_base.parent.find_all('div')[2].text.strip()\n image = image_base.find('img').attrs['src']\n link = image_base.find('a').attrs['href']\n\n return Book(title, description, image, link)", "def test_tag_hyperlinks(self):\n for h in self.hyperlinks:\n if h['name'] in ['C++', 'Java', 'Python', 'ROS', 'MATLAB']:\n self.assertTrue(\n '.md' in h['url'],\n msg='Hyperlink \"%s\" is wrongly detected as a tag in \"%s\".' 
% (h['md'], h['file'])\n )", "def search_by_tags(request):\n resultTopics = []\n resultPosts = []\n if request.method == 'POST':\n data = request.data\n print(data)\n search_query = data['query']\n data_tags = list(set(data['tags']))\n print(data_tags)\n tagObjects = []\n if len(data_tags) > 0:\n tagObjects = Tag.objects.filter(hidden_tags__overlap=data_tags) | Tag.objects.filter(reduce(operator.and_, (Q(wikidataID=tag_id) for tag_id in data_tags)))\n for tagObject in tagObjects:\n print(\"LOL\")\n tag_topics = tagObject.topics.all()\n tag_posts = tagObject.posts.all()\n for topic in tag_topics:\n if topic not in resultTopics:\n resultTopics.append(topic)\n for post in tag_posts:\n if post not in resultPosts:\n resultPosts.append(post)\n # for tag in data[\"tags\"]:\n # try:\n # tagObjects = Tag.objects.filter(wikidataID=tag)\n # except Tag.DoesNotExist:\n # continue;\n # for tagObject in tagObjects:\n # tag_topics = tagObject.topics.all()\n # tag_posts = tagObject.posts.all()\n # for topic in tag_topics:\n # if topic not in resultTopics:\n # resultTopics.append(topic)\n # for post in tag_posts:\n # if post not in resultPosts:\n # resultPosts.append(post)\n print(resultTopics);\n print(resultPosts);\n\n query_topics = Topic.objects.filter(name__icontains=search_query)\n query_posts = Post.objects.filter(content__icontains=search_query)\n for topic in query_topics:\n if topic not in resultTopics:\n resultTopics.append(topic)\n for post in query_posts:\n if post not in resultPosts:\n resultPosts.append(post)\n\n all_relations = Relation.objects.all()\n for topic in resultTopics:\n for relation in all_relations:\n if (topic == relation.topic_from) and (relation.topic_to not in resultTopics):\n resultTopics.append(relation.topic_to)\n if (topic == relation.topic_to) and (relation.topic_from not in resultTopics):\n resultTopics.append(relation.topic_from)\n\n TopicSerializer.Meta.depth = 1\n PostNestedSerializer.Meta.depth = 1\n\n topicSerializer = TopicNestedSerializer(resultTopics, many=True)\n #topicSerializer.Meta.depth = 1\n postSerializer = PostNestedSerializer(resultPosts, many=True)\n #postSerializer.Meta.depth = 1\n\n return Response({'topics':topicSerializer.data, 'posts':postSerializer.data})", "def __init__(self, tags=''):\n self.tags = tags", "def referenced_articles(self, url):\n pass", "def build_article(self, ap_id, title, timestamp, author, content, summary, article_url=None):\n if article_url is None:\n article_url = ap_id\n return {\n \"@context\": self.rabble_context(),\n \"type\": \"Article\",\n \"id\": ap_id,\n \"name\": title,\n \"published\": timestamp,\n \"attributedTo\": author,\n \"content\": content,\n \"preview\": {\n \"type\": \"Note\",\n \"name\": \"Summary\",\n \"content\": summary,\n },\n \"url\": article_url,\n }", "def get_books_from_api(request, url='https://www.googleapis.com/books/v1/volumes?q=Hobbit'):\n response = requests.get(url)\n data = response.json()\n items = data.get('items')\n if items is None:\n items = []\n for item in items:\n book = item.get('volumeInfo')\n title = book.get('title', '--')\n authors = book.get('authors', ['unknown'])\n publishedDate = book.get('publishedDate')\n isbns = book.get('industryIdentifiers', [])\n pages = book.get('pageCount')\n cover_url = book.get('imageLinks')\n if cover_url:\n cover_url = cover_url.get('thumbnail')\n language = book.get('language')\n authors_list = []\n for author in authors:\n auth = get_author_object(author)\n authors_list.append(auth)\n isbn_10 = None\n isbn_13 = None\n for isbn in isbns:\n if 
isbn['type'] == 'ISBN_10':\n isbn_10 = isbn['identifier']\n elif isbn['type'] == 'ISBN_13':\n isbn_13 = isbn['identifier']\n lang = get_language_object(language)\n try:\n published = datetime.strptime(publishedDate, '%Y-%m-%d')\n except ValueError:\n year = int(publishedDate[:4])\n month = None\n day = None\n except TypeError:\n year = None\n month = None\n day = None\n else:\n year = published.year\n month = published.month\n day = published.day\n try:\n book = get_object_or_404(Book, title=title, publishedYear=year, publishedMonth=month, publishedDay=day,\n language=lang, pages=pages, cover=cover_url, isbn_10=isbn_10, isbn_13=isbn_13)\n for name in book.authors.all():\n if name not in authors_list:\n raise Http404\n except Http404:\n book = Book.objects.create(title=title, publishedYear=year, publishedMonth=month, publishedDay=day,\n language=lang, pages=pages, cover=cover_url, isbn_10=isbn_10, isbn_13=isbn_13)\n book.authors.set(authors_list)\n return redirect('all-books')", "def get_tags(self, obj):\n if QuestionTag.objects.filter(question=obj).exists():\n id_tags = QuestionTag.objects.filter(question=obj).values('tag__id')\n tags_obj = Tag.objects.filter(id__in=id_tags)\n return TagSerializer(tags_obj, many=True).data\n else:\n return \"No tags\"", "def run_demo():\n while True:\n embeddings = beer_emb.embed_doc(input(\"Test if words are beer-related: \"),\n word_filter=False)\n for word_vec in embeddings:\n print(is_beer_related(word_vec))", "def test_retrieve_books(self):\n book = sample_book(publisher=self.publisher)\n book.author.add(sample_author())\n # book.publisher.add(sample_publisher())\n\n res = self.client.get(reverse('books'))\n books = Book.objects.all()\n serializer = BookSerializer(books, many=True)\n self.assertEqual(res.data, serializer.data)\n self.assertEqual(res.status_code, status.HTTP_200_OK)", "def is_tagged(self,tag_name,element):\n return (tag_name in self.tag2elements.keys()) and (element in self.tag2elements[tag_name])", "def get_book_details(book_id, key):\n\n # call goodreads search method with book id here\n payload = {\"key\": key}\n\n query = requests.get(\"https://www.goodreads.com/book/show/{}.json\".format(book_id), params=payload)\n # parse response to get data needed to create a book object\n\n doc = untangle.parse(query.content)\n book_data = doc.GoodreadsResponse.book\n book = {}\n\n # create dictionary of book object data, subdictionary of edition data\n\n # book info\n #==========\n book[\"title\"] = book_data.title.cdata.encode(\"utf8\")\n book[\"author_name\"], book[\"author_gr_id\"] = get_author_data(book_data.authors)\n book['work_id'] = int(book_data.work.id.cdata.encode('utf8'))\n book[\"description\"] = book_data.description.cdata\n\n # edition info\n #=============\n book[\"edition\"] = {}\n book[\"edition\"][\"isbn\"] = valid_isbn(book_data.isbn.cdata.encode(\"utf8\"))\n book[\"edition\"][\"format_id\"] = get_format_id(book_data.format.cdata.encode(\"utf8\"))\n book[\"edition\"][\"pic_url\"] = book_data.image_url.cdata.encode(\"utf8\")\n book[\"edition\"][\"publisher\"] = book_data.publisher.cdata.encode(\"utf8\")\n book[\"edition\"][\"num_pages\"] = valid_page_count(book_data.num_pages.cdata.encode(\"utf8\"))\n year = date_is_valid(book_data.work.original_publication_year.cdata.encode(\"utf8\"))\n month = date_is_valid(book_data.work.original_publication_month.cdata.encode(\"utf8\"))\n day = date_is_valid(book_data.work.original_publication_day.cdata.encode(\"utf8\"))\n book[\"edition\"][\"date\"] = datetime.date(year, month, day)\n 
book[\"edition\"][\"gr_url\"] = book_data.url.cdata.encode(\"utf8\")\n book[\"edition\"][\"gr_id\"] = int(book_data.id.cdata.encode(\"utf8\"))\n\n return book", "def IsDocumentRelated(self, *args, **kwargs):\n pass", "def test_tagged_feed_link(self):\n TagFactory(name=\"green\", slug=\"green\")\n url = urlparams(reverse(\"questions.list\", args=[\"all\"]), tagged=\"green\")\n response = self.client.get(url)\n self.assertEqual(200, response.status_code)\n doc = pq(response.content)\n feed_links = doc('link[type=\"application/atom+xml\"]')\n self.assertEqual(2, len(feed_links))\n self.assertEqual(\"Recently updated questions\", feed_links[0].attrib[\"title\"])\n self.assertEqual(\"/en-US/questions/feed?product=all\", feed_links[0].attrib[\"href\"])\n self.assertEqual(\"Recently updated questions tagged green\", feed_links[1].attrib[\"title\"])\n self.assertEqual(\"/en-US/questions/tagged/green/feed\", feed_links[1].attrib[\"href\"])", "def _getArticleContet(self,encodedTag):\r\n xmlEncodedTag = BeautifulSoup(encodedTag.string,\"lxml\")#encoded tag actually has a format of an XML\r\n articleContent = []\r\n for element in xmlEncodedTag.body.contents:\r\n if _getTextElement(element):\r\n articleContent.append(unidecode.unidecode(element.get_text()))\r\n if self._isEndOfArticleCommerical(element):\r\n continue\r\n wordPhraseToRefLink = {a.get_text().strip().lower():a.attrs['href'] for a in xmlEncodedTag.find_all(\"a\")}\r\n return articleContent,wordPhraseToRefLink", "def build_book(self, embed_images = True, embed_styles = True, remove_scripts = True, add_navbar_js = True):\n \n chapter_list = self.get_chapter_list()\n \n for li in chapter_list:\n page = self.get_page(li)\n self.add_page_to(page.page_content, self.book_content)\n \n self.update_links()\n \n if embed_styles:\n self.embed_styles()\n \n if remove_scripts:\n self.remove_scripts()\n \n if embed_images:\n self.embed_images()\n \n if add_navbar_js:\n self.add_navbar_js()\n \n self.remove_html_widgets()\n self.remove_next_page_button()", "def add_library(self):\n library = self.new_section('The Library')\n books = self.wiki('the-library')._soup(class_='boxbook')\n template = (\n '<div class=\"book-title\">{}</div>'\n '<div class=\"book-description\">{}</div>')\n for b in books:\n title = b.find(class_='booktitle').string\n description = b.find(class_='boxleft')('div')[0].text.strip()\n excerpts = [self.wiki.site + a['href']\n for a in b.find(class_='boxright')('a')]\n if title == 'The Journal of Aframos Longjourney':\n links = self.wiki(excerpts[1])._soup.select('#page-content a')\n links = [\n 'http://wanderers-library.wikidot.com/' +\n l['href'].split('/')[-1] for l in links]\n excerpts = [excerpts[0]] + links\n book = self.add_page(\n title, template.format(title, description), library)\n for url in excerpts:\n self.add_url(url, book)", "def create_book_objects(content):\n library = []\n for book in content:\n library.append(Book(book['Author'], book['Title'], book['Publisher'], book['Shelf'], book['Category'],\n book['Subject']))\n print('Your Library has been loaded.')\n return library", "def test_filter_recipe_by_tags(self):\n recipe1 = sample_recipe(user=self.user,title='chicken curry')\n recipe2 = sample_recipe(user=self.user,title='mutton curry')\n recipe3 = sample_recipe(user=self.user,title='milk dish')\n tag1 = sample_tag(user=self.user,name='chicken')\n tag2 = sample_tag(user=self.user,name='mutton')\n recipe1.tag.add(tag1)\n recipe2.tag.add(tag2)\n\n res = self.client.get(RECIPE_URL,{'tag':f'{tag1.id},{tag2.id}'})\n s1 = 
RecipeSerializer(recipe1)\n s2 = RecipeSerializer(recipe2)\n s3 = RecipeSerializer(recipe3)\n self.assertIn(s1.data,res.data)\n self.assertIn(s2.data,res.data)\n self.assertNotIn(s3.data,res.data)", "def related(self, identifier_type, identifier, novelist_api=None,\n feed_class=AcquisitionFeed):\n\n library = flask.request.library\n work = self.load_work(library, identifier_type, identifier)\n if isinstance(work, ProblemDetail):\n return work\n\n search_engine = self.search_engine\n if isinstance(search_engine, ProblemDetail):\n return search_engine\n\n try:\n lane_name = \"Books Related to %s by %s\" % (\n work.title, work.author\n )\n lane = RelatedBooksLane(\n library, work, lane_name, novelist_api=novelist_api\n )\n except ValueError as e:\n # No related books were found.\n return NO_SUCH_LANE.detailed(str(e))\n\n facets = self.manager.load_facets_from_request(\n worklist=lane, base_class=FeaturedFacets,\n base_class_constructor_kwargs=dict(\n minimum_featured_quality=library.minimum_featured_quality\n )\n )\n if isinstance(facets, ProblemDetail):\n return facets\n\n annotator = self.manager.annotator(lane)\n url = annotator.feed_url(\n lane,\n facets=facets,\n )\n\n return feed_class.groups(\n _db=self._db, title=lane.DISPLAY_NAME,\n url=url, worklist=lane, annotator=annotator,\n facets=facets, search_engine=search_engine\n )", "def render_search_book(title, authors, rating, description, img, isbn):\n stars = []\n print('rating', rating)\n for i in range(0, round(rating)):\n stars.append('★')\n\n\n book_html = html.Div(\n className='card',\n children=[\n html.Div(\n className='card-horizontal',\n children=[\n html.Div(\n className='img-square-wrapper-full',\n children=[\n html.Img(\n src=img,\n style={'height': 180, 'width': 140, 'padding': '1rem'},\n className=''\n )\n ]\n ),\n html.Div(\n className='card-body',\n children=[\n html.Div(title, className='card-title'),\n html.Div(', '.join(authors), className='card-subtitle mb-2 text-muted'),\n html.Div(''.join(stars), className='rating-stars'),\n html.Div(\n className='description-container',\n children=[\n html.H4('Description'),\n html.P('{}...'.format(' '.join(description.split(' ')[:33]))),\n ]),\n html.Button('Add to Library', className='add-button', id={'isbn': isbn}),\n\n ]\n ),\n ]\n ),\n ]\n )\n\n return book_html", "def treat_page(self):\n # let's define some basic variables\n urtext = self.current_page.text\n urlang = self.current_page.site.code\n urtitle = self.current_page.title()\n urcat = []\n eng_site = pywikibot.Site('en')\n eng_title = ''\n \n interDict = {}\n try:\n site = pywikibot.Site('ur', 'wikipedia')\n urpage = pywikibot.Page(site, urtitle)\n langlst = urpage.iterlanglinks()\n\n \n for i in langlst:\n lang = str(i.site).split(':')[1]\n interDict[lang] = i.title\n \n eng_title = interDict['en']\n except:\n pywikibot.output(u'\\03{lightred}Unable to fetch interwiki links!\\03{default}')\n return False\n \n site = pywikibot.Site('en', 'wikipedia')\n enpage = pywikibot.Page(site, eng_title)\n\n wikitext = enpage.get() \n wikicode = mwp.parse(wikitext)\n\n # Extracting sfn templates and converting them in REF tags\n sfnlist = []\n for template in wikicode.filter_templates():\n if template.name in ('sfn', 'sfn'):\n sfnlist.append(template)\n templ_rep = '<ref>' + str(template) + '</ref>'\n wikicode.replace(template , templ_rep)\n\n alltags = wikicode.filter_tags() \n reftags = {}\n \n def search(myDict, search1):\n for key, value in myDict.items():\n if search1 in value: \n return key \n \n i=1\n for tag in 
alltags:\n if tag.tag=='ref':\n if tag.attributes == []: # check if attributes list is empty\n refval='NoRefName' # Reference has no name so assigning \"NoRefName\"\n else:\n name = tag.attributes[0]\n refval = name.value\n \n if tag.contents is None:\n #conval = search(reftags,refval)\n #reftags[i] = (refval,reftags[conval][1])\n pass\n else: \n reftags[i] = (refval,tag.contents)\n i += 1\n\n dlinks = {}\n for k,v in reftags.items():\n dkey = 'و' + str(k) + 'و'\n dlinks[dkey] = '<ref>' + str(v[1]) + '</ref>'\n\n urtext = urpage.text\n for r in tuple(dlinks.items()):\n urtext = urtext.replace(*r)\n\n # newln = '\\n'\n # Using noreferences to add Reference template if not present\n self.norefbot = noreferences.NoReferencesBot(None)\n if self.norefbot.lacksReferences(urtext):\n urtext = self.norefbot.addReferences(urtext)\n else:\n urpage.text = urtext + '\\n'\n\n print(urpage.text)\n \n # save the page \n urpage.save(summary=self.summary, minor=False)\n #self.put_current(urpage.text, summary=self.summary)", "def trackEmbedding(self, tag):\n self.embeddedTags.append(tag)", "def test_basic_av_by_tag(self):\n doc1 = Document.objects.create_document(\n title=\"doc1\",\n user=self.testcase_user,\n page_count=2,\n file_name=\"koko.pdf\",\n size='1111',\n lang='ENG',\n )\n doc2 = Document.objects.create_document(\n title=\"doc2\",\n user=self.testcase_user,\n page_count=2,\n file_name=\"kuku.pdf\",\n size='1111',\n lang='ENG',\n )\n doc1.tags.add(\n \"green\",\n \"blue\",\n tag_kwargs={'user': self.testcase_user}\n )\n doc2.tags.add(\n \"blue\",\n tag_kwargs={'user': self.testcase_user}\n )\n\n ret = self.client.get(\n reverse('admin:search'), {'tag': 'green'}\n )\n self.assertEqual(\n ret.status_code,\n 200\n )\n self.assertEqual(\n len(ret.context['results_docs']),\n 1\n )\n doc_ = ret.context['results_docs'][0]\n\n self.assertEqual(\n doc_.id,\n doc1.id\n )", "def test_suggested_tags(self):\r\n # login into bookie\r\n user_data = {'login': u'admin',\r\n 'password': u'admin',\r\n 'form.submitted': u'true'}\r\n res = self.testapp.post('/login',\r\n params=user_data)\r\n # Add a bookmark\r\n res = DBSession.execute(\r\n \"SELECT api_key FROM users WHERE username = 'admin'\").fetchone()\r\n key = res['api_key']\r\n url = u'http://testing_tags.com'\r\n # set the readable content for the bookmark\r\n path = os.getcwd()+\"/bookie/tests/test_models/tag_test.txt\"\r\n content = open(path, 'r').read()\r\n test_bmark = {\r\n 'url': url,\r\n 'description': u'Bookie',\r\n 'extended': u'',\r\n 'tags': u'',\r\n 'api_key': key,\r\n 'content': content,\r\n }\r\n res = self.testapp.post('/api/v1/admin/bmark',\r\n params=test_bmark,\r\n status=200)\r\n\r\n bmark = BmarkMgr.get_by_url(url)\r\n hash_id = bmark.hash_id\r\n tags_expected = ['network', 'new', 'simulator', 'user']\r\n edit_bmark = {\r\n 'hash_id': hash_id,\r\n 'username': 'admin',\r\n 'url': url\r\n }\r\n hash_id = str(hash_id)\r\n res = self.testapp.post('/admin/edit/' + hash_id,\r\n params=edit_bmark,\r\n status=200)\r\n # pure numbers are eliminated\r\n self.assertNotIn('2014', res.body)\r\n # tags with length less than 3 are omitted\r\n self.assertNotIn('NS', res.body)\r\n # all tags are lower cased\r\n self.assertNotIn('NEW', res.body)\r\n for tag in tags_expected:\r\n self.assertIn(tag, res.body)", "def create_book():\n Book.objects.create(book_id=\"test_id\",\n title=\"test_title\",\n authors=\"test_author\",\n published_date=\"2021\",\n categories=[\"test_category\"],\n average_rating=5,\n ratings_count=5,\n 
thumbnail=\"http://books.google.com/books/test\"\n )", "def detail(request,book_id):\n\n #prodive all informations of book by GoogleBooksApi function\n if book_id is not None:\n book = Response.response_front(book_id)\n \n context = {\n \"title\":Response.build(book['title'][0]),\n \"desc\": Response.build(book['description'][0]),\n \"picture_detail\":book['picture_detail'][0],\n \"book_id\":book_id,\n \"book_cat\":book['categorie'][0],\n \"book_author\":(book['author'][0]),\n }\n return render(\n request,\n \"book.html\",context)", "def book(self):\n self.client.get(f\"{host}/book/{COMPETITION}/{CLUB}\")", "def can_tag(self):\n try:\n self.cork.require(role='beta-archivist')\n return True\n except Exception:\n return False", "def add_book(name, author):\n BOOKS.append({'name': name, 'author': author, 'read': False})", "def test_get_publication(self):\n pass", "def give_book(self):\n pass", "def create_article(container, title, subject=[]):\n\n article = api.content.create(\n container=container,\n type='tribuna.content.article',\n title=title,\n )\n api.content.transition(obj=article, transition='publish')\n adapter = ITags(article)\n adapter.tags_new = subject\n article.reindexObject()\n\n return adapter", "def tags(self):\n raise BookInfoNotImplementedError('tags', self.__class__.__name__)", "def supports_book_search(self):\n return False", "def test_tags(question):\n assert \"tags\" in question[\"instance\"]\n tags = set(question[\"instance\"][\"tags\"])\n # there should be at least one tag\n assert len(tags) >= 1\n # each tags should be in VALID_TAGS\n assert len(tags - VALID_TAGS) == 0\n # there should be exactly one category-defining tag\n assert len(tags.intersection(CATEGORY_TAGS)) == 1", "def get_book_details(self):\n\n try:\n # gives response for the request from the API url\n response = requests.get(self.book_url)\n\n \n # using ElementTree to store the response content in a tree\n root = ET.fromstring(response.content)\n book = root.find('book')\n\n # getting the required details\n self.book_details[\"title\"] = book.find('title').text\n self.book_details[\"average_rating\"] = book.find('average_rating').text\n self.book_details[\"ratings_count\"] = book.find('ratings_count').text\n self.book_details[\"num_pages\"] = book.find('num_pages').text\n self.book_details[\"image_url\"] = book.find('image_url').text\n self.book_details[\"publication_year\"] = book.find('publication_year').text\n\n # getting list of all the authors\n authors = book.find('authors')\n if authors:\n author_names_list = []\n for author in authors.iter('author'):\n author_names_list.append(author.find('name').text)\n author_names_sentence = \", \".join(author_names_list)\n self.book_details[\"authors\"] = author_names_sentence\n except:\n raise Exception(\"invalid XML response\")", "def get_books_data():\n entry = mongo.db.Books\n output = list()\n look_up_type = None\n if 'title' in request.args:\n look_up_type = 'title'\n if len(request.args['title']) <= 2:\n return render_template('error.html', message=\"Must enter characters\"), 400\n value = request.args['title'].strip('\"')\n title = entry.find({'title': {'$regex': value}})\n if title:\n for book in title:\n output.append({'title': book['title']})\n elif 'related_books' in request.args:\n look_up_type = 'similar_books'\n if len(request.args['related_books']) <= 2:\n return render_template('error.html', message=\"Must enter characters\"), 400\n value = request.args['related_books'].strip('\"')\n related_books = entry.find(\n {'similar_books': 
{'$regex': value}})\n if related_books:\n for related in related_books:\n for link in related['similar_books']:\n if value in link:\n output.append(({'similar_books': link}))\n elif 'author' in request.args:\n look_up_type = 'author'\n if len(request.args['author']) <= 2:\n return render_template('error.html', message=\"Must enter characters\"), 400\n value = request.args['author'].strip('\"')\n authors = entry.find({'author': {'$regex': value}})\n if authors:\n for name in authors:\n output.append({'author': name['author']})\n if len(output) == 0:\n return render_template('error.html', message=\"No Entries Found\"), 400\n return render_template('gottenBooks.html', output=output, look_up_type=look_up_type), 200", "def getDiscussionLinks(self, json_info, tag_filter=[]):\n discussion_links = []\n for t in json_info['document']['data']:\n if(t['type'] == 'discussions'):\n id = (t['id'])\n slug = t['attributes']['slug']\n tags = []\n for tag in t['relationships']['tags']['data']:\n tags.append(int(tag['id']))\n \n if(len(tag_filter) == 0 or len(list(set(tag_filter) & set(tags))) > 0):\n discussion_links.append(\"https://fbtag.net/d/{id}-{slug}\".format(id=id, slug=slug))\n else:\n logging.debug(msg=(tags, 'not in filter ', tag_filter, 'link', id, slug))\n pass\n \n return discussion_links", "def test_filter_recipe_by_tag(self):\n tag1 = sample_tag(self.user, name='Indian')\n tag2 = sample_tag(self.user, name='Breakfast')\n recipe1 = sample_recipe(self.user, title='Curry')\n recipe2 = sample_recipe(self.user, title=\"bacon pie\")\n recipe1.tags.add(tag1)\n recipe2.tags.add(tag2)\n recipe3 = sample_recipe(self.user)\n\n res = self.client.get(\n RECIPE_URL,\n {'tags': f'{tag1.id},{tag2.id}'}\n )\n serializer1 = RecipeSerializer(recipe1)\n serializer2 = RecipeSerializer(recipe2)\n serializer3 = RecipeSerializer(recipe3)\n self.assertIn(serializer1.data, res.data)\n self.assertIn(serializer2.data, res.data)\n self.assertNotIn(serializer3.data, res.data)", "def check_link_tag(self):\r\n node = self.article.raw_doc\r\n meta = self.parser.getElementsByTag(node, tag='link', attr='rel', value='image_src')\r\n for item in meta:\r\n src = self.parser.getAttribute(item, attr='href')\r\n if src:\r\n return self.get_image(item, src, extraction_type='linktag')\r\n return None", "def articles(self):\r\n return articles.Articles(self)", "def _get_good_request_wo_tags(self):\r\n prms = {\r\n 'url': u'http://bmark.us',\r\n 'description': u'This is my bmark desc',\r\n 'extended': u'And some extended notes about it in full form',\r\n 'tags': u'',\r\n }\r\n\r\n req_params = urllib.urlencode(prms)\r\n res = self.app.post(\r\n '/api/v1/admin/bmark?api_key={0}'.format(self.api_key),\r\n params=req_params,\r\n )\r\n return res", "def test_hypermedia_custom_resource():\n data = {\n 'name': 'Wort wort',\n 'slug': 'sluggy',\n 'not_valid': 'nooo',\n 'author': 'http://dev/api/authors/1'\n }\n instance = HypermediaBlogsResource(**data)\n assert hasattr(instance, 'get_authors')", "def view_blog(self):", "def test_model_returns_readable_representation(self):\n\n response = Tag.objects.create(\n tag = ['django-rest', 'django']\n )\n self.assertIn('django-rest', str(response))", "def test_model_can_create_a_taglist(self):\n\n response = Tag.objects.create(\n tag = ['django', 'Django-rest']\n )\n self.assertTrue(isinstance(response, Tag))", "def test_basic_av_by_tags_op_any(self):\n doc1 = Document.objects.create_document(\n title=\"doc1\",\n user=self.testcase_user,\n page_count=2,\n file_name=\"koko.pdf\",\n size='1111',\n 
lang='ENG',\n )\n doc2 = Document.objects.create_document(\n title=\"doc2\",\n user=self.testcase_user,\n page_count=2,\n file_name=\"kuku.pdf\",\n size='1111',\n lang='ENG',\n )\n doc3 = Document.objects.create_document(\n title=\"doc3\",\n user=self.testcase_user,\n page_count=2,\n file_name=\"momo.pdf\",\n size='1111',\n lang='ENG',\n )\n doc1.tags.add(\n \"red\",\n tag_kwargs={'user': self.testcase_user}\n )\n doc2.tags.add(\n \"green\",\n tag_kwargs={'user': self.testcase_user}\n )\n doc3.tags.add(\n \"blue\",\n tag_kwargs={'user': self.testcase_user}\n )\n\n base_url = reverse('admin:search')\n args = \"tag=red&tag=green&tags_op=any\"\n url = f\"{base_url}?{args}\"\n\n ret = self.client.get(url)\n\n self.assertEqual(\n ret.status_code,\n 200\n )\n self.assertEqual(\n len(ret.context['results_docs']),\n 2\n )\n result_ids = set(\n [doc_.id for doc_ in ret.context['results_docs']]\n )\n self.assertEqual(\n result_ids,\n set([doc1.id, doc2.id])\n )", "async def dict(self, ctx, *keywords):\n\n if not keywords:\n embed = discord.Embed(title='{}:'.format(ctx.message.author.name),\n description='Did you tried `{}help dict` yet?'.format(self.config['prefix']),\n colour=0xf20006)\n a = await self.bot.say(embed=embed)\n await self.bot.add_reaction(a, self.emojiUnicode['error'])\n return\n if keywords:\n old_keyword = \" \".join(keywords)\n try:\n keywords = \"%20\".join(keywords)\n url = 'http://api.urbandictionary.com/v0/define?term={}'.format(keywords)\n async with aiohttp.ClientSession() as session:\n async with session.get(url) as response:\n source = await response.json(encoding='utf8')\n\n source = json.dumps(source, indent=2)\n result = json.loads(str(source))\n embed = discord.Embed(title='{}:'.format(ctx.message.author.name),\n description='Your search tag was:\\n***`{}`***'.format(old_keyword),\n colour=0xf20006)\n embed.add_field(name='Word:', value='`{}`'.format(result['list'][0]['word']), inline=False)\n embed.add_field(name='Definition:', value='```{}```'.format(result['list'][0]['definition']), inline=False)\n embed.add_field(name='example:', value='```{}```'.format(result['list'][0]['example']), inline=True)\n embed.add_field(name='Author:', value='`{}`'.format(result['list'][0]['author']), inline=False)\n embed.add_field(name='Link:', value='{}'.format(result['list'][0]['permalink']), inline=False)\n embed.add_field(name='Likes:', value='\\U0001f44d `{}`'.format(result['list'][0]['thumbs_up']),\n inline=True)\n embed.add_field(name='Dislikes:', value='\\U0001f44e `{}`'.format(result['list'][0]['thumbs_down']),\n inline=True)\n\n\n a = await self.bot.say(embed=embed)\n await self.bot.add_reaction(a, self.emojiUnicode['succes'])\n except Exception as e:\n embed = discord.Embed(title='{}:'.format(ctx.message.author.name),\n description='Your search tag was:\\n***`{}`***\\n\\nNothing found :sailboat:'.format(old_keyword, self.config['prefix']),\n colour=0xf20006)\n a = await self.bot.say(embed=embed)\n await self.bot.add_reaction(a, self.emojiUnicode['warning'])", "def supports_book_lookup(self):\n return False", "def __init__(self, parsed_books: list):\n self.content = parsed_books", "def tagger():", "def _embed(slug):\n context = get_factcheck_context();\n context['slug'] = slug\n contents = context['contents']\n annotations = [post for post in contents if post['type'] == 'annotation' and post['published'] == 'yes']\n filtered = [post for post in annotations if post['slug'] == slug]\n filtered = filtered[0]\n context['filtered'] = filtered\n\n index = contents.index(filtered)\n 
paragraphs = int(filtered.get('prior', 1))\n start = index - paragraphs;\n prior = contents[start:index]\n context['prior'] = prior\n return make_response(render_template('embed.html', **context))", "def create_books_data():\n pass", "def test_interesting_papers_list(self):\n \n test_response = self.client.get('/papers/interesting')\n self.assertEqual(test_response.status_code, 200)\n self.assertTrue('publication_list' in test_response.context) \n self.assertTemplateUsed(test_response, 'paper-list.html')\n self.assertTemplateUsed(test_response, 'base.html')\n self.assertTemplateUsed(test_response, 'paper-detail-snippet.html') \n self.assertEqual(test_response.context['publication_list'][0].pk, 2)\n self.assertEqual(test_response.context['publication_list'][0].title, u\"THE RELATION OF ADENOSINE-3', 5'-PHOSPHATE AND PHOSPHORYLASE TO THE ACTIONS OF CATECHOLAMINES AND OTHER HORMONES.\")", "def related_for_model(self, tags, model, counts=False, min_count=None, extra=None):\n if min_count is not None: counts = True\n tags = self.model.get_tag_list(tags)\n tag_count = len(tags)\n tagged_item_table = qn(self.intermediary_table_model._meta.db_table)\n tag_columns = self._get_tag_columns()\n \n if extra is None: extra = {}\n extra_where = ''\n if 'where' in extra:\n extra_where = 'AND ' + ' AND '.join(extra['where'])\n \n # Temporary table in this query is a hack to prevent MySQL from executing\n # inner query as dependant query (which could result in severe performance loss)\n query = \"\"\"\n SELECT %(tag_columns)s%(count_sql)s\n FROM %(tagged_item)s INNER JOIN %(tag)s ON %(tagged_item)s.tag_id = %(tag)s.id\n WHERE %(tagged_item)s.content_type_id = %(content_type_id)s\n AND %(tagged_item)s.object_id IN\n (\n SELECT temporary.object_id \n FROM (\n SELECT %(tagged_item)s.object_id\n FROM %(tagged_item)s, %(tag)s\n WHERE %(tagged_item)s.content_type_id = %(content_type_id)s\n AND %(tag)s.id = %(tagged_item)s.tag_id\n AND %(tag)s.id IN (%(tag_id_placeholders)s)\n GROUP BY %(tagged_item)s.object_id\n HAVING COUNT(%(tagged_item)s.object_id) = %(tag_count)s\n ) AS temporary\n )\n %(extra_where)s\n GROUP BY %(tag_columns)s\n %(min_count_sql)s\n ORDER BY %(tag)s.%(ordering)s ASC\"\"\" % {\n 'tag': qn(self.model._meta.db_table),\n 'ordering': ', '.join(qn(field) for field in self.model._meta.ordering),\n 'tag_columns': tag_columns,\n 'count_sql': counts and ', COUNT(%s.object_id)' % tagged_item_table or '',\n 'tagged_item': tagged_item_table,\n 'content_type_id': ContentType.objects.get_for_model(model).pk,\n 'tag_id_placeholders': ','.join(['%s'] * tag_count),\n 'extra_where': extra_where,\n 'tag_count': tag_count,\n 'min_count_sql': min_count is not None and ('HAVING COUNT(%s.object_id) >= %%s' % tagged_item_table) or '',\n }\n\n params = [tag.pk for tag in tags] * 2\n if min_count is not None:\n params.append(min_count)\n\n cursor = connection.cursor()\n cursor.execute(query, params)\n related = []\n for row in cursor.fetchall():\n tag = self.model(*row[:len(self.model._meta.fields)])\n if counts is True:\n tag.count = row[len(self.model._meta.fields)]\n related.append(tag)\n return related", "def articles(self):\r\n return Articles(self)", "def canDo_article(self, artMeta):\n return False", "def create_note(self, text, tag_list):\n self.note.note_text = text\n self.note.save()\n\n for tag in tag_list:\n db_tags = Tags.objects.all() \n found = False\n\n for t in db_tags:\n if t.tag_text == tag:\n t.notes.add(self.note)\n found = True\n\n if found == False: \n new_tag = Tags() \n new_tag.tag_text = tag\n 
new_tag.save()\n new_tag.notes.add(self.note)\n new_tag.save()", "def load(self, request, item, linked_item, extra):\n\t\textra['buttons_update'] = True\n\t\treturn {\n\t\t\t'subject' : item.description,\n\t\t\t'tags' : item.tags,\n\t\t\t'text' : linked_item.text,\n\t\t}", "def deep_entities(model = 'bahdanau'):\n assert isinstance(model, str), 'model must be a string'\n model = model.lower()\n if model in ['concat', 'bahdanau', 'luong']:\n if not os.path.isfile(PATH_ENTITIES[model]['model']):\n print('downloading ENTITIES frozen %s model' % (model))\n download_file(\n S3_PATH_ENTITIES[model]['model'], PATH_ENTITIES[model]['model']\n )\n if not os.path.isfile(PATH_ENTITIES[model]['setting']):\n print('downloading ENTITIES %s dictionary' % (model))\n download_file(\n S3_PATH_ENTITIES[model]['setting'],\n PATH_ENTITIES[model]['setting'],\n )\n with open(PATH_ENTITIES[model]['setting'], 'r') as fopen:\n nodes = json.loads(fopen.read())\n g = load_graph(PATH_ENTITIES[model]['model'])\n return TAGGING(\n g.get_tensor_by_name('import/Placeholder:0'),\n g.get_tensor_by_name('import/Placeholder_1:0'),\n g.get_tensor_by_name('import/logits:0'),\n nodes,\n tf.InteractiveSession(graph = g),\n )\n\n else:\n raise Exception(\n 'model not supported, please check supported models from malaya.get_available_entities_models()'\n )", "def test_default_embedder(self, resource_loader):\n config = {\n \"model_type\": \"tagger\",\n \"example_type\": ENTITY_EXAMPLE_TYPE,\n \"label_type\": ENTITIES_LABEL_TYPE,\n \"model_settings\": {\"classifier_type\": \"embedder\"},\n \"params\": {\"emb_dim\": 5},\n }\n model = ModelFactory.create_model_from_config(ModelConfig(**config))\n examples = self.labeled_data.queries()\n labels = self.labeled_data.entities()\n model.initialize_resources(resource_loader, examples, labels)\n model.fit(examples, labels)\n model_predictions_assertions(model)\n\n config = {**config, \"params\": {**config[\"params\"], \"use_crf_layer\": False}}\n model = ModelFactory.create_model_from_config(ModelConfig(**config))\n model.initialize_resources(resource_loader, examples, labels)\n model.fit(examples, labels)\n model_predictions_assertions(model)", "def __init__(self, tag):\n self.tag = tag", "def embed(documents, ctx_encoder, ctx_tokenizer, device):\n input_ids = ctx_tokenizer(\n documents[\"title\"],\n documents[\"text\"],\n truncation=True,\n padding=\"longest\",\n return_tensors=\"pt\",\n )[\"input_ids\"]\n embeddings = ctx_encoder(\n input_ids.to(device=device), return_dict=True\n ).pooler_output\n return {\"embeddings\": embeddings.detach().cpu().numpy()}", "def conjecture_embedding(self, conjectures):\n raise NotImplementedError('Use a derived model')", "def test_artwork(self):\n # Create some art with a known tag\n user = sim.sim_user()\n artwork = sim.sim_artwork(user=user)\n tag = sim.sim_tag()\n artwork.tag_objs.append(tag)\n model.session.flush()\n\n # Ensure it shows in the tag's gallery\n res = self.app.get(self.url('tags.artwork', tag=tag))\n assert artwork.title in res", "def create_book(self, title, isbn):\n # new_book = Book(title, isbn)\n # return new_book\n return Book(title, isbn)", "def test_hypermedia_custom_resource_non_registered_urls():\n data = {\n 'name': 'Wort wort',\n 'slug': 'sluggy',\n 'not_valid': 'nooo',\n # This should not appear!\n 'author': 'http://dev/api/foobar/1'\n }\n instance = HypermediaBlogsResource(**data)\n assert not hasattr(instance, 'get_authors')", "def newspaper_article(source, article, keywords=[]):\n\n src = None\n try:\n src = 
Source.objects.get(name=source)\n except Source.DoesNotExist:\n #This is jank but can be touched up manually\n src = Source(name=source, url=article['url'])\n src.save()\n print 'source added to db with name: ' + source\n \n #unpacks article into article constructor\n try: \n art = Article(source=src, **article)\n art.save()\n make_keywords(art, keywords)\n except IntegrityError:\n print 'not unique headline for ' + article['headline'] + ' skipping.'", "def fetch_paper_books():\r\n ct = datetime.datetime.now()\r\n print('Ultima stampa di paper: %s' % ct)\r\n books = Book.objects.all()\r\n for language in settings.LANGUAGES:\r\n activate(language[0])\r\n lan = get_language()\r\n print('Lingua corrente: %s' % lan)\r\n for book in books:\r\n if book.paper_id:\r\n print('Elaboro {}'.format(book.title_it))\r\n paper_soup = render_dropbox_paper_soup(book.paper_id)\r\n filepath = settings.BASE_DIR / 'templates/ebooks/partials/book_paper_{}_{}.html'.format(book.id, language[0]) \r\n with open(filepath, 'w', encoding='utf-8') as f:\r\n print('Stampo %s' % filepath)\r\n f.write(str(paper_soup))\r\n else:\r\n pass\r\n return 1", "def test_find_empty_embedded(self):\n\n class User(EmbeddedDocument):\n name = StringField()\n\n class BlogPost(Document):\n content = StringField()\n author = EmbeddedDocumentField(User)\n\n BlogPost.drop_collection()\n\n BlogPost.objects.create(content=\"Anonymous post...\")\n\n result = BlogPost.objects.get(author=None)\n assert result.author is None", "def make_pub_rdf(pub):\n properties = {'title':'rdfs:label',\n 'volume':'bibo:volume',\n 'issue':'bibo:number',\n 'pmid':'bibo:pmid',\n 'doi':'bibo:doi',\n 'page_start':'bibo:pageStart',\n 'page_end':'bibo:pageEnd',\n 'date_harvested':'ufVivo:dateHarvested',\n 'harvested_by':'ufVivo:harvestedBy'}\n resources = {'journal_uri':'vivo:hasPublicationVenue',\n 'date_uri':'vivo:dateTimeValue'}\n ardf = \"\"\n pub_uri = pub['pub_uri']\n add = assert_resource_property(pub_uri, \"rdf:type\", \n untag_predicate(\"owl:Thing\"))\n ardf = ardf + add\n add = assert_resource_property(pub_uri, \"rdf:type\",\n untag_predicate(\"bibo:AcademicArticle\"))\n ardf = ardf + add\n\n for property in sorted(properties.keys()):\n if property in pub:\n add = assert_data_property(pub_uri,\n properties[property],\n pub[property])\n ardf = ardf + add\n for resource in sorted(resources.keys()):\n if resource in pub:\n add = assert_resource_property(pub_uri,\n resources[resource],\n pub[resource])\n ardf = ardf + add\n\n for authorship_uri in pub['authorship_uris']:\n add = assert_resource_property(pub_uri,\n \"vivo:informationResourceInAuthorship\", authorship_uri)\n ardf = ardf + add\n \n return [ardf, pub_uri]", "def filter_publication(publication, cmp_authors=True):\n query = None\n isbn_query = False\n\n # there can be ISBN query or book title query\n if publication.optionals and publication.optionals.ISBN:\n query = aleph.ISBNQuery(publication.optionals.ISBN)\n isbn_query = True\n else:\n query = aleph.TitleQuery(publication.title)\n\n result = aleph.reactToAMQPMessage(aleph.SearchRequest(query), \"\")\n\n if not result.records:\n return publication # book is not in database\n\n # if there was results with this ISBN, compare titles of the books\n # (sometimes, there are different books with same ISBN because of human\n # errors)\n if isbn_query:\n for record in result.records:\n epub = record.epublication\n\n # try to match title of the book\n if compare_names(epub.nazev, publication.title) >= 80:\n return None # book already in database\n\n return 
publication\n\n # checks whether the details from returned EPublication match Publication's\n for record in result.records:\n epub = record.epublication\n\n # if the title doens't match, go to next record from aleph\n if not compare_names(epub.nazev, publication.title) >= 80:\n continue\n\n if not cmp_authors:\n return None # book already in database\n\n # compare authors names\n for author in epub.autori:\n # convert Aleph's author structure to string\n author_str = \"%s %s %s\" % (\n author.firstName,\n author.lastName,\n author.title\n )\n\n # normalize author data from `publication`\n pub_authors = map(lambda x: x.name, publication.authors)\n if type(pub_authors) not in [list, tuple, set]:\n pub_authors = [pub_authors]\n\n # try to compare authors from `publication` and Aleph\n for pub_author in pub_authors:\n if compare_names(author_str, pub_author) >= 50:\n return None # book already in database\n\n return publication # book is not in database", "def process_nbk_html(self, limit):\n model = Model(self.graph)\n c = 0\n books_not_found = set()\n for nbk in self.book_ids:\n c += 1\n nbk_id = 'GeneReviews:'+nbk\n book_item = self.all_books.get(nbk)\n url = '/'.join((self.rawdir, book_item['file']))\n\n # figure out if the book is there; if so, process, otherwise skip\n book_dir = '/'.join((self.rawdir, 'books'))\n book_files = os.listdir(book_dir)\n if ''.join((nbk, '.html')) not in book_files:\n # logger.warning(\"No book found locally for %s; skipping\", nbk)\n books_not_found.add(nbk)\n continue\n logger.info(\"Processing %s\", nbk)\n\n page = open(url)\n soup = BeautifulSoup(page.read())\n\n # sec0 == clinical description\n clin_summary = \\\n soup.find(\n 'div', id=re.compile(\".*Summary.sec0\"))\n if clin_summary is not None:\n p = clin_summary.find('p')\n ptext = p.text\n ptext = re.sub(r'\\s+', ' ', ptext)\n\n ul = clin_summary.find('ul')\n if ul is not None:\n item_text = list()\n for li in ul.find_all('li'):\n item_text.append(re.sub(r'\\s+', ' ', li.text))\n ptext += ' '.join(item_text)\n\n # add in the copyright and citation info to description\n ptext = \\\n ' '.join(\n (ptext,\n '[GeneReviews:NBK1116, GeneReviews:NBK138602, ' +\n nbk_id+']'))\n\n model.addDefinition(nbk_id, ptext.strip())\n\n # get the pubs\n pmid_set = set()\n pub_div = soup.find('div', id=re.compile(r\".*Literature_Cited\"))\n if pub_div is not None:\n ref_list = pub_div.find_all('div', attrs={'class': \"bk_ref\"})\n for r in ref_list:\n for a in r.find_all(\n 'a', attrs={'href': re.compile(r\"pubmed\")}):\n if re.match(r'PubMed:', a.text):\n pmnum = re.sub(r'PubMed:\\s*', '', a.text)\n else:\n pmnum = \\\n re.search(\n r'\\/pubmed\\/(\\d+)$', a['href']).group(1)\n if pmnum is not None:\n pmid = 'PMID:'+str(pmnum)\n self.graph.addTriple(\n pmid,\n model.object_properties['is_about'],\n nbk_id)\n pmid_set.add(pmnum)\n reference = Reference(\n self.graph,\n pmid, Reference.ref_types['journal_article'])\n reference.addRefToGraph()\n\n # TODO add author history, copyright, license to dataset\n\n # TODO get PMID-NBKID equivalence (near foot of page),\n # and make it \"is about\" link\n # self.gu.addTriple(\n # self.graph, pmid,\n # self.gu.object_properties['is_about'], nbk_id)\n # for example: NBK1191 PMID:20301370\n\n # add the book to the dataset\n self.dataset.setFileAccessUrl(book_item['url'])\n\n if limit is not None and c > limit:\n break\n\n # finish looping through books\n\n l = len(books_not_found)\n if len(books_not_found) > 0:\n if l > 100:\n logger.warning(\"There were %d books not found.\", l)\n 
else:\n logger.warning(\n \"The following %d books were not found locally: %s\",\n l, str(books_not_found))\n logger.info(\n \"Finished processing %d books for clinical descriptions\", c-l)\n\n return", "def within_book_search_json(request, book_id):\n query = request.GET.get('q')\n term = query # todo: meta options?\n book = Book.objects.get(pk=book_id)\n\n if not query or len(query) < 3:\n return\n\n # todo: method on objectmanager to search by keyword\n notes = book.notes.filter(\n Q(subject__icontains=term) |\n Q(quote__icontains=term) |\n Q(comment__icontains=term)\n )\n terms = book.terms.filter(\n Q(term__text__icontains=term) |\n Q(term__definition__icontains=term) |\n Q(quote__icontains=term) |\n Q(quote__icontains=term)\n )\n sections = book.sections.filter(\n Q(title__icontains=term) |\n Q(authors__name__icontains=term) |\n Q(subtitle__icontains=term) |\n Q(summary__icontains=term)\n )\n\n results = {'notes': [], 'terms': [], 'sections': []}\n for note in notes:\n results['notes'].append({\n 'title': highlighter.highlight(note.subject, query),\n 'description': highlighter.highlight(note.quote, query, 200),\n 'price': note.get_page_display(),\n 'url': note.get_absolute_url(),\n })\n\n for term in terms:\n results['terms'].append({\n 'title': highlighter.highlight(term.term.text, query),\n 'description': highlighter.highlight(term.quote, query, 200),\n 'price': term.get_page_display(),\n 'url': term.get_absolute_url(),\n })\n\n for section in sections:\n authors = ', '.join(a.name for a in section.authors.all())\n results['sections'].append({\n 'title': highlighter.highlight(section.title, query),\n 'description': highlighter.highlight(authors, query),\n 'price': section.get_page_display(),\n 'url': section.get_absolute_url(),\n })\n\n return JsonResponse({\n 'results': {\n 'books': {\n 'name': 'Notes',\n 'results': results['notes'],\n },\n 'authors': {\n 'name': 'Terms',\n 'results': results['terms'],\n },\n 'sections': {\n 'name': 'Sections',\n 'results': results['sections'],\n },\n }\n })", "def borrow_book(self, author, title, publisher, edition, email, book_id):\n for book in self.books_list:\n if book['book_id'] != str(book_id):\n return 'book does not exist'\n continue\n else: \n book = {\n 'author' : author,\n 'title' : title,\n 'publisher' : publisher,\n 'edition' : edition,\n 'email' : email\n }\n self.borrowed_books.append(book)\n return book", "def add_tag(self, tag):\n\n # directional relation: tag is the blank of everything in the list\n self.relations[tag] = {\n \"overlord\": [],\n \"hegemon\": [], # for tributary\n \"tributary\": [],\n \"vassal\": [],\n \"guaranteeing\": [],\n \"guarantor\": [],\n \"alliance\": [],\n \"senior\": [],\n \"junior\": [],\n \"marriage\": []\n }", "def _mw_fetch_article(self, baseurl, title):\n params = urllib.parse.urlencode({\n 'action': 'parse',\n 'page': title,\n 'prop': 'wikitext|headhtml',\n 'formatversion': 2,\n 'format': 'json',\n 'redirects': True\n })\n api_data = self._mw_api_call(baseurl, params)\n\n page_title = api_data['parse']['title']\n content = api_data['parse']['wikitext']\n html_head = api_data['parse']['headhtml']\n text = formatter.fmt(content, summary=True)\n\n soup = BeautifulSoup(html_head, features=\"lxml\")\n if canonical_link := soup.find('link', rel='canonical'):\n # Wikipedia\n url = canonical_link.attrs['href']\n elif og_url := soup.find('meta', property='og:url'):\n # Fandom\n url = og_url.attrs['content']\n else:\n # Use generic MediaWiki link as fallback (this doesn't look as nice)\n url = 
baseurl.replace('api.php', 'index.php?' + urllib.parse.urlencode({\n 'title': page_title\n }))\n\n return (text, url)", "def test_book_related(self):\n client = APIClient()\n client.login(username=self.students[0].username, password=\"salam*123\")\n response = client.get(\"/books/4/related/\")\n json = response.json()\n self.assertEqual(json[\"count\"], 2)\n self.assertEqual(json[\"results\"][0][\"id\"], 5)\n self.assertEqual(json[\"results\"][1][\"id\"], 2)", "def books_detail(request, pk):\n try:\n snippet = Books.objects.get(url=pk)\n except Books.DoesNotExist:\n return Response(status=status.HTTP_404_NOT_FOUND)\n\n if request.method == 'GET':\n serializer = BooksSerializers(snippet)\n return Response(serializer.data)\n\n elif request.method == 'PUT':\n serializer = BooksSerializers(snippet, data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'DELETE':\n snippet.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)", "def _add_article(self, item):\n # Check if article from this source already exists.\n source = self._clean_url(item.link)\n exists = Article.objects.filter(source=source).count() > 0\n if exists:\n return\n\n article = Article(\n title=item.title,\n source=source,\n published_at=dateparser.parse(item.published),\n )\n if getattr(item, \"content\", None):\n article.summary = \"\".join(\n [content.value for content in item.content]\n )\n\n article.save()\n article.tags.add(*self._get_keywords(title=item.title))\n article.story = self._get_story(article=article)\n article.save()\n\n return article", "def insert_book():\n \n\n if request.method == 'POST':\n new_author = mongo.db.authors.insert_one({\n 'author_name': request.form.to_dict()['author_name']\n })\n \n author_id = new_author.inserted_id\n \n # Create new book in mongo.db.books\n new_book = mongo.db.books.insert_one({\n 'title': request.form.to_dict()['title'],\n 'genre': request.form.to_dict()['genre'],\n 'pages': request.form.to_dict()['pages'],\n 'reviews': [],\n 'likes': [],\n 'dislikes': [],\n 'author_id': str(ObjectId(author_id)),\n 'isbn_num': request.form.to_dict()['isbn_num']\n })\n \n return redirect(url_for('library'))\n \n return render_template('insert_book.html', \n genres=[genre for genre in mongo.db.genres.find()])", "def library_searched():\n\n searched_result = []\n \n updated_books = duplicated_code()\n\n if request.method == 'POST':\n if request.form['type_search'] == 'book':\n book_title = request.form['search']\n for book in updated_books:\n if book['title'] == book_title:\n searched_result.append(book)\n return render_template(\"library_searched.html\", result = searched_result)\n elif request.form['type_search'] == 'genre':\n book_genre = request.form['search']\n for book in updated_books:\n if book['genre'] == book_genre:\n searched_result.append(book)\n return render_template(\"library_searched.html\", result = searched_result)\n elif request.form['type_search'] == 'author':\n book_author = request.form['search']\n for book in updated_books:\n if book['author_name'] == book_author:\n searched_result.append(book)\n return render_template(\"library_searched.html\", result = searched_result)\n else:\n return render_template(\"library_searched.html\")", "def test_relation_without_tags():\n assert query_row(db_conf, 'osm_buildings', 50111) == None\n assert query_row(db_conf, 'osm_buildings', -50121)['type'] == 'yes'", "def 
test_filter_recipes_by_tags(self):\n recipe1 = sample_reteta(user=self.user, title='Thai vegetable curry')\n recipe2 = sample_reteta(user=self.user, title='Aubergine with tahini')\n tag1 = sample_tag(user=self.user, name='Vegan')\n tag2 = sample_tag(user=self.user, name='Vegetarian')\n recipe1.tags.add(tag1)\n recipe2.tags.add(tag2)\n # recipe3 = sample_reteta(user=self.user, title='Fasole si carnati')\n\n res = self.client.get(\n RETETA_URL,\n {'tags': f'{tag1.id},{tag2.id}'}\n )\n\n serializer1 = RetetaSerializer(recipe1)\n serializer2 = RetetaSerializer(recipe2)\n # serializer3 = RetetaSerializer(recipe3)\n self.assertIn(serializer1.data, res.data)\n self.assertIn(serializer2.data, res.data)\n # self.assertNotIn(serializer3.data, res.data)", "def test_badge_should_have_tags(self):\n\n badge = self.get_sample_badge()\n # It's a string, even though it is used as a URL\n self.assertIsInstance(badge.tags, list)", "def is_book_available(self, book):\n request_url = \"%s?q=%s\" % (self.API_URL, book)\n json_data = self.make_request(request_url)\n if json_data and len(json_data['docs']) >= 1:\n return True\n return False", "def add_topic(request):\n template = loader.get_template('topicAdd.html')\n try:\n topic = serializers.serialize(\"json\", Topic.objects.filter())\n except ObjectDoesNotExist:\n return HttpResponse(\"This topic doesn't exists!\")\n context = {\n 'topics': topic\n }\n\n if request.method == \"POST\":\n data = JSONParser().parse(request)\n\n # Add topic to database.\n try:\n Topic.objects.get(name=data[\"name\"])\n print(\"topic exists\")\n return HttpResponse(\"This topic exists\")\n except ObjectDoesNotExist:\n try:\n user = User.objects.get(username=request.user)\n except ObjectDoesNotExist:\n return JsonResponse({'status':'false','message':'You should login to create a topic!'}, status=401)\n name = data[\"name\"]\n topicObject = Topic.objects.create(name=name, user=user)\n for tag in data[\"tags\"]:\n tag_name = tag['label']\n if tag_name == '':\n continue\n tag_wiki_id = tag['id']\n try:\n tagObject = Tag.objects.get(wikidataID=tag_wiki_id)\n except ObjectDoesNotExist:\n tagObject = Tag.objects.create(name=tag_name, wikidataID=tag_wiki_id)\n except MultipleObjectsReturned:\n return HttpResponse(\"Multiple tags exist for.\" + tag + \" Invalid State.\")\n\n #hidden tags\n unique_hidden_tags = list(set(tag['hidden_tags']))\n if unique_hidden_tags:\n tagObject.hidden_tags = unique_hidden_tags\n # for hidden_tag in unique_hidden_tags:\n # try:\n # hiddenTagObject = Tag.objects.get(wikidataID=hidden_tag)\n # except ObjectDoesNotExist:\n # hiddenTagObject = Tag.objects.create(wikidataID=hidden_tag, hidden=True)\n # hiddenTagObject.save()\n tagObject.save()\n topicObject.tags.add(tagObject)\n context = {\n }\n\n # Add relationship to database.\n relates_to = data[\"relates_to\"]\n for relation in data[\"relates_to\"]:\n if relation['topic_id'] == '':\n continue\n try:\n relatedTopicObject = Topic.objects.get(pk=relation['topic_id'])\n label = relation['rel_name']\n relationObject = Relation.objects.create(topic_from=topicObject, topic_to=relatedTopicObject, label=label)\n except ObjectDoesNotExist:\n print(\"error\")\n return HttpResponse(\"Related topic does not exist\");\n except MultipleObjectsReturned:\n print(\"error\")\n return HttpResponse(\"This topic exists\")\n # End of add relationship to database.\n\n\n # Adding a post to new created topic\n\n if data[\"postAdd\"] == True:\n postStuff = data[\"post\"]\n content = postStuff[\"post_content\"]\n postObject = 
Post.objects.create(content=content, user=user, topic=topicObject)\n for tag in postStuff[\"post_tags\"]:\n if len(tag)>0:\n if tag['label'] == '':\n continue\n try:\n tagObject = Tag.objects.get(wikidataID=tag['id'])\n except ObjectDoesNotExist:\n tagObject = Tag.objects.create(wikidataID=tag['id'], name=tag['label'])\n except MultipleObjectsReturned:\n return HttpResponse(\"Multiple tags exist for.\" + tag + \" Invalid State.\")\n\n unique_hidden_tags = list(set(tag['hidden_tags']))\n if unique_hidden_tags:\n tagObject.hidden_tags = unique_hidden_tags\n\n tagObject.save()\n postObject.tags.add(tagObject)\n # End of adding a post to new created topic\n\n return HttpResponse(template.render(context, request))\n return HttpResponse(template.render(context, request))", "def dashboard_content_article_tag_cloud():\n tag_stats = dict()\n past_30 = offset_time_past(30, str=True)\n articles = mongo.db[app.config['ARTICLES_COLLECTION']]\n results = articles.find({'collected': {'$gt': past_30}}, {'_id': 0})\n for result in results:\n for tag in result.get('tags', list()):\n tag_stats[tag] = tag_stats.get(tag, 0) + 1\n tags_sorted = sorted(tag_stats.items(), key=operator.itemgetter(1),\n reverse=True)[:50]\n data = list()\n for item in tags_sorted:\n data.append({'name': item[0], 'weight': item[1]})\n return jsonify(data)" ]
[ "0.5442478", "0.53429097", "0.52412236", "0.51314247", "0.51292527", "0.5036361", "0.5019173", "0.49803144", "0.4961309", "0.49558708", "0.49364054", "0.49175373", "0.4890888", "0.48781618", "0.48643574", "0.48554486", "0.48543897", "0.48376718", "0.48289764", "0.48264697", "0.48157114", "0.47895682", "0.47717565", "0.47378552", "0.47255543", "0.4718869", "0.47182804", "0.47128978", "0.47085094", "0.47052652", "0.46948475", "0.4688331", "0.46649885", "0.4664142", "0.46610576", "0.4657365", "0.46491435", "0.46435276", "0.46368307", "0.4634703", "0.4623961", "0.46051648", "0.4602511", "0.45936498", "0.4592617", "0.45918056", "0.45870495", "0.4567064", "0.4562155", "0.45608458", "0.45605958", "0.45595253", "0.45582837", "0.45564133", "0.4555707", "0.4546752", "0.45425448", "0.45424762", "0.45107174", "0.45076704", "0.45022458", "0.4493243", "0.44879037", "0.44849265", "0.44848752", "0.44811144", "0.44745293", "0.44719836", "0.44689882", "0.4463898", "0.44548988", "0.4445145", "0.44446373", "0.4442951", "0.44419357", "0.443878", "0.44381368", "0.44363412", "0.44354546", "0.44318908", "0.44302136", "0.44298574", "0.4429386", "0.4425824", "0.442283", "0.44216505", "0.4417756", "0.44118258", "0.44051093", "0.440167", "0.43911716", "0.43896335", "0.43826634", "0.43780056", "0.43764463", "0.43764186", "0.43750367", "0.4373648", "0.43717843", "0.43716154", "0.43641293" ]
0.0
-1
Returns an HTML script element for including a script from the admin media url (or other location if an absolute url is given).
def include_admin_script(script_path):
    if not absolute_url_re.match(script_path):
        script_path = '%s%s' % (settings.ADMIN_MEDIA_PREFIX, script_path)
    return '<script type="text/javascript" src="%s"></script>' % script_path
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resource_js(self):\n \n portal_url = getSite().absolute_url()\n \n return \"\"\"\n <script type=\"text/javascript\" src=\"%s/++resource++swfobject.js\"></script>\n <script type=\"text/javascript\" src=\"%s/++resource++audio_player.js\"></script> \n <script type=\"text/javascript\"> \n AudioPlayer.setup(\"%s/++resource++audio_player.swf\", { \n width: 300\n }); \n </script>\n \"\"\" % (portal_url, portal_url, portal_url)", "def propeller_javascript_url():\n return javascript_url()", "def get_vendor_js():\n return (\"://plotly-load_from_python.js\",)", "def replacement(self):\n assert (self.src or self.inline) and not (self.src and self.inline)\n if self.src:\n return '<script async type=\"text/javascript\" src=\"%s\"></script>' % urllib.quote(self.src)\n else:\n return '<script>\\n%s\\n</script>' % self.inline", "def load_script(browser, url):\r\n if browser.current_url.startswith('file:'):\r\n url = 'https:' + url\r\n browser.execute_script(\"\"\"\r\n var script_tag = document.createElement(\"script\");\r\n script_tag.setAttribute(\"type\", \"text/javascript\");\r\n script_tag.setAttribute(\"src\", arguments[0]);\r\n document.getElementsByTagName(\"head\")[0].appendChild(script_tag);\r\n \"\"\", url)", "def _load_snippet(filename) -> str:\n fullpath = f'{dirname(__file__)}/js/{filename}'\n file = open(fullpath, 'r')\n script = file.read()\n file.close()\n return script", "def inline_javascript(html_src, path=None):\n javascript_re = re.compile(\"\\<script src\\=\\\"([0-9a-zA-Z./]+)\\\"\\>\\</script>\")\n\n def fetch_jssource(in_match):\n rel_path = in_match.group(1)\n jspath = os.path.join(path, rel_path)\n return \"<script>\\n{0}\\n</script>\".format(open(jspath, 'r').read())\n\n return javascript_re.sub(fetch_jssource, html_src)", "def audio_file_player(self):\n if self.audio_file:\n file_url = settings.MEDIA_URL + str(self.content)\n player_string = '<audio src=\"%s\" controls>Your browser does not support the audio element.</audio>' % (file_url)\n return player_string", "def amp_url(self):\n return self.url.child(\"amp\")", "def get_media_js(self):\n media_js = uniquify_sequence(self.media_js + self.plugin_media_js)\n\n return media_js", "def get_embed_url(self):\n return self.embed_url", "def get_embed_url(self):\n if not self.get_video_id() or not self.get_username():\n return ''\n \n return 'http://cdn.livestream.com/embed/%s?layout=4&amp;clip=%s' % (self.get_username(), self.get_video_id())", "def topcoat_icons_script_tag():\n return u'<script type=\"text/javascript src=\"%s\"></script>' % topcoat_icons_script_url()", "def get_embed_url(self):\n if not self.get_video_id():\n return ''\n \n if self.get_video_id() == -1:\n return self.original_url\n \n return 'https://www.slideshare.net/slideshow/embed_code/%s' % self.get_video_id()", "def get_url(self,urldata):\n return \"%s?%s\" % (self.script_url, urllib.urlencode(urldata,1))", "def get_vendor_js(cls):\n return (\n vendor_static_dependencies[\"cesiumjs\"].get_custom_version_url(\n url_type=\"js\", version=cls.cesium_version\n ),\n )", "def getBaseURL():\n return getQualifiedURL(getScriptname())", "def get_embed_url(self):\n if not self.original_url:\n return ''\n \n return 'https://vine.co/v/%s/embed/simple' % (self.get_video_id())", "def js(filepath):\n return static_file(filepath, root=\"public\")", "def _get_scripts_resource(pe):\n return next(\n (\n entry.directory.entries[0].directory.entries[0]\n for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries\n if entry.name and entry.name.string == b\"PYTHONSCRIPT\"\n ),\n None,\n 
)", "def url(self, url):\n prefix = self.request_local.environ['toscawidgets.prefix']\n script_name = self.request_local.environ['SCRIPT_NAME']\n if hasattr(url, 'url_mapping'):\n url = url.url_mapping['normal']\n return ''.join([script_name, prefix, url])", "def core_cdn_file(request, source):\n\n file_path = settings.CENTIPAIR_TEMPLATE_DIR + \"/cdn/\" + source\n source_file_url = settings.TEMPLATE_STATIC_URL + \"/\" + file_path\n return source_file_url", "def get_url(self):\n if not self.get_video_id():\n return ''\n \n if self.get_video_id() == -1:\n return self.original_url\n \n return 'http://www.slideshare.net/slideshow/embed_code/%s' % self.get_video_id()", "def driver(self):\n return '<static-vmedia>'", "def third_party_scripts(request):\n return {\n 'ORCHESTRA_THIRD_PARTY_SCRIPTS_TEMPLATE':\n settings.ORCHESTRA_THIRD_PARTY_SCRIPTS_TEMPLATE\n }", "def mediaplayer(src,width=400,height=250):\n return XML('<embed allowfullscreen=\"true\" allowscriptaccess=\"always\" flashvars=\"height=%(height)s&width=%(width)s&file=%(src)s\" height=\"%(height)spx\" src=\"%(url)s\" width=\"%(width)spx\"></embed>'%dict(url=URL('static','plugin_wiki/mediaplayer.swf'),src=src,width=width,height=height))", "def client_plugin_source(self, language):\n\n static = self.static\n if static is None:\n return None\n\n filename = os.path.join(static, \"main.\" + language)\n realfilename = os.path.realpath(filename)\n\n if not realfilename.startswith(self.static + '/'): # pragma: no cover\n raise ValueError(\"Invalid language `%s`\" % language)\n\n if not os.path.isfile(realfilename):\n return None\n\n return realfilename", "def bootstrap_javascript_url():\n return javascript_url()", "def get_embed_url(self):\n raise NotImplementedError(\"Subclass must implement abstract method get_embed_url\")", "def render_external(plugin, **kwargs):\n\n html = oembed_html(plugin.url)\n if 'youtube.com' in html:\n return mark_safe(\n '<div class=\"flex-video widescreen\">{}</div>'.format(html))\n if 'vimeo.com' in html:\n return mark_safe(\n '<div class=\"flex-video widescreen vimeo\">{}</div>'.format(html))\n return mark_safe(html)", "def static(filename):\n return href.static(file=filename)", "def get_embed_url(self):\n if not self._oembed:\n return ''\n \n if not self.original_url:\n return ''\n \n return 'https://w.soundcloud.com/player/?url=%s' % (self.original_url)", "def get_src_js(self):\n if self.get_style() != self.STYLE_BASE:\n return f\"dtables/js/dataTables.{self.get_style()}.js\"\n else:\n return f\"dtables/js/{self.get_style()}.dataTables.js\"", "def inject_script(widget_id, options):\n\n request = current.request\n s3 = current.response.s3\n\n # Static script\n if s3.debug:\n script = \"/%s/static/scripts/S3/s3.ui.anonymize.js\" % \\\n request.application\n else:\n script = \"/%s/static/scripts/S3/s3.ui.anonymize.min.js\" % \\\n request.application\n scripts = s3.scripts\n if script not in scripts:\n scripts.append(script)\n\n # Widget options\n opts = {}\n if options:\n opts.update(options)\n\n # Widget instantiation\n script = '''$('#%(widget_id)s').anonymize(%(options)s)''' % \\\n {\"widget_id\": widget_id,\n \"options\": json.dumps(opts),\n }\n jquery_ready = s3.jquery_ready\n if script not in jquery_ready:\n jquery_ready.append(script)", "def get_embed_url(self):\n if not self.id_video or not self.original_url or not self.xml_response:\n return ''\n return '//view.vzaar.com/{0}/player'.format(self.id_video)", "def test_js_url(self):\n self.assertEquals(dirs.get_js_url(), \"%s%s\" % (settings.STATIC_URL, 
\"js\"))\n \n with self.settings(MEDIABRUTE_USE_STATIC=False):\n self.assertEquals(dirs.get_js_url(), \"%s%s\" % (settings.MEDIA_URL, \"js\"))\n \n with self.settings(MEDIABRUTE_JS_URL_PATH=\"heyo/yoyo\"):\n self.assertEquals(dirs.get_js_url(), \"%s%s\" % (settings.STATIC_URL, \"heyo/yoyo\"))\n \n with self.settings(MEDIABRUTE_USE_STATIC=False, MEDIABRUTE_JS_URL_PATH=\"heyo/yoyo\"):\n self.assertEquals(dirs.get_js_url(), \"%s%s\" % (settings.MEDIA_URL, \"heyo/yoyo\"))", "def deploy_static_media(env=None, asset_version='', quick=False, haus_vars={}):\n print green('Deploying static media {}'.format('__quick__' if quick else ''))\n collectstatic(no_input=True, skip_admin=quick)", "def get_url(self):\n return staticfiles_storage.url(self._name)", "def scriptpath(self, code):\n return '' if code == 'en' else ('/' + code)", "def loadjs(*args):\n return render(settings, 'JS_FILES', 'staticloader/load_js.html', *args)", "def un_src(self):\n if self.src is None:\n return\n self.inline = '''\n var script = document.createElement('script');\n script.src = \"%s\";\n document.body.appendChild(script);\n''' % self.src\n self.src = None", "def embed_url(self):\n\n ref_number = self.ID\n embed_link = \"\".join(('https://embeds.datpiff.com/mixtape/', \n str(ref_number),\n '?trackid=1&platform=desktop'))\n return embed_link", "def cdn_file(request, source):\n site = request.site\n file_path = site.template_dir + \"/cdn/\" + source\n source_file_url = settings.TEMPLATE_STATIC_URL + \"/\" + file_path\n return source_file_url", "def get_embed_url(self):\n if not self.original_url:\n return ''\n \n if not self.video_id:\n return ''\n \n return 'http://embed.bambuser.com/broadcast/%s?context=b_simple&autoplay=0&chat=0' % (self.video_id)", "def media(self, req):\n first_part = req.path_info_peek()\n if first_part in self.media_paths:\n req.path_info_pop()\n path = self.media_paths[first_part]\n else:\n path = resource_filename(\"weberror\", \"eval-media\")\n app = urlparser.StaticURLParser(path)\n return app", "def setup_js(self):\n script = \"\"\"\n Salamat.contextData.redactorOptions = {imageGetJson: '%s'};\n \"\"\"\n script %= self.reverse('redactor_files', args=(self.namespace,\n self.prefix))\n return HttpResponse(script, content_type='text/javascript')", "def get_asset(location, filename):\r\n return contentstore().find(Transcript.asset_location(location, filename))", "def get_embed_url(self, *, style: str = \"banner1\") -> str:\n if style not in self.valid_embed_styles:\n raise ValueError(\"Style must be in {}\".format(self.valid_embed_styles))\n\n return self.embed_url + \"?style={}\".format(style)", "def asset_url(filename=\"\", version=True):\n if filename.startswith(\"http\") or filename.startswith(\"/\"):\n return filename\n else:\n if config.static_url:\n return_url = \"http://\" + config.static_url\n else:\n return_url = \"/static\" # web.ctx.home + \"/static\"\n if filename:\n return_url += \"/\" + filename\n if version:\n return_url += \"?\" + config.asset_version\n return return_url", "def asset_location(location, filename):\r\n return StaticContent.compute_location(location.course_key, filename)", "def get_js_file(self):\n return 'placeholder'", "def static(path):\n return static_file(path, root='media')", "def get_script(blob):\n return get_script_class(blob.path)(source=blob.abspath)", "def load_url(src):\n return LOAD(url=src)", "def propeller_jquery_url():\n return jquery_url()", "def get_url(mods):\n url = mods.find(\"{{{0}}}location/{{{0}}}url\".format(common.MODS_NS))\n return url.text", 
"def get_download_link(ep: mdl.Episode) -> str:\n embed_url = ep.video_data.get(\"streamtape\")\n if not embed_url:\n return None\n\n try:\n response = requests.get(embed_url, headers=settings.REQUEST_HEADERS)\n soup = BeautifulSoup(response.text, \"html.parser\")\n\n text = [str(script) for script in soup.find_all(\"script\") if \").innerHTML\" in str(script)][\n 0\n ]\n text = \"\".join(text.rstrip(\"</script>\").lstrip(\"<script>\").split())\n text = text.split(\"innerHTML=\")[1].rstrip(\";\")\n text = \"\".join([substr.strip('\"').strip(\"'\") for substr in text.split(\"+\")])\n\n download_link = f\"https:{text}\"\n except Exception as e:\n print(e)\n return None\n\n return download_link", "def getScriptForApp(app):\n\n script = None\n if _currentPresentationManager >= 0:\n script = \\\n _PRESENTATION_MANAGERS[_currentPresentationManager].getScript(app)\n return script", "def get_media_json_url(self, nuxeo_id):\n # https://s3.amazonaws.com/static.ucldc.cdlib.org/media_json/002130a5-e171-461b-a41b-28ab46af9652-media.json\n url = \"https://s3.amazonaws.com/static.ucldc.cdlib.org/media_json/{}-media.json\".format(nuxeo_id)\n\n return url", "def link_callback(self, uri, rel):\n global sUrl, mUrl\n result = finders.find(uri)\n if result:\n if not isinstance(result, (list, tuple)):\n result = [result]\n result = list(os.path.realpath(path) for path in result)\n path = result[0]\n else:\n sUrl = settings.STATIC_URL # Typically /static/\n sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/\n mUrl = settings.MEDIA_URL # Typically /media/\n mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/\n\n if uri.startswith(mUrl):\n path = os.path.join(mRoot, uri.replace(mUrl, \"\"))\n elif uri.startswith(sUrl):\n path = os.path.join(sRoot, uri.replace(sUrl, \"\"))\n else:\n return uri\n\n # make sure that file exists\n if not os.path.isfile(path):\n raise Exception(\n 'media URI must start with %s or %s' % (sUrl, mUrl)\n )\n return path", "def link_callback(self, uri, rel):\n result = finders.find(uri)\n if result:\n if not isinstance(result, (list, tuple)):\n result = [result]\n result = list(os.path.realpath(path) for path in result)\n path=result[0]\n else:\n sUrl = settings.STATIC_URL # Typically /static/\n sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/\n mUrl = settings.MEDIA_URL # Typically /media/\n mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/\n\n if uri.startswith(mUrl):\n path = os.path.join(mRoot, uri.replace(mUrl, \"\"))\n elif uri.startswith(sUrl):\n path = os.path.join(sRoot, uri.replace(sUrl, \"\"))\n else:\n return uri\n\n # make sure that file exists\n if not os.path.isfile(path):\n raise Exception(\n 'media URI must start with %s or %s' % (sUrl, mUrl)\n )\n return path", "def asset(location, subs_id, lang='en', filename=None):\r\n asset_filename = subs_filename(subs_id, lang) if not filename else filename\r\n return Transcript.get_asset(location, asset_filename)", "def link_callback(uri, rel):\n #print(\"uri : \"+uri) \n sUrl = settings.STATIC_URL # Typically /static/\n sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/\n mUrl = settings.MEDIA_URL # Typically /media/\n mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/\n\n #print(\"sUrl : \"+sUrl)\n #print(\"sRoot : \"+sRoot)\n #print(\"mUrl : \"+mUrl)\n #print(\"mRoot : \"+mRoot) \n\n if uri.startswith(mUrl):\n path = os.path.join(mRoot, uri.replace(mUrl, \"\"))\n elif uri.startswith(sUrl):\n path = 
os.path.join(sRoot, uri.replace(sUrl, \"\"))\n else:\n return uri\n\n #print(\"path : \"+path) \n # make sure that file exists\n if not os.path.isfile(path):\n raise Exception(\n 'media URI must start with %s or %s' % (sUrl, mUrl)\n )\n return path", "def path_extern_media(self) -> PurePath:\n return PurePath(self.path_extern_supervisor, MEDIA_DATA)", "def javascript_url(self, url, **kw):\n self._javascript_url.setdefault(absolute_url(url, self.static_url), (self._order, kw))\n self._order += 1\n return ()", "def _get_path_to_front_end():\n dpath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'fe')\n log(\"Front-end static files @ {0}\".format(dpath))\n\n return dpath", "def oembed(self, url):\r\n _url = '{0}/oembed'.format(self.get_url())\r\n return http.Request('GET', _url, {'url': url}), parsers.parse_json", "def generate_js_dir():\n\n return pkg_resources.resource_filename('linkedin.mobster.har.visualization.js', None)", "def get_wrapper_js_path(cls):\n return os.path.join(os.path.dirname(__file__), \"wrap_crowd_source.js\")", "def get_default_javascript():\n return [\"_static/require.js\"]", "def mpd_url(self):\n # type: () -> string_types\n return self._mpd_url", "def script():\n return Response(\n response=render_template(\"import_export/js/import_export.js\", _=_),\n status=200,\n mimetype=\"application/javascript\"\n )", "def absolute_asset_url(module, path):\n return absolute_uri(get_asset_url(module, path))", "def plugin_source(self, project_id, plugin_id, language):\n try:\n project = self.server.projects[project_id]\n plugin = project.plugins[plugin_id]\n fullpath = plugin.client_plugin_source(language)\n except (KeyError, ValueError):\n raise HTTPError(404)\n else:\n if fullpath is None:\n raise HTTPError(404)\n else:\n root = os.path.dirname(fullpath)\n filename = os.path.basename(fullpath)\n return static_file(filename, root=root)", "def propeller_javascript(jquery=None):\n javascript = ''\n # See if we have to include jQuery\n if jquery is None:\n jquery = get_propeller_setting('include_jquery', False)\n # NOTE: No async on scripts, not mature enough. 
See issue #52 and #56\n if jquery:\n url = propeller_jquery_url()\n if url:\n javascript += render_tag('script', attrs={'src': url})\n url = propeller_javascript_url()\n if url:\n attrs = {'src': url}\n javascript += render_tag('script', attrs=attrs)\n return mark_safe(javascript)", "def oembed(self, url):\n _url = '{0}/oembed'.format(self.get_url())\n return http.Request('GET', _url, {'url': url}), parsers.parse_json", "def link_callback(self, uri, rel):\n result = finders.find(uri)\n if result:\n if not isinstance(result, (list, tuple)):\n result = [result]\n result = list(os.path.realpath(path) for path in result)\n path = result[0]\n else:\n sUrl = settings.STATIC_URL # Typically /static/\n sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/\n mUrl = settings.MEDIA_URL # Typically /media/\n mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/\n\n if uri.startswith(mUrl):\n path = os.path.join(mRoot, uri.replace(mUrl, \"\"))\n elif uri.startswith(sUrl):\n path = os.path.join(sRoot, uri.replace(sUrl, \"\"))\n else:\n return uri\n\n # make sure that file exists\n if not os.path.isfile(path):\n raise Exception(\n 'media URI must start with %s or %s' % (sUrl, mUrl)\n )\n return path", "def inject_js(widget_id, options):\n\n s3 = current.response.s3\n appname = current.request.application\n\n # Static JS\n scripts = s3.scripts\n if s3.debug:\n script = \"/%s/static/scripts/S3/s3.shelter_inspection.js\" % appname\n else:\n script = \"/%s/static/scripts/S3/s3.shelter_inspection.min.js\" % appname\n scripts.append(script)\n\n # Instantiate widget\n scripts = s3.jquery_ready\n script = '''$('#%(id)s').shelterInspection(%(options)s)''' % \\\n {\"id\": widget_id, \"options\": json.dumps(options)}\n if script not in scripts:\n scripts.append(script)", "def media_image_url(self):\n url = self._state.get(\"albumart\", None)\n return self._volumio.canonic_url(url)", "def get_embed_url(self):\n if not self.get_video_id():\n return ''\n \n return 'https://www.dailymotion.com/embed/video/%s' % self.get_video_id()", "def get_client_js(self, components, url):\n out = \"\\n\\n\"\n if len(components) > 0:\n out += \"Depender.loaded.combine(['\"\n out += \"','\".join([ \"/\".join(c) for c in components ]) + \"']);\\n\\n\"\n out += \"Depender.setOptions({\\n\"\n out += \"\tbuilder: '\" + url + \"'\\n\"\n out += \"});\"\n return out;", "def disqus_dev():\n if settings.DEBUG:\n return \"\"\"<script type=\"text/javascript\">\n var disqus_developer = 1;\n var disqus_url = 'http://%s/';\n</script>\"\"\" % Site.objects.get_current().domain\n return \"\"", "def get_player_url(id):\n return JAFC_M3U8_TEMPLATE.format(id)", "def test_insert_amp_js(parsed_html):\n parsed_amp = utils.insert_amp_js(parsed_html)\n assert (\n parsed_amp.head.find(\"script\", src=re.compile(\"^https://cdn.ampproject.org.*\"))\n is not None\n )", "def script_content(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"script_content\")", "def script_content(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"script_content\")", "def get_media_path(self, filename):\n return join(settings.CMS_PAGE_MEDIA_PATH, \"%d\" % self.id, filename)", "def do_s3_static_url(parser, token):\n return do_s3_media_url(parser, token, static=True)", "def ext_static(context, extension, path):\n return static('ext/%s/%s' % (extension.id, path))", "def extraire(self, url, prefix):\n # Recuperer le code html de la page youtube\n print(url)\n code = urlopen(url).read().decode('utf8').split('\"')\n\n for 
elmt in code:\n if prefix in elmt:\n return elmt\n \n # Valeur par defaut\n return '/watch?v=jNQXAC9IVRw'", "def public_upload_dir_rel(self):\n return os.path.join(self.short_name,settings.COMIC_PUBLIC_FOLDER_NAME)", "def link_callback(uri, rel):\n # use short variable names\n sUrl = settings.STATIC_URL # Typically /static/\n sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/\n mUrl = settings.MEDIA_URL # Typically /static/media/\n mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/\n\n # convert URIs to absolute system paths\n if uri.startswith(mUrl):\n path = os.path.join(mRoot, uri.replace(mUrl, \"\"))\n elif uri.startswith(sUrl):\n path = os.path.join(sRoot, uri.replace(sUrl, \"\"))\n else:\n return uri # handle absolute uri (ie: http://some.tld/foo.png)\n\n # make sure that file exists\n if not os.path.isfile(path):\n raise Exception(\n 'media URI must start with %s or %s' % (sUrl, mUrl)\n )\n return path", "def _get_oembed(self, url):\n api_url = 'http://www.soundcloud.com/oembed/?url=%s&format=json' % (url)\n return self._oembed_request(api_url)", "def plugin_url(self):\n return self.__plugin_url", "def vendor_bundle(self) -> str:\n\n if self.minimize:\n js_url = f\"https://cdn.jsdelivr.net/gh/salesforce/cloudsplaining@{__version__}/cloudsplaining/output/dist/js/chunk-vendors.js\"\n bundle = f'<script type=\"text/javascript\" src=\"{js_url}\"></script>'\n return bundle\n else:\n vendor_bundle_path = get_vendor_bundle_path()\n with open(vendor_bundle_path, \"r\", encoding=\"utf-8\") as f:\n bundle_content = f.read()\n # bundle_content = vendor_bundle_path.read_text(encoding=\"utf-8\")\n bundle = f'<script type=\"text/javascript\">\\n{bundle_content}\\n</script>'\n return bundle", "def get_media_data(self, res_text):\n shared_data = re.search(r'_sharedData = (\\{.+});<\\/script>', res_text)\n entry_data = json.loads(shared_data.group(1))['entry_data']\n\n if 'PostPage' in entry_data:\n return entry_data['PostPage'][0]['graphql']['shortcode_media']\n elif 'HttpErrorPage' in entry_data:\n raise PageNotFoundError('Page not found')\n elif 'ProfilePage' in entry_data:\n raise PrivateAccountError('The post is private')\n else:\n raise UnknownPageTypeError(f'Unknown page type in entry_data: {entry_data}')", "def get_static_path(path, aid, filename):\n return os.path.join(path, aid, os.path.basename(filename))", "def code(self):\n return '{}\\n<script>{}</script>'.format(self.html, self.js)", "def _copy_to_media(self, template_name, source=''):\n dirpath = os.path.join(self.cache_root, os.path.dirname(template_name))\n filename = os.path.basename(template_name)\n fullpath = os.path.join(dirpath, filename)\n\n if not os.path.isfile(fullpath) or settings.DEBUG:\n if not os.path.exists(dirpath):\n os.makedirs(dirpath)\n\n f = open(fullpath, 'w')\n f.write(source)\n f.close()\n\n return urljoin(self.cache_url, template_name), filename", "def js_embed(self):\n if self.force_js_embed:\n return True\n else:\n return self._jshost in (\n SCRIPT_FILE_PATH, constants.DEFAULT_JUPYTER_GITHUB_URL)" ]
[ "0.5999079", "0.56655055", "0.5653992", "0.56134677", "0.5572652", "0.5382267", "0.5336885", "0.5328502", "0.53220886", "0.53072685", "0.5276165", "0.52621883", "0.5245122", "0.5236014", "0.523167", "0.5201591", "0.51624304", "0.51335704", "0.51318634", "0.51224566", "0.5115419", "0.5115169", "0.5107584", "0.51004356", "0.5084249", "0.50680596", "0.5040471", "0.5032476", "0.5027374", "0.5023635", "0.50232935", "0.50022733", "0.5001753", "0.49992085", "0.49971157", "0.49927452", "0.49830878", "0.49806905", "0.4971273", "0.4961445", "0.4957104", "0.4952762", "0.49302557", "0.4915623", "0.48950958", "0.488776", "0.4881568", "0.48721427", "0.4871523", "0.48705405", "0.48510745", "0.48368773", "0.48355025", "0.483118", "0.4826332", "0.48248383", "0.48164523", "0.4814861", "0.48056504", "0.47914153", "0.47836936", "0.47806942", "0.47738105", "0.47688937", "0.47657314", "0.47586334", "0.47584054", "0.47539675", "0.47511318", "0.47477186", "0.47446623", "0.47375935", "0.47255045", "0.4723893", "0.4719596", "0.47175545", "0.47026157", "0.46979904", "0.4697886", "0.46881", "0.4685857", "0.46849766", "0.46788868", "0.4673516", "0.46713293", "0.46713293", "0.466151", "0.46578535", "0.4649241", "0.46469328", "0.4635894", "0.46336275", "0.46218807", "0.462075", "0.46199712", "0.4613227", "0.45911068", "0.45901507", "0.45824072", "0.4577758" ]
0.7652485
0
r""" Description Compute ChebNet layer.
def forward(self, graph, feat, lambda_max=None):
    def unnLaplacian(feat, D_invsqrt, graph):
        """ Operation Feat * D^-1/2 A D^-1/2
        (but written as matrix multiplication: D^-1/2 A D^-1/2 Feat)"""
        graph.ndata['h'] = feat * D_invsqrt
        graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
        return graph.ndata.pop('h') * D_invsqrt

    with graph.local_scope():
        # slight modification here; this is the original code
        if self.is_mnist:
            graph.update_all(fn.copy_edge('v','m'), fn.sum('m','h'))  # 'v' is related to coordinate.py
            D_invsqrt = th.pow(graph.ndata.pop('h').float().clamp(min=1),
                               -0.5).unsqueeze(-1).to(feat.device)
            #D_invsqrt = th.pow(graph.in_degrees().float().clamp(
            #    min=1), -0.5).unsqueeze(-1).to(feat.device)
            #print("in_degree : ",graph.in_degrees().shape)
        else:
            D_invsqrt = th.pow(graph.in_degrees().float().clamp(min=1),
                               -0.5).unsqueeze(-1).to(feat.device)
        #print("D_invsqrt : ",D_invsqrt.shape)
        #print("ndata : ",graph.ndata['h'].shape)

        if lambda_max is None:
            try:
                lambda_max = laplacian_lambda_max(graph)
            except BaseException:
                # if the largest eigenvalue is not found
                dgl_warning(
                    "Largest eigonvalue not found, using default value 2 for lambda_max",
                    RuntimeWarning)
                lambda_max = th.Tensor(2).to(feat.device)

        if isinstance(lambda_max, list):
            lambda_max = th.Tensor(lambda_max).to(feat.device)
        if lambda_max.dim() == 1:
            lambda_max = lambda_max.unsqueeze(-1)  # (B,) to (B, 1)

        # broadcast from (B, 1) to (N, 1)
        lambda_max = broadcast_nodes(graph, lambda_max)
        re_norm = 2. / lambda_max

        # X_0 is the raw feature, Xt refers to the concatenation of X_0, X_1, ... X_t
        Xt = X_0 = feat

        # X_1(f)
        if self._k > 1:
            h = unnLaplacian(X_0, D_invsqrt, graph)
            X_1 = - re_norm * h + X_0 * (re_norm - 1)
            # Concatenate Xt and X_1
            Xt = th.cat((Xt, X_1), 1)

        # Xi(x), i = 2...k
        for _ in range(2, self._k):
            h = unnLaplacian(X_1, D_invsqrt, graph)
            X_i = - 2 * re_norm * h + X_1 * 2 * (re_norm - 1) - X_0
            # Concatenate Xt and X_i
            Xt = th.cat((Xt, X_i), 1)
            X_1, X_0 = X_i, X_1

        # linear projection
        h = self.linear(Xt)

        # activation
        if self.activation:
            h = self.activation(h)

        #print('ChebConv.py Line163 h : ',h.shape)
        return h
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_bisenet(inputs, num_classes):\n\n ### The spatial path\n ### The number of feature maps for each convolution is not specified in the paper\n ### It was chosen here to be equal to the number of feature maps of a classification\n ### model at each corresponding stage\n # spatial_net = fluid.layers.resize_bilinear(inputs, [Image_Height/8, Image_Width/8])\n # print('spatial_net_1',spatial_net)\n\n ## spatial path\n spatial_net = ConvBlock(inputs, num_filters=64, kernel_size=3, stride=2)\n spatial_net = ConvBlock(spatial_net, num_filters=128, kernel_size=3, stride=2)\n spatial_net = ConvBlock(spatial_net, num_filters=256, kernel_size=3, stride=2)\n # print(\"spatial_net:\", spatial_net)\n\n # spatial_net = fluid.layers.resize_bilinear(spatial_net, [Image_Height/8, Image_Width/8])\n # print('spatial_net_2',spatial_net)\n ### Context path\n model = ResNet(is_test=False)\n # spatial_net = model.bottleneck_block1(inputs)\n end_points_16, end_points_32 = model.net(inputs)\n net_4 = AttentionRefinementModule(end_points_16, num_filters=512)\n net_5 = AttentionRefinementModule(end_points_32, num_filters=1024)\n global_channels = fluid.layers.reduce_mean(net_5, [2, 3], keep_dim=True)\n net_5_scaled = fluid.layers.elementwise_mul(net_5, global_channels, axis=0)\n\n ### Combining the paths\n net_4 = Upsample(net_4, scale=2)\n net_5_scaled = Upsample(net_5_scaled, scale=4)\n # print('net_4, net_5:', [net_4, net_5_scaled])\n # layers_concat = list()\n # layers_concat.append(spatial_net)\n ## layers_concat.append(net_4)\n # layers_concat.append(net_5_scaled)\n context_net = fluid.layers.concat([spatial_net, net_4, net_5_scaled], axis=1) #\n # context_net = fluid.layers.concat(input=layers_concat,axis=1)\n # print('context_net', context_net)\n # context_net = fluid.layers.concat([net_4, net_5_scaled], axis=1)\n # print('context_net', context_net)\n # context_net = fluid.layers.concat([spatial_net,context_net], axis=1)\n # print('context_net2',context_net)\n\n ### FFM\n # net = FeatureFusionModule(input_1=spatial_net, input_2=context_net, num_filters=num_classes)\n net = FeatureFusionModule(inputs=context_net, num_filters=num_classes)\n\n # print('net', net)\n\n ## [batch_zize, num_filters, 128, 64]\n\n ### Final upscaling and finish\n # net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[256, 128])\n # print('conv2d_transpose', net)\n net = batch_normalization(net, relu=True, name='conv2d_transpose_bn1')\n net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[128, 256])\n net = batch_normalization(net, relu=True, name='conv2d_transpose_bn2')\n net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[256, 512])\n net = batch_normalization(net, relu=True, name='conv2d_transpose_bn3')\n #net = fluid.layers.conv2d_transpose(input=net, num_filters=num_classes, output_size=[512, 1024])\n #net = batch_normalization(net, relu=True, name='conv2d_transpose_bn4')\n # print('net',net)\n net = fluid.layers.image_resize(net, out_shape=[512, 1024], resample='BILINEAR')\n\n net = fluid.layers.conv2d(net, num_classes, 1)\n return net", "def __init__(self, nfeat, nhid, nclass, dropout, alpha):\n super(GCN, self).__init__()\n self.dropout = dropout\n\n self.conv1 = GraphConvolutionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, not_final=True)\n \n self.add_module('conv1', self.conv1)\n\n self.conv2 = GraphConvolutionLayer(nhid, nclass, dropout=dropout, alpha=alpha, not_final=False)", "def main():\n\n dataset = 
ConvMNIST(64)\n print(dataset.get_train().x.shape)\n\n\n inputs = Value(type=tf.float32, shape=(None, 28, 28, 1), cls = None)\n targets = Value(type=tf.int64, shape=(None), cls = 10)\n learning_rate = 0.0001\n\n fc_hidden = [1024, 500]\n c_h = [\n (3, 3, 1, 32),\n (3, 3, 32, 64)\n ]\n conv_hidden = ConvHidden(conv_weights=c_h, fc_weights=fc_hidden)\n\n config = Config(inputs, targets, conv_hidden, learning_rate)\n\n network = ConvNetworkBuilder(config)\n hidden = FFConvHiddenBuilder()\n _ = network.build_network(hidden)\n\n\n train_config = TrainerConfig(\n epochs = EPOCHS, display_after = DISPLAY_STEP, \n keep_prob = KEEP_PROB,checkpoint_path=None, \n summary_path=None\n )\n\n trainer = Trainer(network, train_config)\n trainer.train(dataset)", "def trainNet():", "def compile(self):\n m, n = self.input_shape[1], self.input_shape[2]\n\n inp = Input(shape=self.input_shape, traces=True)\n self.add_layer(inp, \"DoG\")\n\n s1 = LIFNodes(shape=(18, m, n), traces=True)\n self.add_layer(s1, \"conv_1\")\n c1 = LIFNodes(shape=(18, m // 2, n // 2), traces=True)\n self.add_layer(c1, \"pool_1\")\n\n s2 = LIFNodes(shape=(24, m // 2, n // 2), traces=True)\n self.add_layer(s2, \"conv_2\")\n c2 = LIFNodes(shape=(24, m // 4, n // 4), traces=True)\n self.add_layer(c2, \"pool_2\")\n\n s3 = LIFNodes(shape=(32, m // 4, n // 4), traces=True)\n self.add_layer(s3, \"conv_3\")\n f = LIFNodes(shape=(32, 1), traces=True)\n self.add_layer(f, \"global_pool\")\n\n conv1 = Conv2dConnection(inp, s1, 5, padding=2, weight_decay=0.01,\n nu=0.01, update_rule=PostPre, decay=0.5)\n self.add_connection(conv1, \"DoG\", \"conv_1\")\n pool1 = MaxPool2dConnection(s1, c1, 2, 2, decay=0.5)\n self.add_connection(pool1, \"conv_1\", \"pool_1\")\n\n conv2 = Conv2dConnection(c1, s2, 3, padding=1, weight_decay=0.01,\n nu=0.01, update_rule=PostPre, decay=0.5)\n self.add_connection(conv2, \"pool_1\", \"conv_2\")\n pool2 = MaxPool2dConnection(s2, c2, 2, 2, decay=0.5)\n self.add_connection(pool2, \"conv_2\", \"pool_2\")\n\n conv3 = Conv2dConnection(c2, s3, 3, padding=1, weight_decay=0.01,\n nu=0.01, update_rule=PostPre, decay=0.5)\n self.add_connection(conv3, \"pool_2\", \"conv_3\")\n global_pool = MaxPool2dConnection(s3, f, (m // 4, n // 4), decay=0.5)\n self.add_connection(global_pool, \"conv_3\", \"global_pool\")\n\n monitor = NetworkMonitor(self, layers=[\"DoG\", \"conv_1\", \"pool_1\",\n \"conv_2\", \"pool_2\",\n \"conv_3\", \"global_pool\"],\n connections=[(\"DoG\", \"conv_1\"),\n (\"pool_1\", \"conv_2\"),\n (\"pool_2\", \"conv_3\")],\n state_vars=[\"w\", \"s\"])\n self.add_monitor(monitor, \"network_monitor\")\n\n return self", "def build_network(self):\n net = self.ccf_data\n\n # Reshape [length] -> [length, 1].\n net = tf.expand_dims(net, -1)\n\n # create summary object\n summary = []\n\n for i in self.hparams.conv_block_filters:\n for _ in range(self.hparams.conv_layers_per_block):\n input_shape = net.shape.as_list()\n conv_op = tf.keras.layers.Conv1D(filters=i, kernel_size=self.hparams.kernel_size, padding='same',\n activation=tf.nn.relu)\n net = conv_op(net)\n summary.append(\"Conv1D-{}-{}. Input shape: {}. Output shape: {}\".format(self.hparams.kernel_size, i, input_shape,\n net.shape.as_list()))\n pool_size = 2\n strides = 2\n max_pool = tf.keras.layers.MaxPool1D(pool_size=pool_size, strides=strides)\n net = max_pool(net)\n summary.append(\"MaxPool1D-{}. Pool Size: {}. 
Strides: {}\".format(self.hparams.kernel_size, pool_size, strides))\n\n for i in self.hparams.final_conv_num_filters:\n conv_op = tf.keras.layers.Conv1D(filters=i, kernel_size=self.hparams.kernel_size, padding='same',\n activation=tf.nn.relu)\n net = conv_op(net)\n flatten = tf.keras.layers.Flatten()\n net = flatten(net)\n\n for i in self.hparams.dense_num_layers:\n dense = tf.keras.layers.Dense(i, activation=tf.nn.relu)\n net = dense(net)\n\n # output layer\n output = tf.keras.layers.Dense(1)\n net = tf.squeeze(output(net))\n\n self.summary = \"\\n\".join(summary)\n self.predicted_rv = net", "def add_layer(self, freeze = True, add = True):\n if add:\n self.num_layers += 1\n if self.conv_dim == 1:\n new_cnn = layers.Conv1D(self.n_filters,\n (self.n_kernels),\n activation='elu',\n input_shape=(None, self.inp_shape[0], self.n_filters),\n padding=\"same\",\n name='cnn_1d_{}'.format(self.num_layers-1),\n kernel_initializer = initializers.get(self.initializer),\n bias_initializer=initializers.get(\"zeros\"),\n kernel_regularizer=self.regularizer,\n bias_regularizer=self.regularizer\n )\n elif self.conv_dim == 2:\n new_cnn = layers.Conv2D(self.n_filters,\n (self.n_kernels, self.n_kernels),\n activation='elu',\n input_shape=(None, self.inp_shape[0],self.inp_shape[1], self.n_filters),\n padding=\"same\",\n name='cnn_2d_{}'.format(self.num_layers-1),\n kernel_initializer=initializers.get(self.initializer),\n bias_initializer=initializers.get(\"zeros\"),\n kernel_regularizer=self.regularizer,\n bias_regularizer=self.regularizer\n )\n self.list_cnn.append(new_cnn)\n\n if freeze:\n for index in range(0,self.num_layers-1):\n self.list_cnn[index].trainable = False\n else:\n for index in range(0,self.num_layers-1):\n self.list_cnn[index].trainable = True", "def bn(self):\n return self.add_layer(bn)", "def __cnnNetFn(self, input, is_training):\n with tf.variable_scope('CNN'):\n conv1 = tf.layers.conv2d(input, 32, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv1_bn = tf.layers.batch_normalization(conv1)\n conv2 = tf.layers.conv2d(conv1_bn, 32, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv2_bn = tf.layers.batch_normalization(conv2)\n conv2_pool = tf.layers.max_pooling2d(conv2_bn, 2, 2, padding='SAME')\n conv2_drop = tf.layers.dropout(conv2_pool, rate=0.2, training=is_training)\n\n conv3 = tf.layers.conv2d(conv2_drop, 64, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv3_bn = tf.layers.batch_normalization(conv3)\n conv4 = tf.layers.conv2d(conv3_bn, 64, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv4_bn = tf.layers.batch_normalization(conv4)\n conv4_pool = tf.layers.max_pooling2d(conv4_bn, 2, 2, padding='SAME')\n conv4_drop = tf.layers.dropout(conv4_pool, rate=0.3, training=is_training)\n\n conv5 = tf.layers.conv2d(conv4_drop, 128, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv5_bn = tf.layers.batch_normalization(conv5)\n conv6 = tf.layers.conv2d(conv5_bn, 128, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv6_pool = tf.layers.max_pooling2d(conv6, 2, 2, padding='SAME')\n\n csnn_features = tf.stop_gradient(self.__csnn.getTrainOp(input))\n csnn_features = 
tf.identity(csnn_features)\n if self.__use_csnn:\n joint_features = tf.concat((conv6_pool, csnn_features), axis=3)\n else:\n joint_features = conv6_pool\n\n conv6_bn = tf.layers.batch_normalization(joint_features)\n\n conv7 = tf.layers.conv2d(conv6_bn, 256, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv7_bn = tf.layers.batch_normalization(conv7)\n conv8 = tf.layers.conv2d(conv7_bn, 256, 3, activation=\"elu\", padding='SAME',\n kernel_regularizer=tf.contrib.layers.l2_regularizer(self.__weight_decay))\n conv8_bn = tf.layers.batch_normalization(conv8)\n conv8_pool = tf.layers.max_pooling2d(conv8_bn, 2, 2, padding='SAME')\n conv8_drop = tf.layers.dropout(conv8_pool, rate=0.4, training=is_training)\n\n flat = tf.contrib.layers.flatten(conv8_drop)\n logits = tf.layers.dense(flat, self.__num_classes)\n return logits, csnn_features", "def create(self):\n \n \"\"\" A solo prepressing reduction network in the head \"\"\"\n print(\"pre_reduction\")\n with tf.name_scope('pre_reduction'):\n conv1 = NW.conv(self.X, 7, 7, 64, 2, 2, name='conv1')\n pool1 = NW.max_pool(conv1, 3, 3, 2, 2, name='pool1')\n norm1 = NW.lrn(pool1, 2, 2e-05, 0.75, name='norm1')\n reduction2 = NW.conv(norm1, 1, 1, 64, 1, 1, name='reduction2')\n conv2 = NW.conv(reduction2, 3, 3, 192, 1, 1,name='conv2')\n norm2 = NW.lrn(conv2, 2, 2e-05, 0.75, name='norm2')\n pool2 = NW.max_pool(norm2, 3, 3, 2, 2, name='pool2')\n \n \"\"\" 1st inception layer group \"\"\"\n print(\"icp1\")\n with tf.name_scope('icp1'):\n # branch 0\n icp1_out0 = NW.conv(pool2, 1, 1, 64, 1, 1, name='icp1_out0')\n # branch 1\n icp1_reduction1 = NW.conv(pool2, 1, 1, 96, 1, 1, name='icp1_reduction1')\n icp1_out1 = NW.conv(icp1_reduction1, 3, 3, 128, 1, 1, name='icp1_out1')\n # branch 2\n icp1_reduction2 = NW.conv(pool2, 1, 1, 16, 1, 1, name='icp1_reduction2')\n icp1_out2 = NW.conv(icp1_reduction2, 5, 5, 32, 1, 1, name='icp1_out2')\n # branch 3\n icp1_pool = NW.max_pool(pool2, 3, 3, 1, 1, name='icp1_pool')\n icp1_out3 = NW.conv(icp1_pool, 1, 1, 32, 1, 1, name='icp1_out3')\n # concat\n icp2_in = NW.concat([icp1_out0,\n icp1_out1,\n icp1_out2,\n icp1_out3], 3, 'icp2_in')\n\n \"\"\" 2nd inception layer group \"\"\"\n print(\"icp2\")\n with tf.name_scope('icp2'):\n # branch 0\n icp2_out0 = NW.conv(icp2_in, 1, 1, 128, 1, 1, name='icp2_out0')\n # branch 1\n icp2_reduction1 = NW.conv(icp2_in, 1, 1, 128, 1, 1, name='icp2_reduction1')\n icp2_out1 = NW.conv(icp2_reduction1, 3, 3, 192, 1, 1, name='icp2_out1')\n # branch 2\n icp2_reduction2 = NW.conv(icp2_in, 1, 1, 32, 1, 1, name='icp2_reduction2')\n icp2_out2 = NW.conv(icp2_reduction2, 5, 5, 96, 1, 1, name='icp2_out2')\n # branch 3\n icp2_pool = NW.max_pool(icp2_in, 3, 3, 1, 1, name='icp2_pool')\n icp2_out3 = NW.conv(icp2_pool, 1, 1, 64, 1, 1, name='icp2_out3')\n # concat\n icp2_out = NW.concat([icp2_out0,\n icp2_out1,\n icp2_out2,\n icp2_out3], 3, 'icp2_out')\n \n \"\"\" 3rd inception layer group \"\"\"\n print(\"icp3\")\n with tf.name_scope('icp3'):\n icp3_in = NW.max_pool(icp2_out, 3, 3, 2, 2, name='icp3_in')\n # branch 0\n icp3_out0 = NW.conv(icp3_in, 1, 1, 192, 1, 1, name='icp3_out0')\n # branch 1\n icp3_reduction1 = NW.conv(icp3_in, 1, 1, 96, 1, 1, name='icp3_reduction1')\n icp3_out1 = NW.conv(icp3_reduction1, 3, 3, 208, 1, 1, name='icp3_out1')\n # branch 2\n icp3_reduction2 = NW.conv(icp3_in, 1, 1, 16, 1, 1, name='icp3_reduction2')\n icp3_out2 = NW.conv(icp3_reduction2, 5, 5, 48, 1, 1, name='icp3_out2')\n # branch 3\n icp3_pool = NW.max_pool(icp3_in, 
3, 3, 1, 1, name='icp3_pool')\n icp3_out3 = NW.conv(icp3_pool, 1, 1, 64, 1, 1, name='icp3_out3')\n # concat\n icp3_out = NW.concat([icp3_out0,\n icp3_out1,\n icp3_out2,\n icp3_out3], 3, 'icp3_out')\n \n \"\"\" 1st classify branch \"\"\"\n with tf.name_scope('cls1'):\n cls1_pool = NW.avg_pool(icp3_out, 5, 5, 3, 3, padding='VALID', name='cls1_pool')\n cls1_reduction_pose = NW.conv(cls1_pool, 1, 1, 128, 1, 1, name='cls1_reduction_pose')\n cls1_fc1_pose = NW.fc(cls1_reduction_pose, 1024, name='cls1_fc1_pose')\n cls1_fc_pose_xy = NW.fc(cls1_fc1_pose, 2, relu=False, name='cls1_fc_pose_xy')\n cls1_fc_pose_ab = NW.fc(cls1_fc1_pose, 2, relu=False, name='cls1_fc_pose_ab')\n self.layers[\"cls1_fc_pose_xy\"] = cls1_fc_pose_xy\n self.layers[\"cls1_fc_pose_ab\"] = cls1_fc_pose_ab\n \n \"\"\" 4st inception layer group \"\"\"\n print(\"icp4\")\n with tf.name_scope('icp4'):\n # branch 0\n icp4_out0 = NW.conv(icp3_out, 1, 1, 160, 1, 1, name='icp4_out0')\n # branch 1\n icp4_reduction1 = NW.conv(icp3_out, 1, 1, 112, 1, 1, name='icp4_reduction1')\n icp4_out1 = NW.conv(icp4_reduction1, 3, 3, 224, 1, 1, name='icp4_out1')\n # branch 2\n icp4_reduction2 = NW.conv(icp3_out, 1, 1, 24, 1, 1, name='icp4_reduction2')\n icp4_out2 = NW.conv(icp4_reduction2, 5, 5, 64, 1, 1, name='icp4_out2')\n # branch 3\n icp4_pool = NW.max_pool(icp3_out, 3, 3, 1, 1, name='icp4_pool')\n icp4_out3 = NW.conv(icp4_pool, 1, 1, 64, 1, 1, name='icp4_out3')\n # concat\n icp4_out = NW.concat([icp4_out0,\n icp4_out1,\n icp4_out2,\n icp4_out3],3, name='icp4_out')\n\n \"\"\" 5st inception layer group \"\"\"\n print(\"icp5\")\n with tf.name_scope('icp5'):\n # branch 0\n icp5_out0 = NW.conv(icp4_out, 1, 1, 128, 1, 1, name='icp5_out0')\n # branch 1\n icp5_reduction1 = NW.conv(icp4_out, 1, 1, 128, 1, 1, name='icp5_reduction1')\n icp5_out1 = NW.conv(icp5_reduction1, 3, 3, 256, 1, 1, name='icp5_out1')\n # branch 2\n icp5_reduction2 = NW.conv(icp4_out,1, 1, 24, 1, 1, name='icp5_reduction2')\n icp5_out2 = NW.conv(icp5_reduction2, 5, 5, 64, 1, 1, name='icp5_out2')\n # branch 3\n icp5_pool = NW.max_pool(icp4_out,3, 3, 1, 1, name='icp5_pool')\n icp5_out3 = NW.conv(icp5_pool, 1, 1, 64, 1, 1, name='icp5_out3')\n # concat\n icp5_out = NW.concat([icp5_out0, \n icp5_out1, \n icp5_out2, \n icp5_out3], 3, name='icp5_out')\n \n \"\"\" 6st inception layer group \"\"\"\n print(\"icp6\")\n with tf.name_scope('icp6'):\n # branch 0\n icp6_out0 = NW.conv(icp5_out, 1, 1, 112, 1, 1, name='icp6_out0')\n # branch 1\n icp6_reduction1 = NW.conv(icp5_out, 1, 1, 144, 1, 1, name='icp6_reduction1')\n icp6_out1 = NW.conv(icp6_reduction1, 3, 3, 288, 1, 1, name='icp6_out1')\n # branch 2\n icp6_reduction2 = NW.conv(icp5_out, 1, 1, 32, 1, 1, name='icp6_reduction2')\n icp6_out2 = NW.conv(icp6_reduction2, 5, 5, 64, 1, 1, name='icp6_out2')\n # branch 3\n icp6_pool = NW.max_pool(icp5_out,3, 3, 1, 1, name='icp6_pool')\n icp6_out3 = NW.conv(icp6_pool, 1, 1, 64, 1, 1, name='icp6_out3')\n # concat\n icp6_out = NW.concat([icp6_out0,\n icp6_out1,\n icp6_out2,\n icp6_out3], 3, name='icp6_out')\n\n \"\"\" 2nd classify branch \"\"\"\n with tf.name_scope('cls2'):\n cls2_pool = NW.avg_pool(icp6_out, 5, 5, 3, 3, padding='VALID', name='cls2_pool')\n cls2_reduction_pose = NW.conv(cls2_pool, 1, 1, 128, 1, 1, name='cls2_reduction_pose')\n cls2_fc1 = NW.fc(cls2_reduction_pose, 1024, name='cls2_fc1')\n cls2_fc_pose_xy = NW.fc(cls2_fc1, 2, relu=False, name='cls2_fc_pose_xy')\n cls2_fc_pose_ab = NW.fc(cls2_fc1, 2, relu=False, name='cls2_fc_pose_ab')\n self.layers[\"cls2_fc_pose_xy\"] = cls2_fc_pose_xy\n 
self.layers[\"cls2_fc_pose_ab\"] = cls2_fc_pose_ab\n\n \"\"\" 7st inception layer group \"\"\"\n print(\"icp7\")\n with tf.name_scope('icp7'):\n # branch 0\n icp7_out0 = NW.conv(icp6_out, 1, 1, 256, 1, 1, name='icp7_out0')\n # branch 1\n icp7_reduction1 = NW.conv(icp6_out, 1, 1, 160, 1, 1, name='icp7_reduction1')\n icp7_out1 = NW.conv(icp7_reduction1, 3, 3, 320, 1, 1, name='icp7_out1')\n # branch 2\n icp7_reduction2 = NW.conv(icp6_out, 1, 1, 32, 1, 1, name='icp7_reduction2')\n icp7_out2 = NW.conv(icp7_reduction2, 5, 5, 128, 1, 1, name='icp7_out2')\n # branch 3\n icp7_pool = NW.max_pool(icp6_out, 3, 3, 1, 1, name='icp7_pool')\n icp7_out3 = NW.conv(icp7_pool, 1, 1, 128, 1, 1, name='icp7_out3')\n # concat\n icp7_out = NW.concat([icp7_out0,\n icp7_out1,\n icp7_out2,\n icp7_out3], 3, name='icp7_out')\n\n \"\"\" 8st inception layer group \"\"\"\n print(\"icp8\")\n with tf.name_scope('icp8'):\n icp8_in = NW.max_pool(icp7_out, 3, 3, 2, 2, name='icp8_in')\n # branch 0\n icp8_out0 = NW.conv(icp8_in, 1, 1, 256, 1, 1, name='icp8_out0')\n # branch 1\n icp8_reduction1 = NW.conv(icp8_in, 1, 1, 160, 1, 1, name='icp8_reduction1')\n icp8_out1 = NW.conv(icp8_reduction1, 3, 3, 320, 1, 1, name='icp8_out1')\n # branch 2\n icp8_reduction2 = NW.conv(icp8_in, 1, 1, 32, 1, 1, name='icp8_reduction2')\n icp8_out2 = NW.conv(icp8_reduction2, 5, 5, 128, 1, 1, name='icp8_out2')\n # branch 3\n icp8_pool = NW.max_pool(icp8_in, 3, 3, 1, 1, name='icp8_pool')\n icp8_out3 = NW.conv(icp8_pool, 1, 1, 128, 1, 1, name='icp8_out3')\n # concat\n icp8_out = NW.concat([icp8_out0,\n icp8_out1,\n icp8_out2,\n icp8_out3], 3, name='icp8_out')\n \n \"\"\" 9st inception layer group \"\"\"\n print(\"icp9\")\n with tf.name_scope('icp9'):\n # branch 0\n icp9_out0 = NW.conv(icp8_out, 1, 1, 384, 1, 1, name='icp9_out0')\n # branch 1\n icp9_reduction1 = NW.conv(icp8_out, 1, 1, 192, 1, 1, name='icp9_reduction1')\n icp9_out1 = NW.conv(icp9_reduction1, 3, 3, 384, 1, 1, name='icp9_out1')\n # branch 2\n icp9_reduction2 = NW.conv(icp8_out, 1, 1, 48, 1, 1, name='icp9_reduction2')\n icp9_out2 = NW.conv(icp9_reduction2, 5, 5, 128, 1, 1, name='icp9_out2')\n # branch 3\n icp9_pool = NW.max_pool(icp8_out, 3, 3, 1, 1, name='icp9_pool')\n icp9_out3 = NW.conv(icp9_pool, 1, 1, 128, 1, 1, name='icp9_out3')\n # concat\n icp9_out = NW.concat([icp9_out0,\n icp9_out1,\n icp9_out2,\n icp9_out3], 3, name='icp9_out')\n\n \"\"\" 3rd classify branch \"\"\"\n with tf.name_scope('cls3'):\n cls3_pool = NW.avg_pool(icp9_out, 7, 7, 1, 1, padding='VALID', name='cls3_pool')\n cls3_fc1_pose = NW.fc(cls3_pool, 2048, name='cls3_fc1_pose')\n cls3_fc_pose_xy = NW.fc(cls3_fc1_pose, 2, relu=False, name='cls3_fc_pose_xy')\n cls3_fc_pose_ab = NW.fc(cls3_fc1_pose, 2, relu=False, name='cls3_fc_pose_ab')\n self.layers[\"cls3_fc_pose_xy\"] = cls3_fc_pose_xy\n self.layers[\"cls3_fc_pose_ab\"] = cls3_fc_pose_ab", "def compute(self, config, budget, working_directory, *args, **kwargs):\n\n # Useful website -- https://aws.amazon.com/blogs/machine-learning/scalable-multi-node-deep-learning-training-using-gpus-in-the-aws-cloud/\n\n ''' The below is commented out because I don't want to mess with the CNN's architecture. If you want to use hyperparameter optimization to alter the architecture of the fully connected layers as well, you can use the below. 
'''\n \n #new_layer_elements = np.array([config['num_els_new_1'] if config['num_new_fc_layers'] >= 1 else None, \n # config['num_els_new_2'] if config['num_new_fc_layers'] >= 2 else None, \n # config['num_els_new_3'] if config['num_new_fc_layers'] >= 3 else None])\n \n #new_layer_elements = list(new_layer_elements[new_layer_elements != None])\n \n #old_fclayers_tofreeze = np.array([0 if config['freeze0_cat'] == 1 else None,\n # 1 if config['freeze1_cat'] == 1 else None])\n \n #old_fclayers_tofreeze = list(old_fclayers_tofreeze[old_fclayers_tofreeze != None])\n \n # Generate the model\n model = ISICNetAlex(num_new_fc_layers=0,\n new_layer_elements=[],\n dropout_rate=config['dropout_rate'],\n old_fclayers_tofreeze=[],\n )\n\n # Use GPU processing if available. \n if torch.cuda.is_available():\n model.cuda()\n \n # Build criterion and optimizer.\n criterion = torch.nn.CrossEntropyLoss()\n \n ''' The below is commented out because I don't want to mess with the optimizer. '''\n #if config['optimizer'] == 'Adam':\n # optimizer = torch.optim.Adam(model.parameters(), lr=config['lr'])\n #else:\n # optimizer = torch.optim.SGD(model.parameters(), lr=config['lr'], momentum=config['sgd_momentum'])\n optimizer = torch.optim.SGD(model.parameters(), lr=config['lr'], momentum=config['sgd_momentum'])\n \n \n # Run training loop.\n # IMPORTANT -- note that the budget parameter used in setting up HpBandSter refers to the number of epochs. It can be made to refer to other parameters, but here we chose to have it refer to epochs. \n for epoch in range(int(budget)):\n start = time.time()\n # initialize variables to monitor training and validation loss\n train_loss = 0.0\n\n ###################\n # train the model #\n ###################\n model.train()\n for batch_idx, (data, target) in enumerate(self.train_loader):\n # move to GPU if available\n if torch.cuda.is_available():\n data, target = data.cuda(), target.cuda()\n \n optimizer.zero_grad()\n output = model(data)\n loss = criterion(output, target)\n loss.backward()\n optimizer.step()\n train_loss += 1/(batch_idx+1)*(loss.data-train_loss)\n\n print(\"Epoch {} training time took {} seconds\".format(epoch,time.time()-start))\n\n train_accuracy = self.evaluate_accuracy(model, self.train_loader)\n validation_accuracy = self.evaluate_accuracy(model, self.validation_loader)\n test_accuracy = self.evaluate_accuracy(model, self.test_loader)\n\n return ({\n 'loss': 1-validation_accuracy, # remember: HpBandSter always minimizes!\n 'info': { 'test accuracy': test_accuracy,\n 'train accuracy': train_accuracy,\n 'validation accuracy': validation_accuracy,\n 'number of parameters': number_of_parameters(model),\n }\n\n })", "def run(layers):", "def compile(self):\n logger.info('Define network with dnnet of version : %s'\\\n % dnnet.__version__)\n if self.layers.size == 0:\n msg = 'NeuralNetwork has no layer.\\n Add layers before compiling.'\n raise DNNetRuntimeError(msg)\n\n parent = self.layers[0]\n self.add(OutputLayer())\n\n for i, layer in enumerate(self.layers, 1):\n logger.debug('Add %s layer.' 
% layer.get_type())\n layer.set_parent(parent)\n parent = layer\n\n logger.debug('Defined network.')", "def main():\n dataset = MNIST(BATCH_SIZE)\n \n inputs = Value(type=tf.float32, shape=(None, 784), cls=None)\n targets = Value(type=tf.int64, shape=(None), cls=10)\n fc_hidden = FCHidden(weights=[300, 150])\n\n config = Config(inputs, targets, fc_hidden, LEARNING_RATE)\n\n network_builder = FFNetworkBuilder(config)\n hidden_builder = FFHiddenBuilder()\n _ = network_builder.build_network(hidden_builder)\n\n train_config = TrainerConfig(\n epochs = EPOCHS, display_after = DISPLAY_STEP, \n keep_prob = KEEP_PROB,checkpoint_path=None, \n summary_path=None\n )\n trainer = Trainer(network_builder, train_config)\n trainer.train(dataset)", "def disp_net(target_image, is_training=True):\n batch_norm_params = {'is_training': is_training}\n h = target_image.get_shape()[1].value\n w = target_image.get_shape()[2].value\n inputs = target_image\n with tf.variable_scope('depth_net') as sc:\n end_points_collection = sc.original_name_scope + '_end_points'\n normalizer_fn = slim.batch_norm if FLAGS.use_bn else None\n normalizer_params = batch_norm_params if FLAGS.use_bn else None\n with slim.arg_scope([slim.conv2d, slim.conv2d_transpose],\n normalizer_fn=normalizer_fn,\n normalizer_params=normalizer_params,\n weights_regularizer=slim.l2_regularizer(WEIGHT_REG),\n activation_fn=tf.nn.relu,\n outputs_collections=end_points_collection):\n cnv1 = slim.conv2d(inputs, 32, [7, 7], stride=2, scope='cnv1')\n cnv1b = slim.conv2d(cnv1, 32, [7, 7], stride=1, scope='cnv1b')\n cnv2 = slim.conv2d(cnv1b, 64, [5, 5], stride=2, scope='cnv2')\n cnv2b = slim.conv2d(cnv2, 64, [5, 5], stride=1, scope='cnv2b')\n\n cnv3 = slim.conv2d(cnv2b, 128, [3, 3], stride=2, scope='cnv3')\n cnv3b = slim.conv2d(cnv3, 128, [3, 3], stride=1, scope='cnv3b')\n cnv4 = slim.conv2d(cnv3b, 256, [3, 3], stride=2, scope='cnv4')\n cnv4b = slim.conv2d(cnv4, 256, [3, 3], stride=1, scope='cnv4b')\n cnv5 = slim.conv2d(cnv4b, 512, [3, 3], stride=2, scope='cnv5')\n cnv5b = slim.conv2d(cnv5, 512, [3, 3], stride=1, scope='cnv5b')\n cnv6 = slim.conv2d(cnv5b, 512, [3, 3], stride=2, scope='cnv6')\n cnv6b = slim.conv2d(cnv6, 512, [3, 3], stride=1, scope='cnv6b')\n cnv7 = slim.conv2d(cnv6b, 512, [3, 3], stride=2, scope='cnv7')\n cnv7b = slim.conv2d(cnv7, 512, [3, 3], stride=1, scope='cnv7b')\n\n up7 = slim.conv2d_transpose(cnv7b, 512, [3, 3], stride=2, scope='upcnv7')\n # There might be dimension mismatch due to uneven down/up-sampling.\n up7 = _resize_like(up7, cnv6b)\n i7_in = tf.concat([up7, cnv6b], axis=3)\n icnv7 = slim.conv2d(i7_in, 512, [3, 3], stride=1, scope='icnv7')\n\n up6 = slim.conv2d_transpose(icnv7, 512, [3, 3], stride=2, scope='upcnv6')\n up6 = _resize_like(up6, cnv5b)\n i6_in = tf.concat([up6, cnv5b], axis=3)\n icnv6 = slim.conv2d(i6_in, 512, [3, 3], stride=1, scope='icnv6')\n\n up5 = slim.conv2d_transpose(icnv6, 256, [3, 3], stride=2, scope='upcnv5')\n up5 = _resize_like(up5, cnv4b)\n i5_in = tf.concat([up5, cnv4b], axis=3)\n icnv5 = slim.conv2d(i5_in, 256, [3, 3], stride=1, scope='icnv5')\n\n up4 = slim.conv2d_transpose(icnv5, 128, [3, 3], stride=2, scope='upcnv4')\n i4_in = tf.concat([up4, cnv3b], axis=3)\n icnv4 = slim.conv2d(i4_in, 128, [3, 3], stride=1, scope='icnv4')\n disp4 = (slim.conv2d(icnv4, 1, [3, 3], stride=1, activation_fn=tf.sigmoid,\n normalizer_fn=None, scope='disp4')\n * DISP_SCALING + MIN_DISP)\n disp4_up = tf.image.resize_bilinear(disp4, [np.int(h / 4), np.int(w / 4)])\n\n up3 = slim.conv2d_transpose(icnv4, 64, [3, 3], stride=2, 
scope='upcnv3')\n i3_in = tf.concat([up3, cnv2b, disp4_up], axis=3)\n icnv3 = slim.conv2d(i3_in, 64, [3, 3], stride=1, scope='icnv3')\n disp3 = (slim.conv2d(icnv3, 1, [3, 3], stride=1, activation_fn=tf.sigmoid,\n normalizer_fn=None, scope='disp3')\n * DISP_SCALING + MIN_DISP)\n disp3_up = tf.image.resize_bilinear(disp3, [np.int(h / 2), np.int(w / 2)])\n\n up2 = slim.conv2d_transpose(icnv3, 32, [3, 3], stride=2, scope='upcnv2')\n i2_in = tf.concat([up2, cnv1b, disp3_up], axis=3)\n icnv2 = slim.conv2d(i2_in, 32, [3, 3], stride=1, scope='icnv2')\n disp2 = (slim.conv2d(icnv2, 1, [3, 3], stride=1, activation_fn=tf.sigmoid,\n normalizer_fn=None, scope='disp2')\n * DISP_SCALING + MIN_DISP)\n disp2_up = tf.image.resize_bilinear(disp2, [h, w])\n\n up1 = slim.conv2d_transpose(icnv2, 16, [3, 3], stride=2, scope='upcnv1')\n i1_in = tf.concat([up1, disp2_up], axis=3)\n icnv1 = slim.conv2d(i1_in, 16, [3, 3], stride=1, scope='icnv1')\n disp1 = (slim.conv2d(icnv1, 1, [3, 3], stride=1, activation_fn=tf.sigmoid,\n normalizer_fn=None, scope='disp1')\n * DISP_SCALING + MIN_DISP)\n\n end_points = slim.utils.convert_collection_to_dict(end_points_collection)\n return [disp1, disp2, disp3, disp4], end_points", "def create_network(outfname_train, outfname_deploy, N_conv_layers=3, N_fully_connected_layers=3, batch_size_train=100,batch_size_test=100, source_train='datatrain', source_test='datatest', num_output_conv=32, kernel_size=3, weight_std_conv=0.01, activation='relu', num_output_fully_connected=64, weight_std_fully_connected=0.01, do_batchnorm=1, do_last_batchnorm=1, scale=1,shift=0, weight_std_affine=0, use_softmax=0, num_classes=3, input_dim_1=1,input_dim_2=3, input_dim_3=32, input_dim_4=32, use_lowrank=1, T_dimension=None, softmax_weight=1, lowrank_weight=1, data_type='lmdb'):\n\n if T_dimension==None:\n T_dimension = num_classes\n \n train_txt = \"\"\n deploy_txt = \"\"\n\n train_txt += data_layer(name='data_layer', source_train=source_train, batch_size_train=batch_size_train, source_test=source_test, batch_size_test=batch_size_test, data_type=data_type)\n\n deploy_txt += deploy_data_layer(name='data_layer', input_dim_1=input_dim_1, input_dim_2=input_dim_2, input_dim_3=input_dim_3, input_dim_4=input_dim_4)\n\n last_name = 'data'\n\n ####### CONVOLUTIONAL LAYERS\n for i in range(N_conv_layers):\n conv_name = 'conv%i' % (i+1)\n top = conv_name\n\n conv_txt = convolution_layer(conv_name, last_name, num_output=num_output_conv, kernel_size=kernel_size, weight_std=weight_std_conv)\n\n train_txt += conv_txt\n deploy_txt += conv_txt\n \n if activation == 'pool':\n pool_name = 'pool%i' % (i+1)\n activation_txt = pooling_layer(pool_name, conv_name)\n last_name = pool_name\n elif activation == 'relu':\n relu_name = 'relu%i' % (i+1)\n activation_txt = relu_layer(relu_name, conv_name)\n last_name = conv_name\n else:\n raise Exception('Unknown activation')\n \n\n train_txt += activation_txt\n deploy_txt += activation_txt\n\n \n\n ####### FULLY CONNECTED LAYERS\n for i in range(N_fully_connected_layers):\n fully_connected_name = 'ip%i' % (i+1)\n\n fully_connected_txt = fully_connected_layer(fully_connected_name, last_name, num_output=num_output_fully_connected, weight_std=weight_std_fully_connected)\n\n relu_name = 'iprelu%i' % (i+1)\n relu_txt = relu_layer(relu_name, fully_connected_name)\n\n batchnorm_name = 'ipbn%i' % (i+1)\n\n if do_batchnorm and i<N_fully_connected_layers-1:\n batchnorm_txt_train = batchnorm_layer(batchnorm_name, fully_connected_name, use_global_stats=False, phase='TRAIN', deploy=False)\n 
batchnorm_txt_test = batchnorm_layer(batchnorm_name, fully_connected_name, use_global_stats=True, phase='TEST', deploy=False)\n \n batchnorm_txt_deploy = batchnorm_layer(batchnorm_name, fully_connected_name, deploy=True)\n scale_txt = ''\n \n last_name = batchnorm_name\n \n elif do_last_batchnorm:\n batchnorm_txt_train = batchnorm_layer(batchnorm_name, fully_connected_name, use_global_stats=False, phase='TRAIN', deploy=False)\n batchnorm_txt_test = batchnorm_layer(batchnorm_name, fully_connected_name, use_global_stats=True, phase='TEST', deploy=False)\n \n batchnorm_txt_deploy = batchnorm_layer(batchnorm_name, fully_connected_name, deploy=True)\n scale_name = 'ipbnscaled%i' % (i+1)\n\n scale_txt = scale_layer(scale_name, batchnorm_name, scale=scale,shift=shift)\n \n last_name = scale_name\n else:\n batchnorm_txt_train = ''\n batchnorm_txt_test = ''\n batchnorm_txt_deploy = ''\n last_name = fully_connected_name\n scale_txt = ''\n \n train_txt += fully_connected_txt + relu_txt + batchnorm_txt_train + batchnorm_txt_test + scale_txt\n deploy_txt += fully_connected_txt + relu_txt + batchnorm_txt_deploy + scale_txt\n \n\n\n\n\n # add affine layer on top of funnel layer \n affine_name = 'affine' # (matrix T)\n affine_txt = fully_connected_layer(affine_name, last_name, num_output=T_dimension, weight_std=weight_std_affine)\n\n train_txt += affine_txt\n deploy_txt += affine_txt\n \n # apply lowrank loss to output of 'affine' layer [conv - fully_connected -\n # funnel - affine - lowrank] the lowrank output is located in affine. The\n # 'funnel' layer is used to allow softmax to separate between classes before\n # LRT\n if use_lowrank:\n lowrank_txt = lowrank_layer('lowrank_loss', affine_name, loss_weight=lowrank_weight)\n train_txt += lowrank_txt\n\n if use_softmax:\n # apply softmax loss to output of funnel layer [conv - fully_connected - funnel - softmax]\n # add one affine layer to reduce from num_output_fully_connected to num_classes\n\n # apr 4. 
trying on top of fully connected layer\n funnel_name = 'funnel'\n funnel_txt = fully_connected_layer(funnel_name, last_name, num_output=num_classes, weight_std=weight_std_fully_connected)\n\n train_txt += funnel_txt\n deploy_txt += funnel_txt\n\n softmax_txt = softmax_layer('softmax_loss', funnel_name, loss_weight=softmax_weight)\n train_txt += softmax_txt\n\n write_to_file(outfname_train, train_txt)\n write_to_file(outfname_deploy, deploy_txt)\n\n \n return train_txt, deploy_txt", "def __init__(self):\n super(FcNet, self).__init__()\n\n # get size of some layers\n start_num = 48\n max_num = 200\n mid_num = 50\n end_num = 8\n \n # define regressor\n self.regress = nn.Sequential(\n nn.Linear(start_num,max_num,bias=True),\n nn.Sigmoid(),\n nn.Linear(max_num,mid_num,bias = True),\n nn.Sigmoid(),\n nn.Linear(mid_num,end_num, bias = True),\n nn.Sigmoid()\n )", "def _model_definition(self, net):\n \n # Input filtering and downsampling with max pooling\n print(net.shape) #channels must be specified first otherwise keras assumes channels last\n print('resnet17_scp')\n \n net = Conv2D( filters=128, kernel_size=5, activation=None, padding='same', \n data_format=\"channels_first\", input_shape=(1, 100, 100))(net)\n net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels\n net = LeakyReLU()(net)\n net= MaxPooling2D(pool_size=(2,2))(net)\n \n net = Conv2D( filters=64, kernel_size=3, activation=None, padding='same', data_format=\"channels_first\")(net)\n net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels\n net = LeakyReLU()(net)\n net= MaxPooling2D(pool_size=(2,2))(net)\n \n net = Conv2D( filters=64, kernel_size=3,activation=None, padding='same', data_format=\"channels_first\")(net)\n net = BatchNormalization(axis=1)(net) #axis is set to the dimension which hold the colour channels \n net = LeakyReLU()(net)\n net= MaxPooling2D(pool_size=(2,2))(net)\n \n\n\n \n return net", "def nn_layer(input_tensor, input_dim, output_dim, layer_name, act=tf.nn.relu,method = \"xavier\"):\n # Adding a name scope ensures logical grouping of the layers in the graph.\n with tf.name_scope(layer_name):\n # This Variable will hold the state of the weights for the layer\n with tf.name_scope('weights'):\n weights = weight_variable([input_dim, output_dim],method = method,name = layer_name)\n variable_summaries(weights, layer_name + '/weights')\n with tf.name_scope('Wx_plus_b'):\n preactivate = tf.matmul(input_tensor, weights)\n tf.histogram_summary(layer_name + '/pre_activations', preactivate)\n if act is None:\n activations = preactivate\n else:\n activations = act(preactivate, 'activation')\n tf.histogram_summary(layer_name + '/activations', activations)\n return activations", "def efficientnetb7(unfreeze=False):\n model = EfficientNet.from_pretrained('efficientnet-b7')\n num_ftrs = model._fc.in_features\n model._fc = nn.Linear(num_ftrs, 8)\n\n class Net(nn.Module):\n\n\n def __init__(self):\n super().__init__()\n self.model = model\n\n\n def forward(self, x):\n x = self.model(x)\n x = nn.functional.softmax(x, dim=1)\n return x\n\n net = Net()\n\n for param in model.parameters():\n param.requires_grad = unfreeze\n for param in model._fc.parameters():\n param.requires_grad = True\n return net", "def __init__(self, cell_index, stimulus_type, conv_layers=[(12, 9, 9), (12, 9, 9)], dense_layer=64,\n loss='poisson_loss', optimizer='adam', weight_init='normal', l2_reg=0., dropout=0.5, mean_adapt=False):\n\n self.stim_shape = (40, 50, 50)\n\n # 
build the model\n with notify('Building convnet'):\n\n self.model = Sequential()\n\n # convolutional layers\n for ix, layer in enumerate(conv_layers):\n\n # get parameters for this layer\n num_filters, row_size, col_size = layer\n\n # convolutional layer\n if ix == 0:\n self.model.add(Convolution2D(num_filters, row_size, col_size,\n input_shape=self.stim_shape, init=weight_init,\n border_mode='same', subsample=(1,1),\n W_regularizer=l2(l2_reg), activation='relu'))\n\n else:\n self.model.add(Convolution2D(num_filters, row_size, col_size,\n input_shape=self.stim_shape, init=weight_init,\n border_mode='same', subsample=(1,1),\n W_regularizer=l2(l2_reg), activation='relu'))\n\n # max pooling layer\n self.model.add(MaxPooling2D(pool_size=(2, 2), ignore_border=True))\n\n # dropout\n self.model.add(Dropout(dropout))\n\n # flatten\n self.model.add(Flatten())\n\n # Add dense (affine) layer with relu activation\n self.model.add(Dense(dense_layer, init=weight_init, W_regularizer=l2(l2_reg), activation='relu'))\n self.model.add(Dropout(dropout))\n\n # Add a final dense (affine) layer with softplus activation\n self.model.add(Dense(1, init=weight_init, W_regularizer=l2(l2_reg), activation='softplus'))\n\n # save architecture string (for markdown file)\n self.architecture = '\\n'.join(['Convolutional layers {}'.format(conv_layers),\n '{} filters in the second (fully connected) layer'.format(dense_layer),\n 'weight initialization: {}'.format(weight_init),\n 'l2 regularization: {}'.format(l2_reg),\n 'stimulus shape: {}'.format(self.stim_shape)])\n\n # compile\n super().__init__(cell_index, stimulus_type, loss, optimizer, mean_adapt)", "def hnet_bsd(args, x, train_phase):\n # Sure layers weight & bias\n order = 1\n nf = int(args.n_filters)\n nf2 = int((args.filter_gain)*nf)\n nf3 = int((args.filter_gain**2)*nf)\n nf4 = int((args.filter_gain**3)*nf)\n bs = args.batch_size\n fs = args.filter_size\n nch = args.n_channels\n nr = args.n_rings\n tp = train_phase\n std = args.std_mult\n\n x = tf.reshape(x, shape=[bs,args.height,args.width,1,1,3])\n fm = {}\n\n # Convolutional Layers\n with tf.name_scope('stage1') as scope:\n cv1 = hl.conv2d(x, nf, fs, stddev=std, padding='SAME', n_rings=nr, name='1_1')\n cv1 = hl.non_linearity(cv1, name='1_1')\n\n cv2 = hl.conv2d(cv1, nf, fs, stddev=std, padding='SAME', n_rings=nr, name='1_2')\n cv2 = hl.batch_norm(cv2, tp, name='bn1')\n mags = to_4d(hl.stack_magnitudes(cv2))\n fm[1] = linear(mags, 1, 1, name='sw1')\n\n with tf.name_scope('stage2') as scope:\n cv3 = hl.mean_pooling(cv2, ksize=(1,2,2,1), strides=(1,2,2,1))\n cv3 = hl.conv2d(cv3, nf2, fs, stddev=std, padding='SAME', n_rings=nr, name='2_1')\n cv3 = hl.non_linearity(cv3, name='2_1')\n\n cv4 = hl.conv2d(cv3, nf2, fs, stddev=std, padding='SAME', n_rings=nr, name='2_2')\n cv4 = hl.batch_norm(cv4, train_phase, name='bn2')\n mags = to_4d(hl.stack_magnitudes(cv4))\n fm[2] = linear(mags, 1, 1, name='sw2')\n\n with tf.name_scope('stage3') as scope:\n cv5 = hl.mean_pooling(cv4, ksize=(1,2,2,1), strides=(1,2,2,1))\n cv5 = hl.conv2d(cv5, nf3, fs, stddev=std, padding='SAME', n_rings=nr, name='3_1')\n cv5 = hl.non_linearity(cv5, name='3_1')\n\n cv6 = hl.conv2d(cv5, nf3, fs, stddev=std, padding='SAME', n_rings=nr, name='3_2')\n cv6 = hl.batch_norm(cv6, train_phase, name='bn3')\n mags = to_4d(hl.stack_magnitudes(cv6))\n fm[3] = linear(mags, 1, 1, name='sw3')\n\n with tf.name_scope('stage4') as scope:\n cv7 = hl.mean_pooling(cv6, ksize=(1,2,2,1), strides=(1,2,2,1))\n cv7 = hl.conv2d(cv7, nf4, fs, stddev=std, padding='SAME', 
n_rings=nr, name='4_1')\n cv7 = hl.non_linearity(cv7, name='4_1')\n\n cv8 = hl.conv2d(cv7, nf4, fs, stddev=std, padding='SAME', n_rings=nr, name='4_2')\n cv8 = hl.batch_norm(cv8, train_phase, name='bn4')\n mags = to_4d(hl.stack_magnitudes(cv8))\n fm[4] = linear(mags, 1, 1, name='sw4')\n\n with tf.name_scope('stage5') as scope:\n cv9 = hl.mean_pooling(cv8, ksize=(1,2,2,1), strides=(1,2,2,1))\n cv9 = hl.conv2d(cv9, nf4, fs, stddev=std, padding='SAME', n_rings=nr, name='5_1')\n cv9 = hl.non_linearity(cv9, name='5_1')\n\n cv10 = hl.conv2d(cv9, nf4, fs, stddev=std, padding='SAME', n_rings=nr, name='5_2')\n cv10 = hl.batch_norm(cv10, train_phase, name='bn5')\n mags = to_4d(hl.stack_magnitudes(cv10))\n fm[5] = linear(mags, 1, 1, name='sw5')\n\n fms = {}\n side_preds = []\n xsh = tf.shape(x)\n with tf.name_scope('fusion') as scope:\n for key in fm.keys():\n fms[key] = tf.image.resize_images(fm[key], tf.stack([xsh[1], xsh[2]]))\n side_preds.append(fms[key])\n side_preds = tf.concat(axis=3, values=side_preds)\n\n fms['fuse'] = linear(side_preds, 1, 1, bias_init=0.01, name='side_preds')\n return fms", "def __init__(self, num_blocks=3, layers_per_block=2, base_num_channels=16,\n upconv=False, fc_layer_sizes=None, upconv_reshape_size=None,\n conditioning_layer_sizes=None, channels_out=3, alpha=0.3,\n conditioning_postprocessing=None,\n final_sigmoid=False, conditioning_type=\"mult_and_add\",\n kernel_initializer_mode=\"fan_in\"):\n\n super(ConditionalConvnet, self).__init__()\n self._num_blocks = num_blocks\n self._layers_per_block = layers_per_block\n self._base_num_channels = base_num_channels\n self._channels_out = channels_out\n self._upconv = upconv\n self._fc_layer_sizes = fc_layer_sizes\n self._upconv_reshape_size = upconv_reshape_size\n self._final_sigmoid = final_sigmoid\n if upconv_reshape_size is not None and ((not upconv) or\n (fc_layer_sizes is None)):\n raise ValueError(\"upconv_reshape_size should be supplied only if \"\n \"upconv=True and fc_layer_sizes is not None.\")\n self._conditioning_layer_sizes = conditioning_layer_sizes\n self._nonlinearity = lambda x: tf.nn.leaky_relu(x, alpha)\n if conditioning_postprocessing is not None:\n self._conditioning_postprocessing = conditioning_postprocessing()\n else:\n self._conditioning_postprocessing = None\n if conditioning_type not in [\"mult_and_add\", \"concat\", \"input\"]:\n raise ValueError(\"Unknown conditioning_type {}\".format(conditioning_type))\n self._conditioning_type = conditioning_type\n scale_factor = 2. / (1. 
+ alpha**2)\n self._kernel_initializer = tf.keras.initializers.VarianceScaling(\n mode=kernel_initializer_mode, scale=scale_factor)", "def _build(self):\n with tf.variable_scope (self.name + '_architecutre') as scope:\n images_square = unflatten_layer ( self.images )\n visualize_images(images_square)\n\n # Conv Layer 1\n conv1_out, params = conv_2d_layer ( input = images_square,\n neurons = CONV_1_N,\n filter_size = CONV_1_FILT,\n name = 'enc_conv_1',\n visualize = True )\n process_params(params, name = self.name)\n e1_params = params\n pool1_out = max_pool_2d_layer ( input = conv1_out, name = 'enc_pool_1')\n # lrn1_out = local_response_normalization_layer (pool1_out, name = 'lrn_1' )\n\n # Conv Layer 2\n conv2_out, params = conv_2d_layer ( input = pool1_out,\n neurons = CONV_2_N,\n filter_size = CONV_2_FILT,\n name = 'enc_conv_2' )\n process_params(params, name = self.name)\n e2_params = params\n pool2_out = max_pool_2d_layer ( input = conv2_out, name = 'enc_pool_2')\n # lrn2_out = local_response_normalization_layer (pool2_out, name = 'lrn_2' )\n\n flattened = flatten_layer(pool2_out)\n\n # Dropout Layer 1 \n flattened_dropout = dropout_layer ( input = flattened,\n prob = self.dropout_prob,\n name = 'enc_dropout_1') \n\n # Dot Product Layer 1\n fc1_out, params = dot_product_layer ( input = flattened_dropout,\n neurons = HIDDEN_1,\n name = 'enc_dot_1')\n process_params(params, name = self.name)\n e3_params = params \n\n # Dropout Layer 2 \n fc1_out_dropout = dropout_layer ( input = fc1_out,\n prob = self.dropout_prob,\n name = 'enc_dropout_2')\n # Dot Product Layer 2\n fc2_out, params = dot_product_layer ( input = fc1_out_dropout, \n neurons = HIDDEN_2,\n name = 'enc_dot_2')\n process_params(params, name = self.name)\n e4_params = params \n\n # Dropout Layer 3 \n fc2_out_dropout = dropout_layer ( input = fc2_out,\n prob = self.dropout_prob,\n name = 'enc_dropout_3')\n \n # Dot Product Layer 2\n self.codeword, params = dot_product_layer ( input = fc2_out_dropout, \n neurons = CODEWORD_LENGTH,\n activation = CODE_ACTIVATION,\n name = 'enc_dot_2')\n process_params(params, name = self.name)\n process_codeword_normalization_regularizer(self.codeword, \n coeff = AUTOENCODER_CODEWORD_COEFF,\n name = self.name)\n e5_params = params \n # tf.summary.histogram('codewords', self.codeword)\n # self.hash = threshold_layer ( input = self.codeword,\n # name = 'hash')\n # process_hash_regularizer(self.codeword, coeff = AUTOENCODER_HASH_COEFF,\n # name = self.name)\n\n # Decoder ... 
\n decoder_1_out, params = dot_product_layer ( input = self.codeword, \n neurons = HIDDEN_2,\n params = [tf.transpose(e5_params[0]), None],\n name = 'decoder_dot_1')\n d1_params = params\n process_params([params[1]], name = self.name)\n \n dec_1_out_dropout = dropout_layer ( input = decoder_1_out,\n prob = self.dropout_prob,\n name = 'dec_dropout_1')\n\n decoder_2_out, params = dot_product_layer ( input = dec_1_out_dropout, \n neurons = HIDDEN_1,\n params = [tf.transpose(e4_params[0]), None],\n name = 'decoder_dot_2')\n d2_params = params\n process_params([params[1]], name = self.name)\n \n # dropout 2\n dec_2_out_dropout = dropout_layer ( input = decoder_2_out,\n prob = self.dropout_prob,\n name = 'dec_dropout_2')\n\n decoder_3_out, params = dot_product_layer ( input = dec_2_out_dropout, \n neurons = 1250,\n params = [tf.transpose(e3_params[0]), None],\n name = 'decoder_dot_3')\n d3_params = params\n process_params([params[1]], name = self.name)\n\n # DeConv Layer 1\n # The output shapes need to be changed according to architecture.\n\n dec_3_square = unflatten_layer ( decoder_3_out, channels = CONV_2_N )\n upsample_1 = upsampling_layer (dec_3_square, size = (10,10), name = 'dec_upsampling_1')\n\n deconv1_out, params = deconv_2d_layer ( input = upsample_1,\n neurons = CONV_1_N,\n filter_size = CONV_2_FILT,\n output_shape = (12,12),\n # n_outs = MINI_BATCH_SIZE,\n stride = (1,1,1,1), \n params = [e2_params[0], None], \n name = 'dec_deconv_1' )\n\n process_params([params[1]], name = self.name)\n d4_params = params\n\n # DeConv Layer 2\n upsample_2 = upsampling_layer (deconv1_out, size = (24,24), name = 'dec_upsampling_2')\n decoded_images_square, params = deconv_2d_layer ( input = upsample_2,\n neurons = 1,\n filter_size = CONV_1_FILT,\n stride = (1,1,1,1),\n output_shape = (28,28),\n # n_outs = MINI_BATCH_SIZE, \n params = [e1_params[0], None], \n activation = 'tanh', \n name = 'dec_deconv_2' )\n \n process_params([params[1]], name = self.name)\n d5_params = params \n \n self.decoded = flatten_layer (decoded_images_square, in_shp = [-1, 28, 28, 1])\n visualize_images(decoded_images_square, name = 'decoded')\n # This is because transpose don't initialize.\n self.params = [ [e5_params[0], d1_params[1] ],\n [e4_params[0], d2_params[1] ],\n [e3_params[0], d3_params[1] ],\n [e2_params[0], d4_params[1] ],\n [e1_params[0], d5_params[1] ] ]\n\n with tf.variable_scope (self.name + '_objectives') as scope: \n with tf.variable_scope( self.name + '_decoder_error') as scope:\n reconstruction_error = rmse(self.images, self.decoded) \n tf.add_to_collection( self.name + '_objectives', reconstruction_error ) \n tf.summary.scalar('reconstruction_error', reconstruction_error)\n\n self._cook_optimizer( \n lr = AUTOENCODER_LR, \n optimizer = AUTOENCODER_OPTIMIZER,\n l1_coeff = AUTOENCODER_L1_COEFF,\n l2_coeff = AUTOENCODER_WEIGHT_DECAY_COEFF)", "def all_views_conv_layer(input_layer,network_type, layer_name, number_of_filters=32, filter_size=(3, 3), stride=(1, 1),\r\n padding='VALID', biases_initializer=tf.zeros_initializer()):\r\n if network_type == \"CC\":\r\n\r\n\r\n input_l_cc, input_r_cc = input_layer\r\n\r\n #with tf.variable_scope(layer_name + \"_CC\") as cc_cope:\r\n h_l_cc = tf.contrib.layers.convolution2d(inputs=input_l_cc, num_outputs=number_of_filters,\r\n kernel_size=filter_size, stride=stride, padding=padding,\r\n weights_initializer=tf.contrib.layers.xavier_initializer(), biases_initializer=biases_initializer)\r\n h_r_cc = tf.contrib.layers.convolution2d(inputs=input_r_cc, 
num_outputs=number_of_filters,\r\n kernel_size=filter_size, stride=stride, padding=padding, reuse=False,\r\n weights_initializer=tf.contrib.layers.xavier_initializer(), biases_initializer=biases_initializer)\r\n\r\n\r\n h = (h_l_cc, h_r_cc)\r\n\r\n return h\r\n\r\n else:\r\n input_l_mlo, input_r_mlo = input_layer\r\n\r\n # with tf.variable_scope(layer_name + \"_CC\") as cc_cope:\r\n h_l_mlo = tf.contrib.layers.convolution2d(inputs=input_l_mlo, num_outputs=number_of_filters,\r\n kernel_size=filter_size, stride=stride, padding=padding,\r\n weights_initializer=tf.contrib.layers.xavier_initializer(),\r\n biases_initializer=biases_initializer)\r\n h_r_mlo = tf.contrib.layers.convolution2d(inputs=input_r_mlo, num_outputs=number_of_filters,\r\n kernel_size=filter_size, stride=stride, padding=padding, reuse=False,\r\n weights_initializer=tf.contrib.layers.xavier_initializer(),\r\n biases_initializer=biases_initializer)\r\n\r\n h = (h_l_mlo, h_r_mlo)\r\n\r\n return h", "def __init__(self):\n super(BCEDiceLoss, self).__init__()\n self.bce = nn.BCEWithLogitsLoss()\n self.contour = active_contour_loss()", "def mgcNetArchNin(outLayer, l2_val, **kwargs):\n\n def_vals = {\"input_img_rows\" : 72,\n \"input_img_cols\" : 72,\n \"channels\" : 1,\n \"nb_classes\" : 13\n } # default parameters value\n\n for k, v in def_vals.items():\n kwargs.setdefault(k, v)\n\n input_img_rows = kwargs['input_img_rows']\n input_img_cols = kwargs['input_img_cols']\n channels = kwargs['channels']\n nb_classes = kwargs['nb_classes']\n\n \n # Input: 72 x 72 x 1\n img_shape = layers.Input(shape = (input_img_rows, input_img_cols, channels))\n\n # Layer 1\n #------------------------\n conv1 = layers.Conv2D(filters=16, kernel_size=(5, 5), padding='valid', activation='relu')(img_shape)\n conv1 = layers.Conv2D(filters=16, kernel_size=(1, 1), activation='relu')(conv1)\n conv1 = layers.Conv2D(filters=16, kernel_size=(1, 1), activation='relu')(conv1)\n conv1 = layers.MaxPooling2D(pool_size=(2, 2))(conv1)\n conv1 = layers.Dropout(0.4)(conv1)\n\n # Layer 2\n #------------------------\n conv2 = layers.Conv2D(filters=32, kernel_size=(5, 5), padding='same', activation='relu')(conv1)\n conv2 = layers.Conv2D(filters=32, kernel_size=(1, 1), activation='relu')(conv2)\n conv2 = layers.Conv2D(filters=32, kernel_size=(1, 1), activation='relu')(conv2)\n conv2 = layers.MaxPooling2D(pool_size=(2, 2))(conv2)\n conv2 = layers.Dropout(0.4)(conv2)\n\n # Layer 3\n #------------------------\n conv3 = layers.Conv2D(filters=64, kernel_size=(3, 3), padding='same', activation='relu')(conv2)\n conv3 = layers.Conv2D(filters=64, kernel_size=(1, 1), activation='relu')(conv3)\n conv3 = layers.Conv2D(filters=64, kernel_size=(1, 1), activation='relu')(conv3)\n conv3 = layers.MaxPooling2D(pool_size=(2, 2))(conv3)\n conv3 = layers.Dropout(0.4)(conv3)\n\n # Layer 4\n #------------------------\n #conv4 = layers.Conv2D(filters=128, kernel_size=(2, 2), padding='same', activation='relu')(conv3)\n #conv4 = layers.Conv2D(filters=128, kernel_size=(1, 1), activation='relu')(conv4)\n #conv4 = layers.Conv2D(filters=128, kernel_size=(1, 1), activation='relu')(conv4)\n #conv4 = layers.MaxPooling2D(pool_size=(2, 2))(conv4)\n #conv4 = layers.Dropout(0.4)(conv4)\n\n # Layer 5\n #------------------------\n output = layers.Conv2D(filters=128, kernel_size=(2, 2), padding='same', activation='relu')(conv3) # skip layer 4\n output = layers.Conv2D(filters=64, kernel_size=(1, 1), activation='relu')(output)\n output = layers.Conv2D(filters=32, kernel_size=(1, 1))(output)\n output = 
layers.MaxPooling2D(pool_size=(2, 2))(output)\n output = layers.Dropout(0.4)(output)\n\n \n # FC Layer\n #------------------------\n outputmlp = layers.Flatten()(output)\n outputmlp = layers.Dense(64, activation = 'relu')(outputmlp)\n outputmlp = layers.Dropout(0.5)(outputmlp)\n\n predictionsMlp = layers.Dense(nb_classes, activation='softmax')(outputmlp)\n \n \n # global averaging\n weight_decay=1E-4\n concat_axis = 1\n \n x = BatchNormalization(axis=concat_axis,\n gamma_regularizer=regularizers.l2(weight_decay),\n beta_regularizer=regularizers.l2(weight_decay))(output)\n x = Activation('relu')(x)\n x = layers.Dropout(0.4)(x)\n x = GlobalAveragePooling2D(data_format=K.image_data_format())(x)\n \n predictionsGloAvg = layers.Dense(nb_classes,\n activation='softmax',\n kernel_regularizer=regularizers.l2(weight_decay),\n bias_regularizer=regularizers.l2(weight_decay))(x)\n \n if outLayer == \"gloAvg\":\n predictions = predictionsGloAvg\n elif outLayer == \"mlp\":\n predictions = predictionsMlp\n \n # prediction model\n model = Model(img_shape, predictions, name = 'net_in_net')\n\n\n return model", "def network(x, FLAGS):\n\tconv_out, outsize, nfeatures = hnn.convlayers(x, FLAGS)\n\tconv_out_flat = tf.reshape(conv_out, [-1, outsize * outsize * nfeatures])\n\ty = hnn.fclayers(conv_out_flat, outsize * outsize * nfeatures, [FLAGS.hidden_layer_size, NUM_CLASSES], FLAGS)\n\treturn y", "def fc_layer(input_layer, nodes_input, nodes_output, name_scope, final_layer=False):\n W = tf.get_variable(name=name_scope + 'W', shape=[nodes_input, nodes_output],\n initializer=tf.truncated_normal_initializer())\n b = tf.get_variable(name=name_scope + 'b', shape=[nodes_output], initializer=tf.constant_initializer(0))\n\n if final_layer:\n return tf.matmul(input_layer, W) + b # no activation\n else:\n return tf.nn.relu(tf.matmul(input_layer, W) + b) # relu activation\n # return tf.sigmoid(tf.matmul(input_layer, W) + b) # sigmoid activation", "def fc_layer(bottom, weight_shape, name):\n with tf.variable_scope(name):\n fc_weight = tf.get_variable('weight', weight_shape, initializer=tf.truncated_normal_initializer(stddev=0.1))\n fc_bias = tf.get_variable('bias', weight_shape[1], initializer=tf.truncated_normal_initializer(stddev=0.1))\n fc = tf.nn.bias_add(tf.matmul(bottom, fc_weight), fc_bias)\n print_activation(fc)\n\n # visitable fc_weight layer and fc_bias layer\n variable_summaries(fc_weight, name)\n variable_summaries(fc_bias, name)\n # tf.summary.histogram(name + '/fc', fc)\n return fc", "def __init__(self, n_channels_in: int = 3, n_channels_out: int = 3, n_hidden: int = 64,\n norm_layer: nn.Module = nn.BatchNorm2d, use_dropout: bool = True,\n n_hidden_layers: int = 3, padding_type: str = 'reflect', temperature: float = 1,\n device: str = \"cpu\", **kwargs):\n super(ToyCalNet, self).__init__(n_channels_in, n_channels_out, device, **kwargs)\n\n # save for use in forward pass\n self.temperature = temperature\n\n # If normalizing layer is instance normalization, add bias\n # use_bias = norm_layer == nn.InstanceNorm2d\n use_bias = True\n use_dropout = True\n\n\n # Initialize model input block\n layers = []\n\n # Add input block layers\n layers += [nn.ReflectionPad2d(1)]\n layers += [nn.Conv2d(n_channels_in, n_hidden, kernel_size=3, bias=use_bias)]\n\n # layers += [nn.Linear(n_channels_in, n_hidden)]\n\n layers += [nn.Dropout(int(use_dropout) * 0.2)]\n layers += [norm_layer(n_hidden)]\n layers += [nn.LeakyReLU(0.2, inplace=True)]\n\n # Add hidden block layers\n for i in range(n_hidden_layers):\n # Add input block 
layers\n layers += [nn.ReflectionPad2d(1)]\n layers += [nn.Conv2d(n_hidden, n_hidden, kernel_size=3, bias=use_bias)]\n\n # layers += [nn.Linear(n_hidden, n_hidden)]\n\n layers += [nn.Dropout(int(use_dropout) * 0.2)]\n layers += [norm_layer(n_hidden)]\n layers += [nn.LeakyReLU(0.2, inplace=True)]\n\n layers += [nn.ReflectionPad2d(1)]\n layers += [nn.Conv2d(n_hidden, n_channels_out, kernel_size=3)]\n\n # layers += [nn.Linear(n_hidden, n_channels_out)]\n\n layers += [nn.Dropout(int(use_dropout) * 0.2)]\n # layers += [nn.Softmax(dim=1)]\n\n # Save model\n self.model = nn.Sequential(*layers)", "def __init__(self):\n super(PackageNet, self).__init__()\n \n # remove last layers of vgg19 model, save first fc layer and maxpool layer\n self.vgg = models.vgg19(pretrained=True)\n del self.vgg.classifier[2:]\n\n # get size of some layers\n start_num = self.vgg.classifier[0].out_features\n mid_num0 = int(np.sqrt(start_num))\n mid_num1 = int(start_num**0.667)\n mid_num2 = int(start_num**0.333)\n \n cls_out_num = 9 \n reg_out_num = 16 # 8 3D bounding box coords\n \n # define classifier\n self.classifier = nn.Sequential(\n nn.Linear(start_num,mid_num0,bias=True),\n nn.ReLU(),\n nn.Linear(mid_num0,cls_out_num,bias = True),\n nn.Softmax(dim = 1)\n )\n \n # define regressor\n # try relu and tanh, also try without bias\n self.regressor = nn.Sequential(\n nn.Linear(start_num,mid_num1,bias=True),\n nn.ReLU(),\n nn.Linear(mid_num1,mid_num2,bias = True),\n nn.ReLU(),\n nn.Linear(mid_num2,reg_out_num,bias = True),\n nn.Sigmoid()\n \n )", "def output(self,x=None,h=None,in_features=0,in_features_h=0,reuse=False):\n assert (x is not None or h is not None)\n print('re'*10,reuse , self.name)\n stride=[1, self.cnn_stride, self.cnn_stride, 1]\n with tf.variable_scope(self.name):\n Wxi=self._create_weight([self.cnn_size, self.cnn_size, in_features, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Wxi')\n Whi=self._create_weight([self.cnn_size, self.cnn_size, in_features_h, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Whi')\n Wxf=self._create_weight([self.cnn_size, self.cnn_size, in_features, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Wxf')\n Whf=self._create_weight([self.cnn_size, self.cnn_size, in_features_h, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Whf')\n Wxc=self._create_weight([self.cnn_size, self.cnn_size, in_features, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Wxc')\n Whc=self._create_weight([self.cnn_size, self.cnn_size, in_features_h, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Whc')\n Wxo=self._create_weight([self.cnn_size, self.cnn_size, in_features, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Wxo')\n Who=self._create_weight([self.cnn_size, self.cnn_size, in_features_h, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='conv_Who')\n Wci=self._create_weight([1,self.height , self.width, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='ele_Wci')\n Wcf=self._create_weight([1,self.height , self.width, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='ele_Wcf')\n Wco=self._create_weight([1,self.height , self.width, self.out_features], stddev=self.weight_dev,\n kname=self.weight_init, name='ele_Wco')\n if not reuse and h is None:\n 
print('xr'*20,x.get_shape().as_list(),in_features,self.out_features,in_features_h)\n h=tf.zeros((1,self.height,self.width,self.out_features))\n self._input=tf.sigmoid(self._conv2d(x,Wxi,stride=stride,pre_name='Wxi')+self._conv2d(h,Whi,use_bias=True,stride=stride,pre_name='Whi')+Wci*self._cell)\n self._forget=tf.sigmoid(self._conv2d(x,Wxf,stride=stride,pre_name='Wxf')+self._conv2d(h,Whf,use_bias=True,stride=stride,pre_name='Whf')+Wcf*self._cell)\n self._cell=self._forget*self._cell+self._input*tf.tanh(self._conv2d(x,Wxc,stride=stride,pre_name='Wxc')+self._conv2d(h,Whc,stride=stride,use_bias=True,pre_name='Whc'))\n self._output=tf.sigmoid(self._conv2d(x,Wxo,stride=stride,pre_name='Wxo')+self._conv2d(h,Who,use_bias=True,stride=stride,pre_name='Who')+Wco*self._cell)\n else:\n # print('x'*10,x.shape,'\\\\n Wxi',Wxi.shape,'\\\\n h ',h.shape,Whi.shape,'\\\\n c ',Wci.shape)\n if h is None:\n # print('x'*20,x.get_shape().as_list(),in_features,out_features)\n self._input=tf.sigmoid(self._conv2d(x,Wxi,stride=stride,pre_name='Wxi',use_bias=True)+Wci*self._cell)\n self._forget=tf.sigmoid(self._conv2d(x,Wxf,stride=stride,pre_name='Wxf',use_bias=True)+Wcf*self._cell)\n self._cell=self._forget*self._cell+self._input*tf.tanh(self._conv2d(x,Wxc,stride=stride,pre_name='Wxc',use_bias=True))\n self._output=tf.sigmoid(self._conv2d(x,Wxo,stride=stride,pre_name='Wxo',use_bias=True)+Wco*self._cell)\n elif x is None:\n # print('h'*20,h.get_shape().as_list(),in_features_h,out_features)\n self._input=tf.sigmoid(self._conv2d(h,Whi,use_bias=True,stride=stride,pre_name='Whi')+Wci*self._cell)\n self._forget=tf.sigmoid(self._conv2d(h,Whf,use_bias=True,stride=stride,pre_name='Whf')+Wcf*self._cell)\n self._cell=self._forget*self._cell+self._input*tf.tanh(self._conv2d(h,Whc,stride=stride,use_bias=True,pre_name='Whc'))\n self._output=tf.sigmoid(self._conv2d(h,Who,use_bias=True,stride=stride,pre_name='Who')+Wco*self._cell)\n else:\n # print('xh'*20,x.get_shape().as_list(),in_features,out_features,in_features_h)\n self._input=tf.sigmoid(self._conv2d(x,Wxi,stride=stride,pre_name='Wxi')+self._conv2d(h,Whi,use_bias=True,stride=stride,pre_name='Whi')+Wci*self._cell)\n self._forget=tf.sigmoid(self._conv2d(x,Wxf,stride=stride,pre_name='Wxf')+self._conv2d(h,Whf,use_bias=True,stride=stride,pre_name='Whf')+Wcf*self._cell)\n self._cell=self._forget*self._cell+self._input*tf.tanh(self._conv2d(x,Wxc,stride=stride,pre_name='Wxc')+self._conv2d(h,Whc,stride=stride,use_bias=True,pre_name='Whc'))\n self._output=tf.sigmoid(self._conv2d(x,Wxo,stride=stride,pre_name='Wxo')+self._conv2d(h,Who,use_bias=True,stride=stride,pre_name='Who')+Wco*self._cell)\n h=self._output*tf.tanh(self._cell)\n\n return h", "def add_internal_layer(self,_input, growth_rate, training=True, bc_mode=False, dropout_keep_prob=1.0,\n scope=\"inner_layer\"):\n # call composite function with 3x3 kernel\n with tf.variable_scope(scope):\n if not bc_mode:\n _output = self.composite_function(_input, growth_rate, training)\n if training:\n _output = slim.dropout(_output, dropout_keep_prob)\n\n elif bc_mode:\n bottleneck_out = self.bottleneck(_input, growth_rate, training)\n _output = self.composite_function(bottleneck_out, growth_rate, training)\n if training:\n _output = slim.dropout(_output, dropout_keep_prob)\n\n # concatenate _input with out from composite function\n # the only diffenence between resnet and densenet\n output = tf.concat(axis=3, values=(_input, _output))\n return output", "def contract(self, depth, kernel_cc_weight=0.0):\n\n def composite_layer(x):\n name = 
'contr_%d' % depth\n num_filters = self.base_num_filters * 2 ** depth\n x = self.conv_depth(num_filters=num_filters, name=name, kernel_cc_weight=kernel_cc_weight)(x)\n self.contr_tensors[depth] = x\n x = MaxPoolingND(x)(x)\n return x\n\n return composite_layer", "def importCaffeNetwork(modeldef, params):\n if params is None:\n net = caffe.Net(modeldef, caffe.TEST)\n else:\n net = caffe.Net(modeldef, params, caffe.TEST)\n model = caffe.proto.caffe_pb2.NetParameter()\n Merge(open(modeldef, \"rb\").read(), model)\n numLayers = len(model.layer)\n outLayerStr = []\n outParams = []\n ret = []\n # TODO check that net is linear (no branching)\n dataLayerName = net.inputs[0] # any better way to get this?\n dataShape = net.blobs[dataLayerName].data.shape\n if dataShape[2] != dataShape[3]:\n raise Exception(\"Only square images supported for now\")\n inDim = dataShape[2]\n inChans = dataShape[1]\n for i in range(numLayers):\n layerModel = model.layer[i]\n layerType = layerModel.type\n layerName = layerModel.name\n logging.info(\"Processing layer: %s (type %s). input (chans,dim)=(%d,%d)\" % (layerName, layerType, inChans, inDim))\n if net.params.has_key(layerName):\n layerParams = net.params[layerName]\n if layerType == \"Input\":\n # TODO we should support some of the transformations that Caffe\n # supports on the input\n raise Exception(\"Input layer is not yet convertable, need input data shape instead\")\n elif layerType == \"Scale\":\n A = layerParams[0].data\n if layerModel.scale_param.bias_term:\n B = layerParams[1].data\n else:\n B = np.zeros(shape=A.shape)\n ret += [lb.LinearLayer(A, B)]\n elif layerType == \"BatchNorm\":\n # epsilon to ensure non-zero operand to square root\n eps = layerModel.batch_norm_param.eps\n # batchnorm layer has the following data blobs:\n # [mean, variance, moving average factor]\n # BUG: mavf can be zero, causing invalid divide below\n mavf = layerParams[2].data[0]\n if mavf == 0:\n mavf = 1\n m = layerParams[0].data / mavf\n i = 1 / (np.sqrt( (layerParams[1].data / mavf) + eps ))\n numBatchNormChans = m.shape[0]\n # Caffe BN layers do not have b and g\n b = np.zeros((numBatchNormChans), dtype=np.float32)\n g = np.ones((numBatchNormChans), dtype=np.float32)\n # we want to implement batchnorm as a linear operation Mx+N\n # where Mx+N = g*i*(x-m)+b = g*i*x - g*i*m + b\n # so M = g*i and N = b - g*i*m\n M = g*i\n N = b - g*i*m\n #outLayerStr += [\"linear\"]\n #outParams += [M, N]\n ret += [lb.LinearLayer(M, N)]\n elif layerType == \"Quant\":\n # quantization layer\n # get quantization type and levels\n qfxn = layerModel.quant_param.forward_func\n qlevels = np.asarray(layerModel.quant_param.centers, dtype=np.float32)\n if qfxn == \"hwgq\":\n # add zero as an explicit level for HWGQ\n qlevels = np.concatenate((np.asarray([0.0], dtype=np.float32), qlevels))\n # check for uniform quantization -- all levels equally spaced\n isUniform = np.all(np.isclose(np.diff(qlevels, 2), 0))\n if not isUniform:\n # TODO add a LookupTableLayer for nonlinear quantization support\n raise Exception(\"Nonuniform quantization not yet supported\")\n else:\n # uniform quantization = threshold followed by linear transform\n # compute thresholds as HWGQ does\n qlevels_t = qlevels[1:] # exclude the zero level for thres. 
comp\n thr = (qlevels_t[:-1] + qlevels_t[1:]) / 2.0\n # add explicit zero threshold\n thr = np.concatenate((np.asarray([0.0], dtype=np.float32), thr))\n # emit threshold layer\n #outLayerStr += [\"thres\"]\n #outParams += [thr]\n ret += [lb.ThresholdingLayer(thr)]\n # TODO this should be ideally propagated (similar to bitwidths)\n # using a transform\n ret[-1].insize = inChans\n ret[-1].outsize = inChans\n # find the coefficients for the linear transform Fx + G\n G = np.asarray([qlevels[0]])\n F = np.asarray([qlevels[1] - qlevels[0]])\n # emit linear layer with scalars\n #outLayerStr += [\"linear\"]\n #outParams += [F, G]\n ret += [lb.LinearLayer(F, G)]\n elif qfxn == \"sign\":\n # sign quantization has its own layer type, but the core logic\n # still uses 0 as a threshold.\n thr = np.asarray([[0.0]], dtype=np.float32)\n ret += [lb.BipolarThresholdingLayer(thr)]\n else:\n raise Exception(\"Unsupported quantization function\")\n\n elif layerType == \"BinaryInnerProduct\":\n # binary inner product layer may or may not have bias field\n # additionally, it may use the l1-norm as a scaling factor\n # need access to prototxt to find out whether to use alpha\n if not layerModel.binary_inner_product_param.use_binarization:\n raise Exception(\"use_binarization not set in BinaryInnerProduct layer\")\n useBias = layerModel.inner_product_param.bias_term\n W = layerParams[0].data\n (rows, cols) = W.shape\n useAlpha = layerModel.binary_inner_product_param.use_alpha\n # the weights here are not yet binarized - need to do that\n # access and binarize the weights as done by the bnfc layer impl\n # binarize the weight matrix:\n Wbin = np.sign(W)\n # generate fully connected layer output\n\n # TODO indicate 1 bit signed (bipolar)\n ret += [lb.FullyConnectedLayer(Wbin, 1, 32, 32)]\n ret[-1].in_dim = inDim\n ret[-1].kernel = 1\n # treat the produced data as \"rows\"-channel, 1px images\n inChans = rows\n inDim = 1\n if useAlpha:\n # add a linear layer with A=alpha B=0 after the FC layer\n alpha = np.zeros(rows, dtype=np.float32)\n beta = np.zeros(rows, dtype=np.float32)\n Wabs = np.abs(W)\n for i in range(rows):\n alpha[i] = Wabs[i].sum() / cols\n ret += [lb.LinearLayer(alpha, beta)]\n if useBias:\n # add bias as additive linear layer\n b = layerParams[1].data\n ret += [lb.LinearLayer(np.ones((rows), dtype=np.float32), b)]\n elif layerType == \"BinaryConvolution\":\n if not layerModel.binary_convolution_param.use_binarization:\n raise Exception(\"use_binarization not set in BinaryInnerProduct layer\")\n useAlpha = layerModel.binary_convolution_param.use_alpha\n useBias = layerModel.convolution_param.bias_term\n ofm = layerModel.convolution_param.num_output\n # TODO warn about non-uniform stride/pad/kernelsize\n # kernel size\n if len(layerModel.convolution_param.kernel_size) == 0:\n raise Exception(\"Unknown kernel size\")\n else:\n k = layerModel.convolution_param.kernel_size[0]\n # stride options\n if len(layerModel.convolution_param.stride) == 0:\n s = 1\n else:\n s = layerModel.convolution_param.stride[0]\n # padding options\n if len(layerModel.convolution_param.pad) == 0:\n pad = 0\n else:\n pad = layerModel.convolution_param.pad[0]\n # size of each output feature map\n outDim = ((inDim + 2*pad - k) / s) + 1\n W = layerParams[0].data\n # binarize kernel weights and output conv layer\n orig_shape = W.shape\n Wbin = np.sign(W)\n\n # TODO indicate 1 bit signed (bipolar)\n ret += [lb.ConvolutionLayer(Wbin, inDim, pad, s, 1, 1, 1)]\n ret[-1].kernel = k\n ret[-1].k = k\n ret[-1].stride = s\n ret[-1].parallel = 
layerModel.convolution_param.group\n # compute alphas, if needed\n if useAlpha:\n Wa = W.reshape((ofm, k*k*inChans))\n (rows, cols) = Wa.shape\n # add a linear layer with A=alpha B=0 after the conv layer\n alpha = np.zeros(rows, dtype=np.float32)\n Wabs = np.abs(Wa)\n for i in range(rows):\n alpha[i] = Wabs[i].sum() / cols\n beta = np.zeros(rows, dtype=np.float32)\n #outLayerStr += [\"linear\"]\n #outParams += [alpha, beta]\n ret += [lb.LinearLayer(alpha, beta)]\n # TODO support conv bias\n if useBias:\n raise Exception(\"BinaryConvolution bias not yet supported\")\n # update data shape passed to next layer\n inChans = ofm\n inDim = outDim\n elif layerType == \"Convolution\":\n useBias = layerModel.convolution_param.bias_term\n ofm = layerModel.convolution_param.num_output\n # TODO warn about non-uniform stride/pad/kernelsize\n # kernel size\n if len(layerModel.convolution_param.kernel_size) == 0:\n raise Exception(\"Unknown kernel size\")\n else:\n k = layerModel.convolution_param.kernel_size[0]\n # stride options\n if len(layerModel.convolution_param.stride) == 0:\n s = 1\n else:\n s = layerModel.convolution_param.stride[0]\n # padding options\n if len(layerModel.convolution_param.pad) == 0:\n pad = 0\n else:\n pad = layerModel.convolution_param.pad[0]\n # size of each output feature map\n outDim = ((inDim + 2*pad - k) / s) + 1\n W = layerParams[0].data\n #outParams += [W]\n #outLayerStr += [\"conv:%d:%d:%d:32:32:32\" % (inDim, pad, s)]\n ret += [lb.ConvolutionLayer(W, inDim, pad, s, 32, 32, 32)]\n ret[-1].kernel = k\n ret[-1].stride = s\n ret[-1].parallel = layerModel.convolution_param.group\n # TODO support conv bias\n if useBias:\n rows = ofm \n b = layerParams[1].data\n ret += [lb.LinearLayer(np.ones((rows), dtype=np.float32), b)]\n raise Exception(\"Convolution bias not yet supported\")\n # update data shape passed to next layer\n inChans = ofm\n inDim = outDim\n elif layerType == \"InnerProduct\":\n useBias = layerModel.inner_product_param.bias_term\n W = layerParams[0].data\n (rows, cols) = W.shape\n # if the previous layer was a conv layer, interleave the columns\n # to match the interleaved channel data layout\n # generate fully connected layer output\n #outLayerStr += [\"fc:32:32:32\"]\n #outParams += [W]\n ret += [lb.FullyConnectedLayer(W, 32, 32, 32)]\n # treat the produced data as \"rows\"-channel, 1px images\n ret[-1].kernel =1\n inChans = rows\n inDim = 1\n if useBias:\n # add bias as additive linear layer\n b = layerParams[1].data\n #outLayerStr += [\"linear\"]\n #outParams += [np.ones((rows), dtype=np.float32), b]\n ret += [lb.LinearLayer(np.ones((rows), dtype=np.float32), b)]\n elif layerType == \"Pooling\":\n if inDim == 1:\n continue\n if layerModel.pooling_param.pool == 0: # max pooling\n poolFxn = \"MAX\"\n elif layerModel.pooling_param.pool == 1: # average pooling\n poolFxn = \"AVE\"\n else:\n raise Exception(\"Only max and average pooling supported for now\")\n k = layerModel.pooling_param.kernel_size\n s = layerModel.pooling_param.stride\n #outLayerStr += [\"maxpool:%d:%d:%d:%d\" % (inDim, inChans, k, s)]\n ret += [lb.PoolingLayer(inDim, inChans, k, s, poolFxn)]\n # update data shape passed to next layer\n inChans = ofm\n inDim = ((inDim - k) / s) + 1\n elif layerType == \"Softmax\":\n ret += [lb.SoftmaxLayer()]\n ret[-1].outsize = inChans\n ret[-1].insize = inChans\n elif layerType == \"ReLU\":\n ret += [lb.ReLULayer()]\n elif layerType == \"LRN\":\n pass\n elif layerType == \"Dropout\":\n pass\n else:\n raise Exception(\"Unrecognized or unsupported layer: %s\" 
% layerType)\n\n return ret", "def __call__(self, **kwargs):\n segname = 'block_{}_expand_relu'\n blocks = [13, 6, 3, 1]\n skips = [self._backbone.get_layer(segname.format(i)) for i in blocks]\n backbone_out = self._backbone.get_layer('block_16_project')\n\n p5 = self._fpn_block(backbone_out.output, skips[0].output)\n p4 = self._fpn_block(p5, skips[1].output)\n p3 = self._fpn_block(p4, skips[2].output)\n p2 = self._fpn_block(p3, skips[3].output)\n\n s5 = self._conv_block(p5, 128)\n s4 = self._conv_block(p4, 128)\n s3 = self._conv_block(p3, 128)\n s2 = self._conv_block(p2, 128)\n\n s5 = tf.keras.layers.UpSampling2D(\n size=(8, 8),\n interpolation='nearest'\n )(s5)\n\n s4 = tf.keras.layers.UpSampling2D(\n size=(4, 4),\n interpolation='nearest'\n )(s4)\n\n s3 = tf.keras.layers.UpSampling2D(\n size=(2, 2),\n interpolation='nearest'\n )(s3)\n\n concat = [s5, s4, s3, s2]\n x = tf.keras.layers.Concatenate()(concat)\n x = tf.keras.layers.Conv2D(\n 64,\n kernel_size=3,\n padding='same',\n kernel_initializer='he_uniform'\n )(x)\n\n x = tf.keras.layers.BatchNormalization()(x)\n x = tf.keras.layers.Activation('relu')(x)\n x = tf.keras.layers.UpSampling2D((2, 2))(x)\n\n x = tf.keras.layers.Conv2D(\n 1,\n kernel_size=3,\n padding='same',\n kernel_initializer='he_uniform'\n )(x)\n\n out = tf.keras.layers.Activation('sigmoid')(x)\n model = tf.keras.models.Model(\n inputs=self._backbone.input,\n outputs=out\n )\n\n return model", "def __init__(self, config, input_shp):\n\n # Run initialization for super class\n super(MyNetwork, self).__init__()\n\n # Store configuration\n self.config = config\n\n # Placeholder for layers\n self.layers = {}\n indim = input_shp[0]\n\n # Retrieve Conv, Act, Pool functions from configurations. We'll use\n # these for our code below.\n if config.conv2d == \"torch\":\n self.Conv2d = nn.Conv2d\n elif config.conv2d == \"custom\":\n self.Conv2d = ConvBlock\n self.Activation = getattr(nn, config.activation)\n self.Pool2d = getattr(nn, config.pool2d)\n self.Linear = nn.Linear\n\n # Resnet Blocks, similar to slide 73 of lecture 21. However, for\n # simplicity, we'll make is slightly different. Note that we used\n # nn.Sequential this time.\n self.convs = nn.Sequential()\n cur_h, cur_w = input_shp[-2:]\n for _i in range(config.num_conv_outer):\n #\n # NOTE THE NEW LAYER ON THESE LINES!\n #\n # We have a dedicated 1x1 layer to get more channels. Note also\n # that this is a pure linear convolution layer.\n outdim = config.nchannel_base * 2 ** _i\n self.convs.add_module(\n \"conv_{}_base\".format(_i), nn.Conv2d(indim, outdim, 1, 1, 0))\n indim = outdim\n for _j in range(config.num_conv_inner):\n # We now use our selected convolution layer. Note that our\n # resnet implementation will have a different call style to\n # vanilla conv2d of torch, so we'll just do an ugly if-else\n # here.\n if config.conv2d == \"torch\":\n self.convs.add_module(\n \"conv_{}_{}\".format(_i, _j),\n self.Conv2d(indim, outdim, config.ksize, 1, 1))\n self.convs.add_module(\n \"act_{}_{}\".format(_i, _j),\n self.Activation())\n cur_h = cur_h - (config.ksize - 1)\n cur_w = cur_w - (config.ksize - 1)\n elif config.conv2d == \"custom\":\n self.convs.add_module(\n \"conv_{}_{}\".format(_i, _j),\n self.Conv2d(indim, outdim, config.ksize, 1, self.Activation))\n self.convs.add_module(\n \"conv_{}_pool\".format(_i), self.Pool2d(2, 2))\n cur_h = cur_h // 2\n cur_w = cur_w // 2\n\n # Final output layer. 
We'll assume that conv layer outputs are global\n # average pooled\n self.output = nn.Linear(indim, config.num_class)\n\n print(self)", "def BasicBlock(cin, cout, n):\n layers = [\n nn.Conv2d(cin, cout, 3, padding=1),\n nn.BatchNorm2d(cout),\n nn.ReLU()\n ]\n for _ in range(n - 1):\n layers.append(nn.Conv2d(cout, cout, 3, padding=1))\n layers.append(nn.BatchNorm2d(cout))\n layers.append(nn.ReLU())\n layers.append(nn.MaxPool2d(2))\n return nn.Sequential(*layers)", "def get_network(x):\n n_classes = 5\n batch_size = x.get_shape().as_list()[0]\n channels = x.get_shape().as_list()[3]\n\n # Model Helpers --------------------------------------------------------\n\n # https://www.tensorflow.org/versions/r0.8/api_docs/python/nn.html#conv2d\n def conv2d(img, w, b):\n \n x = tf.nn.conv2d(img, w, strides=[1, 1, 1, 1], padding='VALID')\n z = tf.nn.bias_add(x, b)\n return tf.nn.relu(z)\n\n # https://www.tensorflow.org/versions/r0.8/api_docs/python/nn.html#max_pool\n def max_pool(img, k):\n ks = [1, k, k, 1]\n return tf.nn.max_pool(img, ksize=ks, strides=ks, padding='VALID')\n\n # TODO implement\n def maxout(x):\n raise NotImplemented()\n\n def fc(x, w, b, act):\n if act:\n return act(tf.add(tf.matmul(x, w), b))\n else:\n return tf.add(tf.matmul(x, w), b)\n\n def conv_net(_X, _weights, _biases):\n # First convolution layer\n #print 'x: {}'.format(_X.get_shape())\n \n conv1 = conv2d(_X, _weights['wc1'], _biases['bc1'])\n # k used to be 2\n conv1 = max_pool(conv1, k=2)\n\n #print 'conv1: {}'.format(conv1.get_shape())\n\n # Second Covolution layer\n conv2 = conv2d(conv1, _weights['wc2'], _biases['bc2'])\n conv2 = max_pool(conv2, k=2)\n\n #print 'conv2: {}'.format(conv2.get_shape())\n\n # Thrid Convolution Layer\n conv3 = conv2d(conv2, _weights['wc3'], _biases['bc3'])\n\n #print 'conv3: {}'.format(conv3.get_shape())\n\n # Fourth Convolution Layer\n conv4 = conv2d(conv3, _weights['wc4'], _biases['bc4'])\n conv4 = max_pool(conv4, k=2)\n\n #print 'conv4: {}'.format(conv4.get_shape())\n\n # In the paper the FC layers suggest that you use maxout, but\n # there isn't a native maxout in TensorFlow, so I used ReLU for now.\n\n # First Fully Connected Layer, flatten out filters first\n fc1 = tf.reshape(conv4, [batch_size, -1])\n # https://www.tensorflow.org/versions/r0.8/api_docs/python/nn.html#relu\n fc1 = fc(fc1, _weights['wf1'], _biases['bf1'], tf.nn.relu)\n # TODO dropout should be a parameter\n fc1 = tf.nn.dropout(fc1, tf.Variable(tf.constant(0.5)))\n\n\n # Second Fully Connected Layer\n fc2 = fc(fc1, _weights['wf2'], _biases['bf2'], tf.nn.relu)\n # TODO dropout should be a parameter\n fc2 = tf.nn.dropout(fc2, tf.Variable(tf.constant(0.5)))\n\n # Output\n # https://www.tensorflow.org/versions/r0.8/api_docs/python/nn.html#sigmoid\n output = fc(fc2, _weights['out'], _biases['out'], None)\n return output\n\n # Model Helpers --------------------------------------------------------\n\n\n # Model weights and biases\n weights = {\n # 6x6 conv, 3-channel input, 32-channel outputs\n 'wc1': tf.Variable(tf.truncated_normal([3, 3, channels, 32], stddev=0.01)), #0.01\n # 5x5 conv, 32-channel inputs, 64-channel outputs\n 'wc2': tf.Variable(tf.truncated_normal([5, 5, 32, 64], stddev=0.01)), #0.01\n # 3x3 conv, 64-channel inputs, 128-channel outputs\n 'wc3': tf.Variable(tf.truncated_normal([3, 3, 64, 128], stddev=0.01)), #0.01\n # 3x3 conv, 128-channel inputs, 128-channel outputs\n 'wc4': tf.Variable(tf.truncated_normal([3, 3, 128, 128], stddev=0.1)), #0.1\n # fully connected, 512 inputs, 2048 outputs\n # was 4608 for 84x84\n 
'wf1': tf.Variable(tf.truncated_normal([6272, 2048], stddev=0.001)), #0.001\n # fully coneected 2048 inputs, 2048 outputs\n 'wf2': tf.Variable(tf.truncated_normal([2048, 2048], stddev=0.001)), #0.001\n # 2048 inputs, 5 outputs (class prediction)\n 'out': tf.Variable(tf.truncated_normal([2048, n_classes], stddev=0.01)) #0.01\n }\n\n biases = {\n 'bc1': tf.Variable(tf.constant(0.1, shape=[32])),\n 'bc2': tf.Variable(tf.constant(0.1, shape=[64])),\n 'bc3': tf.Variable(tf.constant(0.1, shape=[128])),\n 'bc4': tf.Variable(tf.constant(0.1, shape=[128])),\n 'bf1': tf.Variable(tf.constant(0.01, shape=[2048])),\n 'bf2': tf.Variable(tf.constant(0.01, shape=[2048])),\n 'out': tf.Variable(tf.constant(0.1, shape=[n_classes]))\n }\n\n return conv_net(x, weights, biases)", "def __init__(self, img_rows=400, img_cols=400, vgg_weights=\"imagenet\", inference_only=False, net_name='default', gpus=1, vgg_device=None):\n \n # Settings\n self.img_rows = img_rows\n self.img_cols = img_cols\n self.img_overlap = 30\n self.inference_only = inference_only\n self.net_name = net_name\n self.gpus = gpus\n self.vgg_device = vgg_device\n\n # Scaling for VGG input\n self.mean = [0.485, 0.456, 0.406]\n self.std = [0.229, 0.224, 0.225]\n \n #get PowerSpect_CMB\n reader = np.zeros((2507,))\n fp = open('./data/COM_PowerSpect_CMB-base-plikHM-TTTEEE-lowl-lowE-lensing-minimum-theory_R3.01.txt')\n \n for i,line in enumerate(fp):\n if i >= 1:\n reader[i-1] = line.split()[1]\n \n fp.close() \n readers = np.log(reader)\n self.cl = K.constant(readers)\n # Assertions\n assert self.img_rows >= 256, 'Height must be >256 pixels'\n assert self.img_cols >= 256, 'Width must be >256 pixels'\n\n # Set current epoch\n self.current_epoch = 0\n \n # VGG layers to extract features from (first maxpooling layers, see pp. 
7 of paper)\n self.vgg_layers = [3, 6, 10]\n\n # Instantiate the vgg network\n if self.vgg_device:\n with tf.device(self.vgg_device):\n self.vgg = self.build_vgg(vgg_weights)\n else:\n self.vgg = self.build_vgg(vgg_weights)\n \n # Create UNet-like model\n if self.gpus <= 1:\n self.model, inputs_mask= self.build_pconv_unet()\n self.compile_pconv_unet(self.model, inputs_mask) \n else:\n with tf.device(\"/cpu:0\"):\n self.model, inputs_mask = self.build_pconv_unet()\n self.model = multi_gpu_model(self.model, gpus=self.gpus)\n self.compile_pconv_unet(self.model, inputs_mask)", "def build_resnet50(self):\n use_batch_norm = self.use_batch_norm\n\n imgs = tf.placeholder(tf.float32, [self.batch_size]+self.img_shape)\n is_train = tf.placeholder(tf.bool)\n\n conv1_feats = convolution(imgs, 7, 7, 64, 2, 2, 'conv1')\n conv1_feats = batch_norm(conv1_feats, 'bn_conv1', is_train, use_batch_norm)\n conv1_feats = nonlinear(conv1_feats, 'relu')\n pool1_feats = max_pool(conv1_feats, 3, 3, 2, 2, 'pool1')\n\n res2a_feats = self.basic_block(pool1_feats, 'res2a', 'bn2a', is_train, use_batch_norm, 64, 1)\n res2b_feats = self.basic_block2(res2a_feats, 'res2b', 'bn2b', is_train, use_batch_norm, 64)\n res2c_feats = self.basic_block2(res2b_feats, 'res2c', 'bn2c', is_train, use_batch_norm, 64)\n \n res3a_feats = self.basic_block(res2c_feats, 'res3a', 'bn3a', is_train, use_batch_norm, 128)\n res3b_feats = self.basic_block2(res3a_feats, 'res3b', 'bn3b', is_train, use_batch_norm, 128)\n res3c_feats = self.basic_block2(res3b_feats, 'res3c', 'bn3c', is_train, use_batch_norm, 128)\n res3d_feats = self.basic_block2(res3c_feats, 'res3d', 'bn3d', is_train, use_batch_norm, 128)\n\n res4a_feats = self.basic_block(res3d_feats, 'res4a', 'bn4a', is_train, use_batch_norm, 256)\n res4b_feats = self.basic_block2(res4a_feats, 'res4b', 'bn4b', is_train, use_batch_norm, 256)\n res4c_feats = self.basic_block2(res4b_feats, 'res4c', 'bn4c', is_train, use_batch_norm, 256)\n res4d_feats = self.basic_block2(res4c_feats, 'res4d', 'bn4d', is_train, use_batch_norm, 256)\n res4e_feats = self.basic_block2(res4d_feats, 'res4e', 'bn4e', is_train, use_batch_norm, 256)\n res4f_feats = self.basic_block2(res4e_feats, 'res4f', 'bn4f', is_train, use_batch_norm, 256)\n\n res5a_feats = self.basic_block(res4f_feats, 'res5a', 'bn5a', is_train, use_batch_norm, 512)\n res5b_feats = self.basic_block2(res5a_feats, 'res5b', 'bn5b', is_train, use_batch_norm, 512)\n res5c_feats = self.basic_block2(res5b_feats, 'res5c', 'bn5c', is_train, use_batch_norm, 512)\n\n res5c_feats_flat = tf.reshape(res5c_feats, [self.batch_size, 49, 2048])\n self.conv_feats = res5c_feats_flat\n self.conv_feat_shape = [49, 2048]\n self.num_ctx = 49 \n self.dim_ctx = 2048\n\n self.imgs = imgs\n self.is_train = is_train", "def modified_resnet10(self) -> torch.nn.Module:\n # initialize a Resnet-10 instance\n net = torchvision.models.resnet._resnet(arch=\"resnet10\", block=torchvision.models.resnet.BasicBlock, layers=[1, 1, 1, 1], pretrained=False, progress=False)\n\n # the first layer will be a lazy convolutional layer with any input channels\n net.conv1 = torch.nn.LazyConv2d(\n out_channels=64,\n kernel_size=(7, 7),\n stride=(2, 2),\n padding=(3, 3),\n bias=not self.bn_affine\n )\n\n # modify batch-norm layer to have momentum 1 and no tracking statistics\n net.bn1 = torch.nn.BatchNorm2d(64, momentum=1, track_running_stats=False, affine=self.bn_affine)\n\n net.layer1[0].bn1 = torch.nn.BatchNorm2d(64, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer1[0].bn2 = 
torch.nn.BatchNorm2d(64, momentum=1, track_running_stats=False, affine=self.bn_affine)\n\n net.layer2[0].bn1 = torch.nn.BatchNorm2d(128, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer2[0].bn2 = torch.nn.BatchNorm2d(128, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer2[0].downsample[1] = torch.nn.BatchNorm2d(128, momentum=1, track_running_stats=False, affine=self.bn_affine)\n\n net.layer3[0].bn1 = torch.nn.BatchNorm2d(256, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer3[0].bn2 = torch.nn.BatchNorm2d(256, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer3[0].downsample[1] = torch.nn.BatchNorm2d(256, momentum=1, track_running_stats=False, affine=self.bn_affine)\n\n net.layer4[0].bn1 = torch.nn.BatchNorm2d(512, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer4[0].bn2 = torch.nn.BatchNorm2d(512, momentum=1, track_running_stats=False, affine=self.bn_affine)\n net.layer4[0].downsample[1] = torch.nn.BatchNorm2d(512, momentum=1, track_running_stats=False, affine=self.bn_affine)\n\n # last layer\n if self.dim_output is not None:\n net.fc = torch.nn.LazyLinear(out_features=self.dim_output)\n else:\n net.fc = torch.nn.Identity()\n\n # add dropout-2d after layers 1, 2, and 3\n net.maxpool.add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n\n net.layer1[0].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n # net.layer1[1].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n net.layer1.add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n\n net.layer2[0].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n # net.layer2[1].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n net.layer2.add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n\n net.layer3[0].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n # net.layer3[1].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n net.layer3.add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n\n net.layer4[0].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n # net.layer4[1].add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n net.layer4.add_module(name='dropout2d', module=torch.nn.Dropout2d(p=self.dropout_prob))\n\n return net", "def classifier_layer(input_layer, input_rois, n_rois, nb_classes=4):\n\n\tpooling_regions = 7\n\t#input_shape = (n_rois, 7, 7, 512)\n\n\t# out_roi_pool.shape = (1, n_rois, channels, pool_size, pool_size)\n\t# n_rois (4) 7x7 roi pooling\n\tout_roi_pool = RoiPoolingConv(\n\t\tpooling_regions,\n\t\tn_rois\n\t)([input_layer, input_rois])\n\n\t# Flatten the convolutional layer and connected to 2 FC and 2 dropout\n\tout = TimeDistributed(Flatten(name='flatten'))(out_roi_pool)\n\tout = TimeDistributed(Dense(4096, activation='relu', name='fc1'))(out)\n\tout = TimeDistributed(Dropout(0.5))(out)\n\tout = TimeDistributed(Dense(4096, activation='relu', name='fc2'))(out)\n\tout = TimeDistributed(Dropout(0.5))(out)\n\n\t# There are two output layer\n\t# out_class: softmax acivation function for classify the class name of the object\n\t# out_regr: linear activation function for bboxes coordinates regression\n\tout_class = 
TimeDistributed(\n\t\tDense(\n\t\t\tnb_classes,\n\t\t\tactivation='softmax',\n\t\t\tkernel_initializer='zero'\n\t\t),\n\t\tname='dense_class_{}'.format(nb_classes)\n\t)(out)\n\t\n\t# note: no regression target for bg class\n\tout_regr = TimeDistributed(\n\t\tDense(\n\t\t\t4 * (nb_classes-1),\n\t\t\tactivation='linear',\n\t\t\tkernel_initializer='zero'\n\t\t),\n\t\tname='dense_regress_{}'.format(nb_classes)\n\t)(out)\n\n\treturn [out_class, out_regr]", "def __call__(self, inputs, training):\n\n self.training = training\n input_shape = inputs.shape\n if self.data_format == 'channels_first':\n img_size = (input_shape[2], input_shape[3])\n else:\n img_size = (input_shape[1], input_shape[2])\n\n with self._model_variable_scope('ssd300_model'):\n if self.data_format == 'channels_first':\n # Convert the inputs from channels_last (NHWC) to channels_first (NCHW).\n # This provides a large performance boost on GPU. See\n # https://www.tensorflow.org/performance/performance_guide#data_formats\n inputs = tf.transpose(inputs, [0, 3, 1, 2])\n\n net = super(Model, self).__call__(inputs, training)\n\n with self._model_variable_scope('ssd300_model'):\n\n net = self._atrous_convolution_2d(net, filters=1024,\n kernel_size=3,\n atrous_rate=6, name='fc6')\n\n net = self._conv2d(net, filters=1024, kernel_size=1,\n padding='same', name='fc7')\n\n net = self._conv2d(net, filters=256, kernel_size=1,\n padding='same', name='conv6_1')\n\n net = self._conv2d(net, filters=512, kernel_size=3,\n strides=2,\n padding='same', name='conv6_2')\n\n net = self._conv2d(net, filters=128, kernel_size=1,\n padding='same', name='conv7_1')\n\n net = self._conv2d(fixed_padding(net, 3, self.data_format),\n filters=256, kernel_size=3,\n strides=2,\n padding='valid', name='conv7_2')\n\n net = self._conv2d(net, filters=128, kernel_size=1,\n padding='same', name='conv8_1')\n\n net = self._conv2d(net, filters=256, kernel_size=3,\n strides=2,\n padding='same', name='conv8_2')\n\n if self.data_format == 'channels_first':\n net = tf.reduce_mean(net, [2, 3])\n else:\n net = tf.reduce_mean(net, [1, 2])\n self.layers['pool6'] = net\n\n # Prediction from conv4_3\n conv4_3_norm = self._normalize(net, 20, name='conv4_3_norm')\n num_priors = 3\n x = self._conv2d(conv4_3_norm, filters=num_priors * 4, kernel_size=3,\n padding='same', name='conv4_3_norm_mbox_loc')\n self.layers['conv4_3_norm_mbox_loc_flat'] = tf.layers.flatten(x, name='conv4_3_norm_mbox_loc_flat')\n\n x = self._conv2d(conv4_3_norm, filters=num_priors * self.num_classes,\n kernel_size=3, padding='same',\n name='conv4_3_norm_mbox_conf')\n self.layers['conv4_3_norm_mbox_conf_flat'] = tf.layers.flatten(x, name='conv4_3_norm_mbox_conf_flat')\n\n prior_box = PriorBox(img_size, min_size=30.0, aspect_ratios=[2],\n variances=[0.1, 0.1, 0.2, 0.2],\n name='conv4_3_norm_mbox_priorbox')\n net['conv4_3_norm_mbox_priorbox'] = prior_box(conv4_3_norm)\n\n return net", "def basic_block(x, num_features, cfg, name):\n x = Conv1D(num_features, kernel_size=3, padding='same', use_bias=True,\n kernel_regularizer=l2(cfg.weight_decay), kernel_initializer=taejun_uniform(), name=f'{name}_conv')(x)\n x = BatchNormalization(name=f'{name}_norm')(x)\n x = Activation('relu', name=f'{name}_relu')(x)\n x = MaxPool1D(pool_size=3, name=f'{name}_pool')(x)\n return x", "def define(self, optimizer = Adam(lr=1e-5)): \n \n self.optimizer = optimizer\n\n model = Sequential()\n\n #Layer 1\n model.add(Conv2D( filters = 96, \n kernel_size = (11,11), \n strides = 4, \n padding = 'same', \n activation = 'relu', \n input_shape = 
(224, 224, 3), \n kernel_initializer = 'he_normal'))\n model.add(MaxPooling2D( pool_size = (3,3), \n strides = (2,2), \n padding= 'same', \n data_format = None)) # overlapping pooling\n #Layer 2\n model.add(Conv2D( filters = 256, \n kernel_size = (5,5), \n strides = 1, \n padding = 'same', \n activation = 'relu', \n kernel_initializer = 'he_normal'))\n model.add(MaxPooling2D( pool_size = (3,3), \n strides = (2,2), \n padding= 'same', \n data_format = None)) \n #Layer 3\n model.add(Conv2D( filters = 384, \n kernel_size = (3,3), \n strides = 1, padding = 'same', \n activation = 'relu', kernel_initializer = 'he_normal'))\n #Layer 4\n model.add(Conv2D( filters = 384, \n kernel_size = (3,3), \n strides = 1, padding = 'same', \n activation = 'relu', \n kernel_initializer = 'he_normal'))\n #Layer 5\n model.add(Conv2D( filters = 256, \n kernel_size = (3,3), \n strides = 1, padding = 'same', \n activation = 'relu', \n kernel_initializer = 'he_normal'))\n #Layer 6\n model.add(MaxPooling2D( pool_size = (3,3), \n strides = (2,2), \n padding= 'same', \n data_format = None))\n \n #Layer 7\n model.add(Flatten())\n \n #Layer 8\n model.add(Dense( units = 4096, activation = 'relu'))\n model.add(Dense( units = 1024, activation = 'relu'))\n model.add(Dense( units = 512, activation = 'relu'))\n model.add(Dense( units = 256, activation = 'relu'))\n model.add(Dense( units = 128, activation = 'relu'))\n \n #Layer end\n model.add(Dense( units = 3, activation = 'softmax'))\n model.summary()\n \n self.model = model", "def run_net(self,\n pre_trained_chckpnt_dir ='' #for resuming training, load the model from this directory\n ):\n\n _rd = _read_data(data=self.data)\n\n self.alpha_coeff=1\n\n #read path of the images for train, test, and validation\n train_CTs, train_GTVs, train_Torso, train_penalize, train_surface,\\\n validation_CTs, validation_GTVs, validation_Torso, validation_penalize, validation_surface,\\\n test_CTs, test_GTVs, test_Torso, test_penalize,test_surface=_rd.read_data_path(fold=self.fold)\n self.img_width = self.img_width\n self.img_height = self.img_height\n # ======================================\n #validation instances\n bunch_of_images_no=20\n _image_class_vl = image_class(validation_CTs, validation_GTVs, validation_Torso,validation_penalize,validation_surface\n , bunch_of_images_no=bunch_of_images_no, is_training=0,\n patch_window=self.patch_window)\n _patch_extractor_thread_vl = _patch_extractor_thread(_image_class=_image_class_vl,\n sample_no=self.sample_no, patch_window=self.patch_window,\n GTV_patchs_size=self.GTV_patchs_size,\n tumor_percent=self.tumor_percent,\n img_no=bunch_of_images_no,\n mutex=settings.mutex,is_training=0,vl_sample_no=self.validation_samples\n )\n _fill_thread_vl = fill_thread(validation_CTs,\n validation_GTVs,\n validation_Torso,\n validation_penalize,\n validation_surface,\n _image_class_vl,\n sample_no=self.sample_no,\n total_sample_no=self.validation_samples,\n patch_window=self.patch_window,\n GTV_patchs_size=self.GTV_patchs_size,\n img_width=self.img_width, img_height=self.img_height,\n mutex=settings.mutex,\n tumor_percent=self.tumor_percent,\n is_training=0,\n patch_extractor=_patch_extractor_thread_vl,\n fold=self.fold)\n\n\n _fill_thread_vl.start()\n _patch_extractor_thread_vl.start()\n _read_thread_vl = read_thread(_fill_thread_vl, mutex=settings.mutex,\n validation_sample_no=self.validation_samples, is_training=0)\n _read_thread_vl.start()\n # ======================================\n #training instances\n bunch_of_images_no = 24\n _image_class = 
image_class(train_CTs, train_GTVs, train_Torso,train_penalize,train_surface\n , bunch_of_images_no=bunch_of_images_no,is_training=1,patch_window=self.patch_window\n )\n patch_extractor_thread = _patch_extractor_thread(_image_class=_image_class,\n sample_no=240, patch_window=self.patch_window,\n GTV_patchs_size=self.GTV_patchs_size,\n tumor_percent=self.tumor_percent,\n img_no=bunch_of_images_no,\n mutex=settings.mutex,is_training=1)\n _fill_thread = fill_thread(train_CTs, train_GTVs, train_Torso,train_penalize,train_surface,\n _image_class,\n sample_no=self.sample_no,total_sample_no=self.sample_no,\n patch_window=self.patch_window,\n GTV_patchs_size=self.GTV_patchs_size,\n img_width=self.img_width,\n img_height=self.img_height,mutex=settings.mutex,\n tumor_percent=self.tumor_percent,\n is_training=1,\n patch_extractor=patch_extractor_thread,\n fold=self.fold)\n\n _fill_thread.start()\n patch_extractor_thread.start()\n\n _read_thread = read_thread(_fill_thread,mutex=settings.mutex,is_training=1)\n _read_thread.start()\n # ======================================\n\n image = tf.placeholder(tf.float32, shape=[None, None, None, None, 1])\n label = tf.placeholder(tf.float32, shape=[None, None, None, None, 2])\n penalize = tf.placeholder(tf.float32, shape=[None, None, None, None,1])\n surf_map = tf.placeholder(tf.float32, shape=[None, None, None, None,1])\n loss_coef = tf.placeholder(tf.float32, shape=[None, 2]) # shape: batchno * 2 values for each class\n alpha = tf.placeholder(tf.float32, name='alpha') # background coeff\n beta = tf.placeholder(tf.float32, name='beta') # tumor coeff\n\n ave_vali_acc=tf.placeholder(tf.float32)\n ave_loss_vali=tf.placeholder(tf.float32)\n ave_dsc_vali=tf.placeholder(tf.float32)\n\n dropout=tf.placeholder(tf.float32,name='dropout')\n is_training = tf.placeholder(tf.bool, name='is_training')\n is_training_bn = tf.placeholder(tf.bool, name='is_training_bn')\n dense_net_dim = tf.placeholder(tf.int32, name='dense_net_dim')\n\n _dn = _densenet_unet(self.densnet_unet_config,self.compression_coefficient,self.growth_rate) #create object\n y=_dn.dens_net(image=image,is_training=is_training,dropout_rate1=0,dropout_rate2=0,dim=dense_net_dim,is_training_bn=is_training_bn)\n # y = _dn.vgg(image)\n\n y_dirX = ((y[:, int(self.GTV_patchs_size / 2), :, :, 0, np.newaxis]))\n label_dirX = (label[:, int(self.GTV_patchs_size / 2), :, :, 0, np.newaxis])\n penalize_dirX = (penalize[:,16,:,:,0,np.newaxis])\n surf_map_dirX = (surf_map[:,16,:,:,0,np.newaxis])\n image_dirX = ((image[:, int(self.patch_window / 2), :, :, 0, np.newaxis]))\n\n show_img=tf.nn.softmax(y)[:, int(self.GTV_patchs_size / 2) , :, :, 0, np.newaxis]\n tf.summary.image('outprunut',show_img , 3)\n tf.summary.image('output without softmax',y_dirX ,3)\n tf.summary.image('groundtruth', label_dirX,3)\n tf.summary.image('penalize', penalize_dirX,3)\n tf.summary.image('surf_map', surf_map_dirX,3)\n tf.summary.image('image',image_dirX ,3)\n\n print('*****************************************')\n print('*****************************************')\n print('*****************************************')\n sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))\n devices = sess.list_devices()\n print(devices)\n\n print(device_lib.list_local_devices())\n print('*****************************************')\n print('*****************************************')\n print('*****************************************')\n\n train_writer = tf.summary.FileWriter(self.LOGDIR + '/train' ,graph=tf.get_default_graph())\n validation_writer = 
tf.summary.FileWriter(self.LOGDIR + '/validation' , graph=sess.graph)\n\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n saver=tf.train.Saver(tf.global_variables(), max_to_keep=1000)\n\n\n\n #define the loss function\n with tf.name_scope('cost'):\n penalize_weight=0\n [ penalized_loss,\n soft_dice_coef,logt,lbl]=self.loss_instance.dice_plus_distance_penalize(logits=y, labels=label,penalize=penalize)\n surface_loss= self.loss_instance.surface_loss(logits=y, labels=label, surf_map=surf_map)\n cost = tf.reduce_mean((1.0 - soft_dice_coef[1])+penalize_weight*penalized_loss+surface_loss, name=\"cost\")\n\n #Setup the Tensorboard plots\n tf.summary.scalar(\"cost\", cost)\n f1_measure = self.loss_instance.f1_measure(logits=y, labels=label)\n tf.summary.scalar(\"dice_bakground\", f1_measure[0])\n tf.summary.scalar(\"dice_tumor\", f1_measure[1])\n\n pwc = self.loss_instance.PWC(y, label)\n tf.summary.scalar(\"pwc_bakground\", pwc[0])\n tf.summary.scalar(\"pwc_tumor\", pwc[1])\n\n recall = self.loss_instance.Recall(y, label)\n tf.summary.scalar(\"recall_bakground\", recall[0])\n tf.summary.scalar(\"recall_tumor\", recall[1])\n\n precision = self.loss_instance.Precision(y, label)\n tf.summary.scalar(\"precision_bakground\", precision[0])\n tf.summary.scalar(\"precision_tumor\", precision[1])\n\n fpr = self.loss_instance.FPR(y, label)\n tf.summary.scalar(\"FPR_bakground\", fpr[0])\n tf.summary.scalar(\"FPR_tumor\", fpr[1])\n\n fnr = self.loss_instance.FNR(y, label)\n tf.summary.scalar(\"FNR_bakground\", fnr[0])\n tf.summary.scalar(\"FNR_tumor\", fnr[1])\n\n extra_update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)\n with tf.control_dependencies(extra_update_ops):\n optimizer_tmp = tf.train.AdamOptimizer(self.learning_rate,epsilon=0.001)\n optimizer = optimizer_tmp.minimize(cost)\n\n with tf.name_scope('validation'):\n average_validation_accuracy=ave_vali_acc\n average_validation_loss=ave_loss_vali\n average_dsc_loss=ave_dsc_vali\n tf.summary.scalar(\"average_validation_accuracy\",average_validation_accuracy)\n tf.summary.scalar(\"average_validation_loss\",average_validation_loss)\n tf.summary.scalar(\"average_dsc_loss\",average_dsc_loss)\n\n with tf.name_scope('accuracy'):\n accuracy=self.loss_instance.accuracy_fn(y, label)\n\n tf.summary.scalar(\"accuracy\", accuracy)\n\n sess.run(tf.global_variables_initializer())\n logging.debug('total number of variables %s' % (\n np.sum([np.prod(v.get_shape().as_list()) for v in tf.trainable_variables()])))\n summ=tf.summary.merge_all()\n\n point = 0 # starting point, starts from a value > 0 if training is resumed\n itr1 = 0 # number of iterations\n if len(pre_trained_chckpnt_dir):\n ckpt = tf.train.get_checkpoint_state(pre_trained_chckpnt_dir)\n saver.restore(sess, ckpt.model_checkpoint_path)\n point=int(ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1])\n itr1=point\n\n\n # patch_radius = 49\n '''loop for epochs'''\n\n for epoch in range(self.total_epochs):\n while self.no_sample_per_each_itr*int(point/self.no_sample_per_each_itr)<self.sample_no:\n print('0')\n print(\"epoch #: %d\" %(epoch))\n startTime = time.time()\n step = 0\n self.beta_coeff=1+1 * np.exp(-point/2000)\n # =============start validation================\n if itr1 % self.display_validation_step ==0:\n '''Validation: '''\n loss_validation = 0\n acc_validation = 0\n validation_step = 0\n dsc_validation=0\n while (validation_step * self.batch_no_validation <settings.validation_totalimg_patch):\n [validation_CT_image, 
validation_GTV_image,validation_Penalize_patch,validation_Surface_patch] = _image_class_vl.return_patches_validation( validation_step * self.batch_no_validation, (validation_step + 1) *self.batch_no_validation)\n if (len(validation_CT_image)<self.batch_no_validation) | (len(validation_GTV_image)<self.batch_no_validation) | (len(validation_Penalize_patch)<self.batch_no_validation) | (len(validation_Surface_patch)<self.batch_no_validation) :\n _read_thread_vl.resume()\n time.sleep(0.5)\n continue\n\n validation_CT_image_patchs = validation_CT_image\n validation_GTV_label = validation_GTV_image\n tic=time.time()\n\n [acc_vali, loss_vali,dsc_vali,surface_loss1] = sess.run([accuracy, cost,f1_measure,surface_loss],\n feed_dict={image: validation_CT_image_patchs,\n label: validation_GTV_label,\n penalize: validation_Penalize_patch,\n dropout: 1,\n is_training: False,\n ave_vali_acc: -1,\n ave_loss_vali: -1,\n ave_dsc_vali:-1,\n dense_net_dim: self.patch_window,\n is_training_bn:False,\n alpha:1,\n beta:1,\n surf_map:validation_Surface_patch,\n })\n elapsed=time.time()-tic\n\n acc_validation += acc_vali\n loss_validation += loss_vali\n dsc_validation+=dsc_vali[1]\n validation_step += 1\n if np.isnan(dsc_validation) or np.isnan(loss_validation) or np.isnan(acc_validation):\n print('nan problem')\n process = psutil.Process(os.getpid())\n\n print(\n '%d - > %d: elapsed_time:%d acc_validation: %f, loss_validation: %f, memory_percent: %4s' % (\n validation_step,validation_step * self.batch_no_validation\n , elapsed, acc_vali, loss_vali, str(process.memory_percent()),\n ))\n\n settings.queue_isready_vl = False\n acc_validation = acc_validation / (validation_step)\n loss_validation = loss_validation / (validation_step)\n dsc_validation = dsc_validation / (validation_step)\n if np.isnan(dsc_validation) or np.isnan(loss_validation) or np.isnan(acc_validation):\n print('nan problem')\n _fill_thread_vl.kill_thread()\n print('******Validation, step: %d , accuracy: %.4f, loss: %f*******' % (\n itr1, acc_validation, loss_validation))\n\n [sum_validation] = sess.run([summ],\n feed_dict={image: validation_CT_image_patchs,\n label: validation_GTV_label,\n penalize: validation_Penalize_patch,\n dropout: 1,\n is_training: False,\n ave_vali_acc: acc_validation,\n ave_loss_vali: loss_validation,\n ave_dsc_vali:dsc_validation,\n dense_net_dim: self.patch_window,\n is_training_bn: False,\n alpha: 1,\n beta: 1,\n surf_map: validation_Surface_patch,\n\n })\n validation_writer.add_summary(sum_validation, point)\n print('end of validation---------%d' % (point))\n\n #loop for training batches\n while(step*self.batch_no<self.no_sample_per_each_itr):\n [train_CT_image_patchs, train_GTV_label, train_Penalize_patch,loss_coef_weights,train_Surface_patch] = _image_class.return_patches( self.batch_no)\n\n if (len(train_CT_image_patchs)<self.batch_no)|(len(train_GTV_label)<self.batch_no)\\\n |(len(train_Penalize_patch)<self.batch_no)|(len(train_Surface_patch)<self.batch_no):\n time.sleep(0.5)\n _read_thread.resume()\n continue\n\n tic=time.time()\n [acc_train1, loss_train1, optimizing,out,dsc_train11] = sess.run([accuracy, cost, optimizer,y,f1_measure],\n feed_dict={image: train_CT_image_patchs,\n label: train_GTV_label,\n penalize: train_Penalize_patch,\n # loss_coef: loss_coef_weights,\n dropout: self.dropout_keep,\n is_training: True,\n ave_vali_acc: -1,\n ave_loss_vali: -1,\n ave_dsc_vali: -1,\n dense_net_dim: self.patch_window,\n is_training_bn: True,\n alpha: self.alpha_coeff,\n beta: self.beta_coeff,\n surf_map: 
train_Surface_patch,\n\n })\n elapsed=time.time()-tic\n dsc_train1=dsc_train11[1]\n\n self.x_hist=self.x_hist+1\n # np.hstack((self.x_hist, [np.ceil(\n\n [sum_train] = sess.run([summ],\n feed_dict={image: train_CT_image_patchs,\n label: train_GTV_label,\n penalize: train_Penalize_patch,\n dropout: self.dropout_keep, is_training: True,\n ave_vali_acc: acc_train1,\n ave_loss_vali: loss_train1,\n ave_dsc_vali: dsc_train1,\n dense_net_dim: self.patch_window,\n is_training_bn: True,\n alpha: self.alpha_coeff,\n beta: self.beta_coeff,\n surf_map: train_Surface_patch,\n\n })\n train_writer.add_summary(sum_train,point)\n step = step + 1\n\n process = psutil.Process(os.getpid())\n\n print(\n 'point: %d, elapsed_time:%d step*self.batch_no:%f , LR: %.15f, acc_train1:%f, loss_train1:%f,memory_percent: %4s' % (\n int((point)),elapsed,\n step * self.batch_no, self.learning_rate, acc_train1, loss_train1,\n str(process.memory_percent())))\n\n\n point=int((point))\n if point%100==0:\n '''saveing model inter epoch'''\n chckpnt_path = os.path.join(self.chckpnt_dir,\n ('densenet_unet_inter_epoch%d_point%d.ckpt' % (epoch, point)))\n saver.save(sess, chckpnt_path, global_step=point)\n itr1 = itr1 + 1\n point=point+1\n endTime = time.time()\n\n #==============\n '''saveing model after each epoch'''\n chckpnt_path = os.path.join(self.chckpnt_dir, 'densenet_unet.ckpt')\n saver.save(sess, chckpnt_path, global_step=epoch)\n print(\"End of epoch----> %d, elapsed time: %d\" % (epoch, endTime - startTime))", "def add_conv_layer(self, input_layer, hyperparams, func='relu', bn=True):\n W = self._weight_variable(shape=hyperparams[0])\n b = self._bias_variable(shape=hyperparams[1])\n if bn:\n return self._batch_normalize(\n self._nonlinearity(func)(self._conv2d(input_layer, W) + b))\n elif not bn:\n return self._nonlinearity(func)(self._conv2d(input_layer, W) + b)", "def Classify(x, reuse=False, keepProb=1.0,isNankai=False):\n with tf.variable_scope('Classify') as scope: \n if reuse:\n scope.reuse_variables()\n \n # 1st layer\n w1 = weight_variable('w1',[dInput,nHidden])\n bias1 = bias_variable('bias1',[nHidden])\n h1 = fc_relu(x,w1,bias1,keepProb)\n \n # 2nd layer\n w2 = weight_variable('w2',[nHidden,nHidden2])\n bias2 = bias_variable('bias2',[nHidden2])\n h2 = fc_relu(h1,w2,bias2,keepProb) \n \n # 3nd layer\n w3 = weight_variable('w3',[nHidden2,nHidden3])\n bias3 = bias_variable('bias3',[nHidden3])\n h3 = fc_relu(h2,w3,bias3,keepProb) \n\n \n # Toy\n if dataMode == 0:\n # 3rd layar\n w3 = weight_variable('w3',[nHidden2,nClass])\n bias3 = bias_variable('bias3',[nClass])\n y = fc(h2,w3,bias3,keepProb)\n # Nankai\n else:\n # 4th layer\n w4_1 = weight_variable('w4_1',[nHidden3,nClass])\n bias4_1 = bias_variable('bias4_1',[nClass])\n \n w4_2 = weight_variable('w4_2',[nHidden3,nClass])\n bias4_2 = bias_variable('bias4_2',[nClass])\n \n w4_3 = weight_variable('w4_3',[nHidden3,nClass])\n bias4_3 = bias_variable('bias4_3',[nClass])\n \n y1 = fc(h3,w4_1,bias4_1,keepProb)\n y2 = fc(h3,w4_2,bias4_2,keepProb)\n y3 = fc(h3,w4_3,bias4_3,keepProb)\n # [number of data, number of class, cell(=3)]\n y = tf.concat((tf.expand_dims(y1,2),tf.expand_dims(y2,2),tf.expand_dims(y3,2)),2)\n \n # shape=[None,number of class]\n return y", "def entangling_layer(nqubits):\n # In other words it should apply something like :\n # CNOT CNOT CNOT CNOT... CNOT\n # CNOT CNOT CNOT... 
CNOT\n for i in range(0, nqubits - 1, 2): # Loop over even indices: i=0,2,...N-2\n qml.CNOT(wires=[i, i + 1])\n for i in range(1, nqubits - 1, 2): # Loop over odd indices: i=1,3,...N-3\n qml.CNOT(wires=[i, i + 1])", "def modelbuilder():\n model = Sequential()\n # Add a convolution layer with with a sigmoid activation function\n model.add(layers.Conv2D(1, (2, 2), strides=(1, 1), activation='sigmoid', padding='same', input_shape=(256, 256, 3)))\n model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])\n model.summary()\n return model", "def build_layers(self):\n raise NotImplementedError", "def build(self, hp):\n\n model = Sequential()\n model.add(Conv2D(filters=hp.Choice('num_filters_0', values=[8, 16, 32, 64]),\n kernel_size=hp.Choice('kernel_size_0', values=[3, 4, 5]),\n activation=hp.Choice('activation_0', values=['relu', 'tanh']),\n input_shape=self.input_shape))\n\n for i in range(hp.Int('num_layers', 1, 3)):\n model.add(Conv2D(filters=hp.Choice('num_filters_%d' % (i + 1), values=[8, 16, 32, 64]),\n kernel_size=hp.Choice('kernel_size_%d' % (i + 1), values=[3, 4, 5]),\n activation=hp.Choice('activation_%d' % (i + 1), values=['relu', 'tanh'])))\n model.add(Flatten())\n model.add(Dense(N_zern))\n model.summary()\n\n model.compile(optimizer=keras.optimizers.Adam(hp.Choice('learning_rate', values=[1e-3, 5e-4, 1e-4])),\n loss='mean_squared_error')\n return model", "def test_ctor(self):\r\n # the network model itself\r\n model = densenet.DenseNet(\r\n depth=40,\r\n Block=densenet.BasicBlock,\r\n growth_rate=12,\r\n compression_rate=1.0,\r\n mask=True,\r\n num_classes=100,\r\n )\r\n num_params = model_utils.get_model_num_params(model)\r\n\r\n self.assertAlmostEqual(num_params, 1.06, places=1) # around 1.7\r\n self.assertEqual(model_utils.get_num_conv2d_layers(model), 40)", "def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7,\n hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0,\n dtype=np.float32):\n self.params = {}\n self.reg = reg\n self.dtype = dtype\n \n ############################################################################\n # TODO: Initialize weights and biases for the three-layer convolutional #\n # network. Weights should be initialized from a Gaussian with standard #\n # deviation equal to weight_scale; biases should be initialized to zero. #\n # All weights and biases should be stored in the dictionary self.params. 
#\n ############################################################################\n \n # Store weights and biases for the convolutional layer using the keys 'W1' and 'b1'; \n C, H, W = input_dim\n filter_sizes = (filter_size, filter_size)\n self.params['W1'] = np.random.normal(0, weight_scale, [num_filters, C, filter_sizes[0], filter_sizes[1]])\n self.params['b1'] = np.zeros((num_filters, ))\n\n # use keys 'W2' and 'b2' for the weights and biases of the hidden affine layer;\n # In this case, ConvLayer doesn't reduce the spatial size of the input, (N, C, H, W) -> Conv -> (N, F, H, W)\n # To satisfy this constraint, (W + 2 * pad - filter_size) / stride + 1 = W need to hold, which led to pad = (F - S) / 2 where S == 1\n # (N, C, H, W) -> Conv -> (N, F, H, W) -> Pooling -> (N, F, H/2, W/2)\n # In a FC_NN, FCL weights (input_dim, hidden_dim) where every img is flatten into a 1D array of length D = F * H/2 * W/2.\n self.params['W2'] = np.random.normal(0, weight_scale, [num_filters * (H / 2) * (W / 2), hidden_dim])\n self.params['b2'] = np.zeros((hidden_dim, ))\n\n # And the keys 'W3' and 'b3' for the weights and biases of the output affine layer. \n self.params['W3'] = np.random.normal(0, weight_scale, [hidden_dim, num_classes])\n self.params['b3'] = np.zeros((num_classes, ))\n\n \n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def basic_block(self, input_feats, name1, name2, is_train, use_batch_norm, c, s=2):\n branch1_feats = convolution_no_bias(input_feats, 1, 1, 4*c, s, s, name1+'_branch1')\n branch1_feats = batch_norm(branch1_feats, name2+'_branch1', is_train, use_batch_norm)\n\n branch2a_feats = convolution_no_bias(input_feats, 1, 1, c, s, s, name1+'_branch2a')\n branch2a_feats = batch_norm(branch2a_feats, name2+'_branch2a', is_train, use_batch_norm)\n branch2a_feats = nonlinear(branch2a_feats, 'relu')\n\n branch2b_feats = convolution_no_bias(branch2a_feats, 3, 3, c, 1, 1, name1+'_branch2b')\n branch2b_feats = batch_norm(branch2b_feats, name2+'_branch2b', is_train, use_batch_norm)\n branch2b_feats = nonlinear(branch2b_feats, 'relu')\n\n branch2c_feats = convolution_no_bias(branch2b_feats, 1, 1, 4*c, 1, 1, name1+'_branch2c')\n branch2c_feats = batch_norm(branch2c_feats, name2+'_branch2c', is_train, use_batch_norm)\n\n output_feats = branch1_feats + branch2c_feats\n output_feats = nonlinear(output_feats, 'relu')\n return output_feats", "def __init__(self, mod_param, epochNb=3):\n tf.reset_default_graph()\n weight_path = '../caffe_layers_value.pickle'\n model_path = mod_param.paths[\"save_model\"]\n model_path += '-'+str(epochNb)\n self.mod_param = mod_param\n self.labels = mod_param.labels\n self.n_labels = mod_param.n_labels\n \n # Initialize some tensorflow variables\n self.images_tf = tf.placeholder( tf.float32, [None, None, None, 3], name=\"images\")\n self.labels_tf = tf.placeholder( tf.int64, [None], name='labels')\n \n detector = Detector(mod_param)\n c1,c2,c3,c4,conv5, self.conv6, self.gap, self.output = detector.inference( self.images_tf )\n self.detector = detector\n \n self.sess = tf.InteractiveSession()\n saver = tf.train.Saver()\n saver.restore( self.sess, model_path )\n\n if re.match(\".*CAM.*_W_S\", self.mod_param.mod_type):\n self.classmap = self.detector.get_classmap( self.labels_tf, self.conv6 )", "def construct_model():\n import lbann\n\n # Layer graph\n input = 
lbann.Input(target_mode='N/A', name='inp_data')\n # data is 64*64*4 images + 15 scalar + 5 param\n #inp_slice = lbann.Slice(input, axis=0, slice_points=\"0 16399 16404\",name='inp_slice')\n inp_slice = lbann.Slice(input, axis=0, slice_points=str_list([0,args.ydim,args.ydim+5]),name='inp_slice')\n gt_y = lbann.Identity(inp_slice,name='gt_y')\n gt_x = lbann.Identity(inp_slice, name='gt_x') #param not used\n\n zero = lbann.Constant(value=0.0,num_neurons='1',name='zero')\n one = lbann.Constant(value=1.0,num_neurons='1',name='one')\n\n z_dim = 20 #Latent space dim\n\n z = lbann.Gaussian(mean=0.0,stdev=1.0, neuron_dims=\"20\")\n model = macc_models.MACCWAE(args.zdim,args.ydim,cf=args.mcf,use_CNN=args.useCNN)\n d1_real, d1_fake, d_adv, pred_y = model(z,gt_y)\n\n d1_real_bce = lbann.SigmoidBinaryCrossEntropy([d1_real,one],name='d1_real_bce')\n d1_fake_bce = lbann.SigmoidBinaryCrossEntropy([d1_fake,zero],name='d1_fake_bce')\n d_adv_bce = lbann.SigmoidBinaryCrossEntropy([d_adv,one],name='d_adv_bce')\n img_loss = lbann.MeanSquaredError([pred_y,gt_y])\n rec_error = lbann.L2Norm2(lbann.WeightedSum([pred_y,gt_y], scaling_factors=\"1 -1\"))\n\n layers = list(lbann.traverse_layer_graph(input))\n # Setup objective function\n weights = set()\n src_layers = []\n dst_layers = []\n for l in layers:\n if(l.weights and \"disc0\" in l.name and \"instance1\" in l.name):\n src_layers.append(l.name)\n #freeze weights in disc2\n if(l.weights and \"disc1\" in l.name):\n dst_layers.append(l.name)\n for idx in range(len(l.weights)):\n l.weights[idx].optimizer = lbann.NoOptimizer()\n weights.update(l.weights)\n l2_reg = lbann.L2WeightRegularization(weights=weights, scale=1e-4)\n d_adv_bce = lbann.LayerTerm(d_adv_bce,scale=0.01)\n obj = lbann.ObjectiveFunction([d1_real_bce,d1_fake_bce,d_adv_bce,img_loss,rec_error,l2_reg])\n # Initialize check metric callback\n metrics = [lbann.Metric(img_loss, name='recon_error')]\n #pred_y = macc_models.MACCWAE.pred_y_name\n callbacks = [lbann.CallbackPrint(),\n lbann.CallbackTimer(),\n lbann.CallbackSaveModel(dir=args.dump_models),\n lbann.CallbackReplaceWeights(source_layers=list2str(src_layers),\n destination_layers=list2str(dst_layers),\n batch_interval=2)]\n\n if(args.ltfb_batch_interval > 0) :\n callbacks.append(lbann.CallbackLTFB(batch_interval=args.ltfb_batch_interval,metric='recon_error',\n low_score_wins=True,\n exchange_hyperparameters=True))\n\n # Construct model\n return lbann.Model(args.num_epochs,\n serialize_io=True,\n weights=weights,\n layers=layers,\n metrics=metrics,\n objective_function=obj,\n callbacks=callbacks)", "def __init__(self, depth=7, feature_size=512, use_eql=True, gpu_parallelize=False):\r\n from torch.nn import ModuleList\r\n from CustomLayers import DisGeneralConvBlock, DisFinalBlock, _equalized_conv2d\r\n from torch.nn import Conv2d\r\n\r\n super().__init__()\r\n\r\n assert feature_size != 0 and ((feature_size & (feature_size - 1)) == 0), \\\r\n \"latent size not a power of 2\"\r\n if depth >= 4:\r\n assert feature_size >= np.power(2, depth - 4), \\\r\n \"feature size cannot be produced\"\r\n\r\n # create state of the object\r\n self.gpu_parallelize = gpu_parallelize\r\n self.use_eql = use_eql\r\n self.depth = depth\r\n self.feature_size = feature_size\r\n\r\n # create the fromRGB layers for various inputs:\r\n if self.use_eql:\r\n def from_rgb(out_channels):\r\n return _equalized_conv2d(1, out_channels, (1, 1), bias=True)\r\n else:\r\n def from_rgb(out_channels):\r\n return Conv2d(1, out_channels, (1, 1), bias=True)\r\n\r\n self.rgb_to_features = 
ModuleList()\r\n self.final_converter = from_rgb(self.feature_size // 2)\r\n\r\n # create a module list of the other required general convolution blocks\r\n self.layers = ModuleList()\r\n self.final_block = DisFinalBlock(self.feature_size, use_eql=self.use_eql)\r\n\r\n # create the remaining layers\r\n for i in range(self.depth - 1):\r\n if i > 2:\r\n layer = DisGeneralConvBlock(\r\n int(self.feature_size // np.power(2, i - 2)),\r\n int(self.feature_size // np.power(2, i - 2)),\r\n use_eql=self.use_eql\r\n )\r\n rgb = from_rgb(int(self.feature_size // np.power(2, i - 1)))\r\n else:\r\n layer = DisGeneralConvBlock(self.feature_size, self.feature_size // 2,\r\n use_eql=self.use_eql)\r\n rgb = from_rgb(self.feature_size // 2)\r\n\r\n self.layers.append(layer)\r\n self.rgb_to_features.append(rgb)\r\n\r\n # just replace the last converter\r\n self.rgb_to_features[self.depth - 2] = \\\r\n from_rgb(self.feature_size // np.power(2, i - 2))\r\n\r\n # parallelize the modules from the module-lists if asked to:\r\n if self.gpu_parallelize:\r\n for i in range(len(self.layers)):\r\n self.layers[i] = torch.nn.DataParallel(self.layers[i])\r\n self.rgb_to_features[i] = torch.nn.DataParallel(\r\n self.rgb_to_features[i])\r\n\r\n # Note that since the FinalBlock contains the StdDev layer,\r\n # it cannot be parallelized so easily. It will have to be parallelized\r\n # from the Lower level (from CustomLayers). This much parallelism\r\n # seems enough for me.\r", "def baseUNet(input_shape,conv_depth,n_classes,init_w,dropout):\n inputs = Input(input_shape)\n\n c1=Conv2D(conv_depth,\n (3,3),\n activation='relu',\n padding='same',\n kernel_initializer=init_w)(inputs)\n\n c1=Conv2D(conv_depth,\n (3,3),\n activation='relu',\n padding=\"same\",\n kernel_initializer=init_w)(c1)\n\n # pool down to next layer\n pool1 = MaxPooling2D((2,2),strides = (2,2))(c1)\n\n conv_depth *= 2\n\n # convolute down again\n conv2 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool1)\n\n conv2 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv2)\n \n # pool down again\n pool2 = MaxPooling2D((2,2),strides = (2,2))(conv2)\n\n conv_depth *= 2 \n\n # Convolution\n conv3 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool2)\n\n conv3 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv3)\n \n # pool down\n pool3 = MaxPooling2D((2,2),strides = (2,2))(conv3)\n\n conv_depth *= 2 \n # Convolution\n conv4 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool3)\n\n conv4 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv4)\n \n # pool down \n pool4 = MaxPooling2D((2,2),strides = (2,2))(conv4)\n\n conv_depth *=2 \n\n # Convolution\n conv5 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(pool4)\n\n conv5 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv5)\n\n drop = Dropout(dropout)(conv5)\n\n conv_depth /= 2\n conv_depth = int(conv_depth) \n # 
do upsampling\n up1 = UpSampling2D(size = (2,2))(drop)\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up1)\n \n # add in skip info\n cat1 = concatenate([conv4,conv6],axis = 3)\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat1)\n\n conv6 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv6)\n\n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up2 = UpSampling2D(size = (2,2))(conv6)\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up2)\n \n # add in skip info\n cat2 = concatenate([conv3,conv7],axis = 3)\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat2)\n\n conv7 = Conv2D(conv_depth,\n activation = 'relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv7)\n \n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up3 = UpSampling2D(size = (2,2))(conv7)\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size=(3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up3)\n \n # add in skip info\n cat3 = concatenate([conv2,conv8],axis = 3)\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat3)\n\n conv8 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv8)\n \n conv_depth /= 2\n conv_depth = int(conv_depth)\n # do upsampling\n up4 = UpSampling2D(size = (2,2))(conv8)\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(up4)\n \n # add in skip info\n cat4 = concatenate([c1,conv9],axis = 3)\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(cat4)\n\n conv9 = Conv2D(conv_depth,\n activation ='relu',\n kernel_size = (3,3),\n strides = (1,1),\n padding = \"same\",\n kernel_initializer=init_w)(conv9)\n\n outputs = Conv2D(n_classes, 1, activation = 'softmax')(conv9)\n\n return outputs,inputs", "def model_CNN(x_train, y_train, x_test=None, y_test=None, kwargs={}):\n \"\"\"\n Notes on Input shape\n 4D tensor with shape (batch_size, timesteps, features, `colors`).\n 4D tensor with shape: (samples, rows, cols, channels)\n `channels_last` (default)\n Output 4D tensor with shape: (samples, new_rows, new_cols, filters)\n \"\"\"\n ######## CNN for stocks\n # create and fit CNN\n # input_shape = StockDate x Lookback x Features\n from keras.layers import Conv2D, MaxPooling2D\n from keras.optimizers import SGD\n\n\n layers = kwargs.get('layers', 10 ) #TODO\n nodes = kwargs.get('nodes', None) #TODO\n\n if nodes is None or nodes==0 or nodes==[0]:\n nodes = [np.shape(x_train)[1]*3]\n elif isinstance(nodes, (int, np.integer)): # turn int to list\n nodes = [nodes]\n\n if layers > 1 and len(nodes) < layers:\n nodes = list(np.pad(nodes,[0,layers-len(nodes)], mode='constant',constant_values=nodes[-1]))\n\n ndim = np.max([2,len(np.shape(x_train))]) # Min 2D\n if ndim==2:\n 
input_shape=(x_train.shape[1],)\n elif ndim==3:\n input_shape=(x_train.shape[1],x_train.shape[2])\n elif ndim==4:\n input_shape=(x_train.shape[1],x_train.shape[2],x_train.shape[3])\n else:\n input_shape=x_train.shape[1:]\n if kwargs.get('learning_rate', False):\n lr = kwargs.get('learning_rate')\n else:\n lr = False\n\n if False:\n conv = (3, 3)\n else:\n conv = (2, 2)\n n_conv = 5\n\n if np.ndim(y_train)==1:\n n_out = 1 #e.g. forecast y as float, just 1 step ahead.\n else:\n n_out = np.shape(y_train)[1] #e.g. onehot encoded, or n-steps ahead.\n\n dropout = kwargs.get('dropout',0) # dropout rate between 0 and 1.\n #stateful = kwargs.get('stateful',True)\n actvn = 'relu' #kwargs.get('actvn','relu')\n actvl = kwargs.get('actvl','sigmoid')\n model=[]\n model = Sequential() # https://keras.io/models/sequential/\n model.reset_states()\n # input: 100x100 images with 3 channels -> (100, 100, 3) tensors.\n # this applies 32 convolution filters of size 3x3 each.\n model.add(Conv2D(n_conv, conv, activation=actvn, input_shape=input_shape))\n #model.add(Conv2D(n_conv, conv, activation=actvn))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(dropout ))\n\n model.add(Conv2D(n_conv*2, conv, activation=actvn))\n #model.add(Conv2D(n_conv*2, conv, activation=actvn))\n #model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(dropout ))\n\n model.add(Flatten())\n model.add(Dense(np.min(input_shape), activation=actvn))\n model.add(Dropout(dropout*2))\n model.add(Dense(n_out, activation=actvl))\n\n if hasattr(kwargs,'optimizer'):\n optimizer = kwargs['optimizer']\n elif lr:\n optimizer = SGD(lr=lr, decay=1e-6, momentum=0.01, nesterov=True)\n else:\n optimizer = 'Nadam' #keras.optimizers.SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False)\n\n if is_bool_dtype(y_train):\n model.compile(loss='binary_crossentropy', optimizer=optimizer)\n if is_categorical_dtype(y_train) or kwargs.get('onehot',False):\n #TODO Multiple Category\n model.compile(loss='categorical_crossentropy', optimizer=optimizer)\n else:\n #model.compile(loss='mean_squared_error', optimizer=optimizer)\n model.compile(loss='mean_squared_error', optimizer=optimizer, metrics=[r2_keras])\n\n\n if kwargs.get('verbose',False) > 1:\n model.summary()\n print(\"Inputs: {}\".format(model.input_shape))\n print(\"Outputs: {}\".format(model.output_shape))\n print(\"Actual input: {}\".format(x_train.shape))\n print(\"Actual output: {}\".format(y_train.shape))\n print('Model Loss: ' + model.loss)\n\n # For compatability with other models;\n model.score = model.evaluate\n\n return model #self.model=model", "def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias):\r\n super(ResnetBlock, self).__init__()\r\n self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias)", "def build(self, input_layer, trainable=True):\n\n with tf.variable_scope(self.name):\n # Get the number of input channels\n input_shape = input_layer.get_shape()\n num_input_channels = input_shape[-1].value\n\n # Create the weights and convolutional layer\n weight_shape = [self.kernel_shape[0], self.kernel_shape[1], num_input_channels, self.num_kernels]\n\n# if self.name:\n# self.weights = weight_variable(weight_shape, 'W_'+self.name)\n# else:\n# self.weights = weight_variable(weight_shape)\n\n self.weights = weight_variable(weight_shape, 'weights', trainable)\n self.bias = bias_variable([self.num_kernels], 'bias', trainable)\n\n self.layer = tf.nn.conv2d(input_layer, self.weights, strides=[1, self.stride, self.stride, 1], 
padding=self.padding) + self.bias\n\n if self.activation_function:\n self.layer = self.activation_function(self.layer)\n\n return self.layer, self.weights, self.bias", "def __init__(self, \n n_neurons = \"micro\", # else: \"brunel\" or arrays\n C_ab = \"micro\", # else: \"brunel\" or arrays\n area = net.area, # simulation size\n neuron_model = net.neuron_model, # \"iaf_psc_delta\" or \"iaf_psc_exp\"\n connection_rule = net.connection_rule, # \"fixed_total_number\" or \"fixed_indegree\"\n j02 = net.j02, \n weight_rel_sd = net.weight_rel_sd, \n delay_rel_sd = net.delay_rel_sd, \n g = net.g, \n rate_ext = net.rate_ext):\n ###################################################\n ### \tNetwork parameters\t\t### \n ###################################################\n\n # area of network in mm^2; scales numbers of neurons\n # use 1 for the full-size network (77,169 neurons)\n self.area = area\n \n self.layers = net.layers #np.array([\"L23\", \"L4\", \"L5\", \"L6\"])\n self.types = net.types #np.array([\"e\", \"i\"]) \n self.populations = np.array([layer + typus for layer in self.layers for typus in self.types])\n self.n_populations = len(self.populations)\n self.n_layers = len(self.layers)\n self.n_types = len(self.types)\n \n # Neuron numbers\n if n_neurons == \"micro\":\n self.n_neurons = np.int_(net.full_scale_n_neurons * self.area)\n elif n_neurons == \"brunel\":\n # Provide an array of equal number of neurons in each exc./inh. population\n gamma = 0.25\n inh_factor = 1. / (gamma + 1.)\n exc_factor = 1. - inh_factor \n n_total_micro = np.sum(net.full_scale_n_neurons * self.area)\n N_exc = n_total_micro/self.n_populations * exc_factor\n N_inh = n_total_micro/self.n_populations * inh_factor\n self.n_neurons = np.tile([N_exc, N_inh], self.n_layers).astype(int)\n else:\n if type(n_neurons) == np.ndarray:\n if n_neurons.shape == (self.n_populations, ):\n self.n_neurons = np.int_(n_neurons)\n else:\n raise Exception(\"'n_neurons' has wrong shape. \"+\n \"Expects (%i,)\"%self.n_populations)\n else: \n raise Exception(\"'n_neurons' expects either numpy.ndarray or string \"+\n \"in {'micro', 'brunel'}\")\n self.n_total = np.sum(self.n_neurons)\n\n \n # Synapse numbers\n # Connection probabilities: conn_probs[post, pre] = conn_probs[target, source]\n conn_probs = net.conn_probs\n # Scale synapse numbers of the C_ab\n if net.scale_C_linearly:\n n_outer_full = np.outer(net.full_scale_n_neurons, net.full_scale_n_neurons)\n C_full_scale = np.log(1. - conn_probs) / np.log(1. - 1. / n_outer_full)\n C_scaled = np.int_(C_full_scale * self.area)\n else:\n n_outer = np.outer(self.n_neurons, self.n_neurons)\n C_scaled = np.int_(np.log(1. - conn_probs) / np.log(1. - 1. / n_outer))\n\n self.connection_rule = connection_rule\n if self.connection_rule == \"fixed_total_number\":\n C_ab_micro = C_scaled # total number, do not divide! \n elif self.connection_rule == \"fixed_indegree\":\n C_ab_micro = (C_scaled.T / (net.full_scale_n_neurons * self.area)).T\n else:\n raise Exception(\"Unexpected connection type. 
Use 'fixed_total_number' for microcircuit \" + \n \"model or 'fixed_indegree' for Brunel's model!\")\n\n if C_ab == \"micro\":\n self.C_ab = C_ab_micro # shall not be integer at this point!\n elif C_ab == \"brunel\":\n C_e = np.mean(C_ab_micro) # mean for microcircuit (= 501 in full scale)\n C_i = gamma * C_e\n self.C_ab = np.tile([C_e, C_i], (self.n_populations, self.n_layers)).astype(int) \n else:\n if type(C_ab) == np.ndarray:\n if C_ab.shape == (self.n_populations, self.n_populations):\n self.C_ab = np.int_(C_ab)\n else:\n raise Exception(\"'C_ab' has wrong shape. \"+\n \"Expects (%i, %i)\"%(self.n_populations, self.n_populations))\n else: \n raise Exception(\"'C_ab' expects either numpy.ndarray or string \"+\n \"in {'micro', 'brunel'}\")\n\n\n ###################################################\n ### Single-neuron parameters\t\t### \n ###################################################\n self.neuron_model = neuron_model\n self.Vm0_mean = net.Vm0_mean # mean of initial membrane potential (mV)\n self.Vm0_std = net.Vm0_std # std of initial membrane potential (mV)\n self.model_params = net.model_params\n if not self.neuron_model==\"iaf_psc_delta\":\n self.model_params[\"tau_syn_ex\"] = net.tau_syn_ex # excitatory synaptic time constant (ms)\n self.model_params[\"tau_syn_in\"] = net.tau_syn_in # inhibitory synaptic time constant (ms)\n self.tau_syn_ex = net.tau_syn_ex # ms\n self.tau_syn_in = net.tau_syn_in # ms\n self.tau_syn = np.tile([self.tau_syn_ex, self.tau_syn_in], (self.n_populations, self.n_layers))\n # Rescaling for model calculations: these values are not used in the simulation!\n self.tau_m = self.model_params[\"tau_m\"] # ms\n self.t_ref = self.model_params[\"t_ref\"] # ms\n self.E_L = self.model_params[\"E_L\"] # mV\n self.V_r = self.model_params[\"V_reset\"] - self.E_L # mV\n self.theta = self.model_params[\"V_th\"] - self.E_L # mV\n self.C_m = self.model_params[\"C_m\"] # pF\n\n\n ######################################################\n # Synaptic weights. Depend on neuron_model! 
##\n ######################################################\n self.g = g\n self.j02 = j02\n\n g_all = np.tile([1., -self.g], (self.n_populations, self.n_layers))\n L23e_index = np.where(self.populations == \"L23e\")[0][0]\n L4e_index = np.where(self.populations == \"L4e\")[0][0]\n g_all[L23e_index, L4e_index] *= self.j02\n \n self.J = net.PSP_e # mv; mean PSP, used as reference PSP\n self.J_ab = self.J * g_all\n self.weight_rel_sd = weight_rel_sd # Standard deviation of weight relative to mean weight\n # Transformation from peak PSP to PSC\n delta_tau = self.tau_syn - self.tau_m\n ratio_tau = self.tau_m / self.tau_syn\n PSC_over_PSP = self.C_m * delta_tau / (self.tau_m * self.tau_syn * \\\n (ratio_tau**(self.tau_m / delta_tau) - ratio_tau**(self.tau_syn / delta_tau))) \n # Actual weights have to be adapted: from peak PSP to PSC (and back...)\n if self.neuron_model==\"iaf_psc_exp\": # PSCs calculated from PSP amplitudes\n self.weights = self.J_ab * PSC_over_PSP # neuron populations\n elif self.neuron_model==\"iaf_psc_delta\":\n self.weights = self.J_ab * PSC_over_PSP * (self.tau_syn_ex) / self.C_m\n # This might be an overkill / doing things twice...\n elif self.neuron_model==\"iaf_psc_alpha\": # PSCs calculated from PSP amplitudes\n self.weights = self.J_ab * np.exp(1) / (self.tau_syn_ex) / self.C_m\n else:\n raise Exception(\"Neuron model should be iaf_psc_ - {delta, exp, alpha}!\")\n\n\n ###################################################\n ### Delays and dicts ### \n ###################################################\n # mean dendritic delays for excitatory and inhibitory transmission (ms)\n self.delay_e = net.delay_e # ms, excitatory synapses\n self.delay_i = net.delay_i # ms, inhibitory synapses\n\n self.delays = np.tile([self.delay_e, self.delay_i], (self.n_populations, self.n_layers)) # adapt...\n self.delay_rel_sd = delay_rel_sd \n \n # Synapse dictionaries\n # default connection dictionary\n self.conn_dict = {\"rule\": connection_rule}\n # weight distribution of connections between populations\n self.weight_dict_exc = net.weight_dict_exc\n self.weight_dict_inh = net.weight_dict_inh\n # delay distribution of connections between populations\n self.delay_dict = net.delay_dict\n # default synapse dictionary\n self.syn_dict = net.syn_dict\n \n \n ###################################################\n ### External stimuli ## \n ###################################################\n # rate of background Poisson input at each external input synapse (spikes/s) \n self.rate_ext = rate_ext # Hz \n self.J_ext = net.PSP_ext # external synaptic weight\n self.delay_ext = self.delay_e # ms; mean delay of external input\n self.dc_amplitude = net.dc_amplitude # constant bg amplitude\n self.C_aext = net.C_aext # in-degrees for background input\n # Adapt weights\n if self.neuron_model==\"iaf_psc_exp\": # PSCs calculated from PSP amplitudes\n self.weight_ext = self.J_ext * PSC_over_PSP[0, 0] \n elif self.neuron_model==\"iaf_psc_delta\":\n self.weight_ext = self.J_ext * PSC_over_PSP[0, 0] * self.tau_syn_ex / self.C_m\n elif self.neuron_model==\"iaf_psc_alpha\": # PSCs calculated from PSP amplitudes\n self.weight_ext = self.J_ext * np.exp(1) / self.tau_syn_ex / self.C_m\n\n # optional additional thalamic input (Poisson)\n self.n_th = net.n_th # size of thalamic population\n self.th_start = net.th_start # onset of thalamic input (ms)\n self.th_duration = net.th_duration # duration of thalamic input (ms)\n self.th_rate = net.th_rate # rate of thalamic neurons (spikes/s)\n self.J_th = net.PSP_th # mean EPSP 
amplitude (mV) for thalamic input\n # Adapt weights\n if self.neuron_model==\"iaf_psc_exp\": # PSCs calculated from PSP amplitudes\n self.weight_th = self.J_th * PSC_over_PSP[0, 0] \n elif self.neuron_model==\"iaf_psc_delta\":\n self.weight_th = self.J_th * PSC_over_PSP[0, 0] * self.tau_syn_ex / self.C_m\n elif self.neuron_model==\"iaf_psc_alpha\": # PSCs calculated from PSP amplitudes\n self.weight_th = self.J_th * np.exp(1) / self.tau_syn_ex / self.C_m\n\n \n # connection probabilities for thalamic input\n conn_probs_th = net.conn_probs_th\n if net.scale_C_linearly:\n if not self.n_th == 0:\n C_th_full_scale = np.log(1. - conn_probs_th) / \\\n np.log(1. - 1. / (self.n_th * net.full_scale_n_neurons))\n self.C_th_scaled = np.int_(C_th_full_scale * self.area)\n else:\n if not self.n_th == 0:\n self.C_th_scaled = np.int_(np.log(1. - conn_probs_th) / \\\n np.log(1. - 1. / (self.n_th * self.n_neurons_micro)))\n if self.n_th == 0:\n self.C_th_scaled = None\n \n # mean delay of thalamic input (ms)\n self.delay_th = net.delay_th\n # standard deviation relative to mean delay of thalamic input\n self.delay_th_rel_sd = net.delay_th_rel_sd\n\n\n ######################################################\n # Predefine matrices for mean field ##\n ######################################################\n if self.neuron_model==\"iaf_psc_delta\":\n self.J_mu = self.weights\n self.J_sd = self.weights\n self.J_mu_ext = self.weight_ext \n self.J_sd_ext = self.weight_ext\n elif self.neuron_model==\"iaf_psc_exp\":\n self.J_mu = self.weights * self.tau_syn / self.C_m\n self.J_sd = self.weights * np.sqrt(self.tau_syn / 2.) / self.C_m\n self.J_mu_ext = self.weight_ext * self.tau_syn_ex / self.C_m\n self.J_sd_ext = self.weight_ext * np.sqrt(self.tau_syn_ex / 2.) / self.C_m\n elif self.neuron_model==\"iaf_psc_alpha\":\n self.J_mu = self.weights * self.tau_syn**2 / self.C_m\n self.J_sd = self.weights * self.tau_syn**(3./2.) / (self.C_m * 2.)\n self.J_mu_ext = self.weight_ext * self.tau_syn_ex**2 / self.C_m\n self.J_sd_ext = self.weight_ext * self.tau_syn_ex**(3./2.) 
/ (self.C_m * 2.)\n self.mat_mu = self.tau_m * 1e-3 * self.J_mu * self.C_ab\n self.mu_ext = self.tau_m * 1e-3 * self.J_mu_ext * self.C_aext * self.rate_ext\n self.mat_var = self.tau_m * 1e-3 * (1 + self.weight_rel_sd ** 2) * self.J_sd**2 * self.C_ab\n self.var_ext = self.tau_m * 1e-3 * (1 + self.weight_rel_sd ** 2) * self.J_sd_ext**2 * self.C_aext * self.rate_ext", "def __conv_block(self, x, stage, branch, nb_filter, dropout_rate=None, weight_decay=1e-4):\n\t\teps = 1.1e-5\n\t\tconv_name_base = \"conv\" + str(stage) + \"_\" + str(branch)\n\t\trelu_name_base = \"relu\" + str(stage) + \"_\" + str(branch)\n\n\t\t# 1x1 Convolution (Bottleneck layer)\n\t\tinter_channel = nb_filter * 4 \n\t\tx = BatchNormalization(epsilon=eps, axis=self.concat_axis, name=conv_name_base+\"_x1_bn\")(x)\n\t\tx = Scale(axis=self.concat_axis, name=conv_name_base+\"_x1_scale\")(x)\n\t\tx = Activation(\"relu\", name=relu_name_base+\"_x1\")(x)\n\t\tx = Conv2D(inter_channel, (1, 1), name=conv_name_base+\"_x1\", use_bias=False)(x)\n\n\t\tif dropout_rate:\n\t\t\tx = Dropout(dropout_rate)(x)\n\n\t\t# 3x3 Convolution\n\t\tx = BatchNormalization(epsilon=eps, axis=self.concat_axis, name=conv_name_base+\"_x2_bn\")(x)\n\t\tx = Scale(axis=self.concat_axis, name=conv_name_base+\"_x2_scale\")(x)\n\t\tx = Activation(\"relu\", name=relu_name_base+\"_x2\")(x)\n\t\tx = ZeroPadding2D((1, 1), name=conv_name_base+\"_x2_zeropadding\")(x)\n\t\tx = Conv2D(nb_filter, (3, 3), name=conv_name_base+\"_x2\", use_bias=False)(x)\n\n\t\tif dropout_rate:\n\t\t\tx = Dropout(dropout_rate)(x)\n\n\t\treturn x", "def main():\n args = get_arguments()\n \n # Create queue coordinator.\n coord = tf.train.Coordinator()\n \n # Load reader.\n with tf.name_scope(\"create_inputs\"):\n reader = ImageReader_MultiClass_Loss(\n args.data_dir,\n args.data_list,\n None, # No defined input size.\n RANDOM_SEED,\n False, # No random scale.\n False, # No random mirror.\n coord)\n image, l2_catg, binary_catg, hinge_catg = reader.image, reader.l2_catg, reader.binary_catg, reader.hinge_catg\n image_batch = tf.expand_dims(image, dim=0)\n binary_catg_batch = tf.expand_dims(binary_catg, dim=0)\n\n # Create network.\n net = DeepLabResNetModel({'data': image_batch}, is_training=False)\n\n # Which variables to load.\n restore_var = tf.global_variables()\n \n # Predictions.\n raw_output = net.layers['fc1_voc12']\n\n # Do the global average pooling\n raw_output_bcgd_rmvd = raw_output[:,:,:,1:]\n g_avg_pool = tf.reduce_mean(tf.reduce_mean(raw_output_bcgd_rmvd, axis=1, keep_dims=True),\\\n axis=2, keep_dims=True) # Avg across the width and height dimension -> [Bx21]\n g_avg_pool_sqzd = tf.squeeze(g_avg_pool, axis=[1, 2])\n pred = tf.nn.softmax(g_avg_pool_sqzd)\n\n # Get the class activation map\n raw_output_up = tf.image.resize_bilinear(raw_output_bcgd_rmvd, tf.shape(image_batch)[1:3,])\n raw_output_up = raw_output_up - tf.reduce_min(tf.reduce_min(raw_output_up, axis=1, keep_dims=True), axis=2, keep_dims=True) + EPSILON\n raw_output_up = raw_output_up / tf.reduce_max(tf.reduce_max(raw_output_up, axis=1, keep_dims=True), axis=2, keep_dims=True)\n cam_m_1 = tf.argmax(raw_output_up, dimension=3) + 1\n raw_output_catgs_rmvd = raw_output_up * tf.expand_dims(tf.expand_dims(binary_catg_batch, 1), 2)\n cam_m_2 = tf.argmax(raw_output_catgs_rmvd, dimension=3) + 1\n cam = tf.cast(tf.equal(cam_m_1, cam_m_2), tf.int64) * cam_m_1\n\n cam_batch = tf.expand_dims(cam, dim=3)\n\n # Set up tf session and initialize variables. 
\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n sess = tf.Session(config=config)\n init = tf.global_variables_initializer()\n \n sess.run(init)\n sess.run(tf.local_variables_initializer())\n \n # Load weights.\n loader = tf.train.Saver(var_list=restore_var)\n if args.restore_from is not None:\n load(loader, sess, args.restore_from)\n \n # Start queue threads.\n threads = tf.train.start_queue_runners(coord=coord, sess=sess)\n \n # Iterate over training steps.\n for step in range(args.num_steps):\n preds, images, cams, bin_catg = sess.run([pred, image_batch, cam_batch, binary_catg])\n \"\"\"\n print(bin_catg)\n print(np.unique(np.unique(cams)))\n \"\"\"\n img = inv_preprocess(images)\n attMap = decode_labels(cams)\n output_dir = './output_maps_binary_without_norm/'\n img_name = output_dir + str(step) + '.jpg'\n map_name = output_dir + str(step) + '.png'\n misc.imsave(img_name, img[0,:,:,:])\n misc.imsave(map_name, attMap[0,:,:,:])\n coord.request_stop()\n coord.join(threads)", "def __call__(self, inputs, state, scope=None, *args, **kwargs):\r\n # Bypass RNNCell's variable capturing semantics for LayerRNNCell.\r\n # Instead, it is up to subclasses to provide a proper build\r\n # method. See the class docstring for more details.\r\n return base_layer.Layer.__call__(self, inputs, state, scope=scope,\r\n *args, **kwargs)", "def __init__(self):\r\n super(HarrisNet, self).__init__()\r\n\r\n image_gradients_layer = ImageGradientsLayer()\r\n\r\n\r\n # (1) ImageGradientsLayer: Compute image gradients Ix Iy. Can be\r\n # approximated by convolving with sobel filter.\r\n # (2) EigenvalueApproxLayer: Compute S_xx, S_yy and S_xy, the output is\r\n # a tensor of size num_image x 3 x width x height\r\n # (3) CornerResponseLayer: Compute R matrix, the output is a tensor of\r\n # size num_image x 1 x width x height\r\n # (4) NMSLayer: Perform non-maximum suppression, the output is a tensor\r\n # of size num_image x 1 x width x height\r\n\r\n layer_1 = ChannelProductLayer()\r\n layer_2 = SecondMomentMatrixLayer()\r\n layer_3 = CornerResponseLayer()\r\n layer_4 = NMSLayer()\r\n\r\n self.net = nn.Sequential(\r\n image_gradients_layer,\r\n layer_1,\r\n layer_2,\r\n layer_3,\r\n layer_4\r\n )", "def test_CNN_hyperparameters_nrlayers(self):\n hyperparams = modelgen.generate_CNN_hyperparameter_set(\n min_fc_nodes=123, max_fc_nodes=123)\n assert hyperparams.get('fc_hidden_nodes') == 123", "def inference(image, keep_prob):\r\n '''\r\n print(\"setting up vgg initialized conv layers ...\")\r\n model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)\r\n\r\n mean = model_data['normalization'][0][0][0]\r\n mean_pixel = np.mean(mean, axis=(0, 1))\r\n\r\n weights = np.squeeze(model_data['layers'])\r\n print(\"weights.shape\",weights.shape)\r\n\r\n processed_image = utils.process_image(image, mean_pixel)'''\r\n\r\n with tf.variable_scope(\"inference\"):\r\n pooling_net,conv_final_layer = inference_op(image)\r\n #conv_final_layer = image_net[\"conv5_3\"]\r\n\r\n pool5 = utils.max_pool_2x2(conv_final_layer)\r\n\r\n W6 = utils.weight_variable([7, 7, 512, 4096], name=\"W6\")\r\n b6 = utils.bias_variable([4096], name=\"b6\")\r\n conv6 = utils.conv2d_basic(pool5, W6, b6)\r\n relu6 = tf.nn.relu(conv6, name=\"relu6\")\r\n if FLAGS.debug:\r\n utils.add_activation_summary(relu6)\r\n relu_dropout6 = tf.nn.dropout(relu6, keep_prob=keep_prob)\r\n\r\n W7 = utils.weight_variable([1, 1, 4096, 4096], name=\"W7\")\r\n b7 = utils.bias_variable([4096], name=\"b7\")\r\n conv7 = utils.conv2d_basic(relu_dropout6, W7, 
b7)\r\n relu7 = tf.nn.relu(conv7, name=\"relu7\")\r\n if FLAGS.debug:\r\n utils.add_activation_summary(relu7)\r\n relu_dropout7 = tf.nn.dropout(relu7, keep_prob=keep_prob)\r\n\r\n W8 = utils.weight_variable([1, 1, 4096, NUM_OF_CLASSESS], name=\"W8\")\r\n b8 = utils.bias_variable([NUM_OF_CLASSESS], name=\"b8\")\r\n conv8 = utils.conv2d_basic(relu_dropout7, W8, b8)\r\n # annotation_pred1 = tf.argmax(conv8, dimension=3, name=\"prediction1\")\r\n\r\n # now to upscale to actual image size\r\n deconv_shape1 = pooling_net[\"pool4\"].get_shape()\r\n W_t1 = utils.weight_variable([4, 4, deconv_shape1[3].value, NUM_OF_CLASSESS], name=\"W_t1\")\r\n b_t1 = utils.bias_variable([deconv_shape1[3].value], name=\"b_t1\")\r\n # 对第8层的结果进行反卷积(上采样),通道数也由NUM_OF_CLASSESS变为第4层的通道数\r\n conv_t1 = utils.conv2d_transpose_strided(conv8, W_t1, b_t1, output_shape=tf.shape(pooling_net[\"pool4\"]))\r\n fuse_1 = tf.add(conv_t1, pooling_net[\"pool4\"], name=\"fuse_1\")\r\n\r\n deconv_shape2 = pooling_net[\"pool3\"].get_shape()\r\n W_t2 = utils.weight_variable([4, 4, deconv_shape2[3].value, deconv_shape1[3].value], name=\"W_t2\")\r\n b_t2 = utils.bias_variable([deconv_shape2[3].value], name=\"b_t2\")\r\n conv_t2 = utils.conv2d_transpose_strided(fuse_1, W_t2, b_t2, output_shape=tf.shape(pooling_net[\"pool3\"]))\r\n fuse_2 = tf.add(conv_t2, pooling_net[\"pool3\"], name=\"fuse_2\")\r\n\r\n shape = tf.shape(image)\r\n deconv_shape3 = tf.stack([shape[0], shape[1], shape[2], NUM_OF_CLASSESS])\r\n W_t3 = utils.weight_variable([16, 16, NUM_OF_CLASSESS, deconv_shape2[3].value], name=\"W_t3\")\r\n b_t3 = utils.bias_variable([NUM_OF_CLASSESS], name=\"b_t3\")\r\n conv_t3 = utils.conv2d_transpose_strided(fuse_2, W_t3, b_t3, output_shape=deconv_shape3, stride=8)\r\n\r\n annotation_pred = tf.argmax(conv_t3, dimension=3, name=\"prediction\")\r\n print(\"annotation_pred.shape\",annotation_pred.shape)\r\n print(\"conv_t3\",conv_t3)\r\n print(\"tf.expand_dims(annotation_pred, dim=3)\",tf.expand_dims(annotation_pred, dim=3))\r\n return tf.expand_dims(annotation_pred, dim=3), conv_t3", "def __init__(self, network_path='.', logging=True,\n input_image_size=None, n_input_channels=None,\n n_output_classes=None,\n conv1_size=5, conv1_n_chan=32, conv1_n_pool=2,\n conv2_size=5, conv2_n_chan=64, conv2_n_pool=2,\n fc1_n_chan=1024, fc1_dropout=0.5, alpha=4e-4 ):\n self.logging = logging\n\n # If network path does not yet exists\n self.network_path = network_path\n if not os.path.isdir(self.network_path):\n # Make network directory\n os.mkdir(self.network_path)\n now = datetime.datetime.now()\n self.log(\"\\n\\n++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n self.log( \"Creation of new network: \")\n self.log( \" {}\".format(self.network_path) )\n self.log( \" @ {}\".format(now.strftime(\"%Y-%m-%d %H:%M\")) )\n self.log( \"++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n self.log(\"\\nNetwork did not exist ... 
\")\n self.log(\"Created new network with supplied (or default) architecture\")\n\n # Set up new network\n self.y_res = input_image_size[0]\n self.x_res = input_image_size[1]\n self.n_input_channels = n_input_channels\n self.n_output_classes = n_output_classes\n self.conv1_size = conv1_size\n self.conv1_n_chan = conv1_n_chan\n self.conv1_n_pool = conv1_n_pool\n self.conv2_size = conv2_size\n self.conv2_n_chan = conv2_n_chan\n self.conv2_n_pool = conv2_n_pool\n self.fc1_y_size = int( np.ceil( np.ceil(\n self.y_res/self.conv1_n_pool ) / self.conv2_n_pool ) )\n self.fc1_x_size = int( np.ceil( np.ceil(\n self.x_res/self.conv1_n_pool ) / self.conv2_n_pool ) )\n self.fc1_n_chan = fc1_n_chan\n self.fc1_dropout = fc1_dropout\n self.alpha = alpha\n self.n_samples_trained = 0\n self.n_class_samples_trained = self.n_output_classes*[0]\n self.n_samples_list = []\n self.n_class_samples_list = [[] for _ in range(self.n_output_classes)]\n self.accuracy_list = [[] for _ in range(self.n_output_classes)]\n self.precision_list = [[] for _ in range(self.n_output_classes)]\n self.recall_list = [[] for _ in range(self.n_output_classes)]\n self.F1_list = [[] for _ in range(self.n_output_classes)]\n\n # Save network architecture\n self.save_network_architecture( network_path=self.network_path )\n\n else:\n now = datetime.datetime.now()\n self.log(\"\\n\\n++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n self.log( \"Re-initialization of existing network: \")\n self.log( \" {}\".format(self.network_path) )\n self.log( \" @ {}\".format(now.strftime(\"%Y-%m-%d %H:%M\")) )\n self.log( \"++++++++++++++++++++++++++++++++++++++++++++++++++++\")\n self.log( \" \")\n\n # Load network architecture from directory\n net_architecture = self.load_network_architecture(self.network_path)\n\n # Set up network variables from loaded architecture\n self.y_res = net_architecture['y_res']\n self.x_res = net_architecture['x_res']\n self.n_input_channels = net_architecture['n_input_channels']\n self.n_output_classes = net_architecture['n_output_classes']\n self.conv1_size = net_architecture['conv1_size']\n self.conv1_n_chan = net_architecture['conv1_n_chan']\n self.conv1_n_pool = net_architecture['conv1_n_pool']\n self.conv2_size = net_architecture['conv2_size']\n self.conv2_n_chan = net_architecture['conv2_n_chan']\n self.conv2_n_pool = net_architecture['conv2_n_pool']\n self.fc1_y_size = int( np.ceil( np.ceil(\n self.y_res/self.conv1_n_pool ) / self.conv2_n_pool ) )\n self.fc1_x_size = int( np.ceil( np.ceil(\n self.x_res/self.conv1_n_pool ) / self.conv2_n_pool ) )\n self.fc1_n_chan = net_architecture['fc1_n_chan']\n self.fc1_dropout = net_architecture['fc1_dropout']\n self.alpha = net_architecture['alpha']\n self.n_samples_trained = net_architecture['n_samples_trained']\n self.n_class_samples_trained = net_architecture['n_class_samples_trained']\n self.n_samples_list = net_architecture['n_samples_list']\n self.n_class_samples_list = net_architecture['n_class_samples_list']\n self.accuracy_list = net_architecture['accuracy_list']\n self.precision_list = net_architecture['precision_list']\n self.recall_list = net_architecture['recall_list']\n self.F1_list = net_architecture['F1_list']\n\n # Update values of alpha and dropout if supplied\n if self.alpha != alpha:\n self.alpha = alpha\n self.log(\"Updated learning rate 'alpha' to {}\".format(self.alpha))\n if self.fc1_dropout != fc1_dropout:\n self.fc1_dropout = fc1_dropout\n self.log(\"Updated dropout fraction to {}\".format(self.fc1_dropout))\n\n # Clear previous graphs\n 
tf.reset_default_graph()\n\n #########################################################\n # Input and target variable placeholders\n # x = [ m_samples x [channel_1_data, channel_2_data, etc.] ]\n self.x = tf.placeholder( tf.float32, shape = [None,\n self.n_input_channels * self.y_res * self.x_res] )\n self.y_trgt = tf.placeholder( tf.float32, \\\n shape = [None, self.n_output_classes] )\n\n # Convert input image to tensor with channel as last dimension\n # x_image = [-1 x im-height x im-width x n-input-channels]\n x_image_temp = tf.reshape(self.x, [-1,\n self.n_input_channels,self.y_res,self.x_res])\n x_image = tf.transpose(x_image_temp, [0,2,3,1])\n\n #########################################################\n # Set up convolutional layer 1\n # W = [im-height x im-width x n-input-channels x n-output-channels])\n self.conv1_shape = [self.conv1_size, self.conv1_size,\n self.n_input_channels, self.conv1_n_chan]\n self.W_conv1 = tf.Variable( tf.truncated_normal(\n shape=self.conv1_shape, stddev=0.1))\n self.b_conv1 = tf.Variable( tf.constant(0.1,\n shape=[self.conv1_n_chan] ))\n\n # Convolve x_image with the weight tensor\n self.conv1_lin = tf.nn.conv2d( x_image, self.W_conv1,\n strides=[1, 1, 1, 1], padding='SAME' )\n\n # Add bias and apply transfer function\n self.conv1_relu = tf.nn.relu( self.conv1_lin + self.b_conv1 )\n\n # Max pooling\n self.conv1_kernel = [1, self.conv1_n_pool, self.conv1_n_pool, 1]\n self.conv1_pool = tf.nn.max_pool( self.conv1_relu,\n ksize=self.conv1_kernel, strides=self.conv1_kernel, padding='SAME')\n\n #########################################################\n # Convolutional layer 2\n self.conv2_shape = [self.conv2_size, self.conv2_size,\n self.conv1_n_chan, self.conv2_n_chan]\n self.W_conv2 = tf.Variable( tf.truncated_normal(\n shape=self.conv2_shape, stddev=0.1 ) )\n self.b_conv2 = tf.Variable( tf.constant(0.1,\n shape=[self.conv2_n_chan] ))\n\n # Convolve x_image with the weight tensor\n self.conv2_lin = tf.nn.conv2d( self.conv1_pool, self.W_conv2,\n strides=[1, 1, 1, 1], padding='SAME' )\n\n # Add bias and apply transfer function\n self.conv2_relu = tf.nn.relu( self.conv2_lin + self.b_conv2 )\n\n # Max pooling\n self.conv2_kernel = [1, self.conv2_n_pool, self.conv2_n_pool, 1]\n self.conv2_pool = tf.nn.max_pool( self.conv2_relu,\n ksize=self.conv2_kernel, strides=self.conv2_kernel, padding='SAME')\n\n\n #########################################################\n # Densely Connected Layer\n # Weights and bias\n self.fc1_shape = [self.fc1_y_size * self.fc1_x_size * self.conv2_n_chan,\n self.fc1_n_chan]\n self.W_fc1 = tf.Variable( tf.truncated_normal(\n shape=self.fc1_shape, stddev=0.1 ) )\n self.b_fc1 = tf.Variable( tf.constant(0.1, shape=[self.fc1_n_chan] ))\n\n # Flatten output from conv2\n self.conv2_pool_flat = tf.reshape(\n self.conv2_pool, [-1, self.fc1_shape[0]] )\n\n # Calculate network step\n self.fc1_relu = tf.nn.relu( tf.matmul( self.conv2_pool_flat,\n self.W_fc1) + self.b_fc1 )\n\n # Set up dropout option for fc1\n self.fc1_keep_prob = tf.placeholder(tf.float32)\n self.fc1_relu_drop = tf.nn.dropout(self.fc1_relu, self.fc1_keep_prob)\n\n #########################################################\n # Readout layer\n # Weights and bias\n self.fc_out_shape = [self.fc1_n_chan, self.n_output_classes]\n self.W_fc_out = tf.Variable( tf.truncated_normal(\n shape=self.fc_out_shape, stddev=0.1 ) )\n self.b_fc_out = tf.Variable( tf.constant(0.1,\n shape=[self.fc_out_shape[1]] ))\n\n # Calculate network step\n self.fc_out_lin = tf.matmul( self.fc1_relu_drop,\n 
self.W_fc_out ) + self.b_fc_out\n\n #########################################################\n # Define cost function and optimizer algorithm\n self.cross_entropy = tf.reduce_mean(\n tf.nn.softmax_cross_entropy_with_logits(\n logits=self.fc_out_lin, labels=self.y_trgt ) )\n self.train_step = tf.train.AdamOptimizer(self.alpha).minimize(\n self.cross_entropy )\n\n #########################################################\n # Define how to test trained model\n self.network_prediction = tf.cast( tf.argmax(\n self.fc_out_lin, 1 ), tf.float32 )\n self.is_correct_prediction = tf.equal( tf.argmax( self.fc_out_lin, 1 ),\n tf.argmax( self.y_trgt, 1 ) )\n self.accuracy = tf.reduce_mean( tf.cast(\n self.is_correct_prediction, tf.float32 ) )\n\n #########################################################\n # Create save operation\n self.saver = tf.train.Saver()", "def my_hom_cnn_model_fn(features, labels, mode):\n # Input Layer\n # Reshape X to 4-D tensor: [batch_size, width, height, channels]\n\n #HOM Images are 128x128, and have two channels\n input_layer = tf.reshape(features[\"x\"], [-1, pSize, pSize, 2])\n\n # Convolutional Layer #1\n # Computes 32 features using a 3x3 filter with ReLU activation.\n # Padding is added to preserve width and height.\n conv1 = tf.layers.conv2d(inputs=input_layer, filters=64,kernel_size=[3, 3], padding=\"same\",activation=tf.nn.relu) #None\n conv2 = tf.layers.conv2d(inputs=conv1, filters=64, kernel_size=[3, 3], padding=\"same\", activation=tf.nn.relu)\n pool1 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)\n\n conv3 = tf.layers.conv2d(inputs=pool1, filters=64, kernel_size=[3, 3], padding=\"same\",activation=tf.nn.relu)\n conv4 = tf.layers.conv2d(inputs=conv3,filters=64,kernel_size=[3, 3],padding=\"same\",activation=tf.nn.relu)\n pool2 = tf.layers.max_pooling2d(inputs=conv4, pool_size=[2, 2], strides=2)\n\n conv5 = tf.layers.conv2d(inputs=pool2, filters=128, kernel_size=[3, 3], padding=\"same\",activation=tf.nn.relu)\n conv6 = tf.layers.conv2d(inputs=conv5,filters=128,kernel_size=[3, 3],padding=\"same\",activation=tf.nn.relu)\n pool3 = tf.layers.max_pooling2d(inputs=conv6, pool_size=[2, 2], strides=2)\n\n conv7 = tf.layers.conv2d(inputs=pool3, filters=128, kernel_size=[3, 3], padding=\"same\",activation=tf.nn.relu)\n conv8 = tf.layers.conv2d(inputs=conv7,filters=128,kernel_size=[3, 3],padding=\"same\",activation=tf.nn.relu)\n\n # Dense Layer\n # Densely connected layer with 1024 neurons\n conv2_flat = tf.reshape(conv8, [-1, 128 * 128 * 2])\n # Add dropout operation; 0.5 probability that element will be kept\n dropout = tf.layers.dropout(inputs=conv2_flat, rate=0.5, training=mode == tf.estimator.ModeKeys.TRAIN)\n\n fully_connect = tf.layers.dense(inputs=dropout, units=1024, activation=None) #activation=None\n predictions = tf.layers.dense(inputs=fully_connect, units=8, activation=None)\n\n #predictions\n if mode == tf.estimator.ModeKeys.PREDICT:\n return tf.estimator.EstimatorSpec(mode=mode, predictions = predictions)\n \n loss = tf.losses.mean_squared_error(labels=labels, predictions = predictions)\n \n #loss = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=predictions, name=\"softmax_tensor\")\n\n # Configure the Training Op (for TRAIN mode)\n if mode == tf.estimator.ModeKeys.TRAIN:\n optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)\n train_op = optimizer.minimize(loss=loss, global_step=tf.train.get_global_step())\n return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)\n\n # Add evaluation 
metrics (for EVAL mode)\n #eval_metric_ops = {\"accuracy\": tf.metrics.accuracy(labels=labels, predictions=predictions[\"coord\"])}#predictions=predictions[\"classes\"])}\n \n if mode == tf.estimator.ModeKeys.EVAL:\n eval_metric_ops = {\n \"mean_square_error\": tf.metrics.mean_squared_error(labels=labels, predictions = predictions)}#predictions=predictions[\"classes\"])}\n\n return tf.estimator.EstimatorSpec(mode=mode, loss=loss, eval_metric_ops=eval_metric_ops, predictions = predictions)", "def QCNN_layer_gen(self):\n pixels = self.filter_size**2\n # filter size: 2^n only for this version!\n if np.log2(pixels) % 1 != 0:\n raise NotImplementedError(\"filter size: 2^n only available\")\n cirq_qubits = cirq.GridQubit.rect(self.filter_size, self.filter_size)\n # mapping input data to circuit\n input_circuit = cirq.Circuit()\n input_params = [sympy.symbols('a%d' %i) for i in range(pixels)]\n for i, qubit in enumerate(cirq_qubits):\n input_circuit.append(cirq.rx(np.pi*input_params[i])(qubit))\n # apply learnable gate set to QCNN circuit\n QCNN_circuit = cirq.Circuit()\n step_size = [2**i for i in range(np.log2(pixels).astype(np.int32))]\n for step in step_size:\n for target in range(0, pixels, 2*step):\n QCNN_circuit.append(self._QConv(step, target, cirq_qubits))\n # merge the circuits\n full_circuit = cirq.Circuit()\n full_circuit.append(input_circuit)\n full_circuit.append(QCNN_circuit)\n self.circuit = full_circuit # save circuit to the QCNN layer obj.\n self.params = input_params + self.learning_params\n self.op = cirq.Z(cirq_qubits[0])", "def __init__(self, layers, input_size):\n super(ConvNetMaker, self).__init__()\n self.conv_layers = []\n self.fc_layers = []\n # h, w, d = 32, 32, 3\n h, w, d = input_size, input_size, 3\n previous_layer_filter_count = 3\n previous_layer_size = h * w * d\n num_fc_layers_remained = len([1 for l in layers if l.startswith('FC')])\n for layer in layers:\n if layer.startswith('Conv'):\n filter_count = int(layer[4:])\n self.conv_layers += [\n nn.Conv2d(previous_layer_filter_count,\n filter_count,\n kernel_size=3,\n padding=1),\n nn.BatchNorm2d(filter_count),\n nn.ReLU(inplace=True)\n ]\n\n previous_layer_filter_count = filter_count\n d = filter_count\n previous_layer_size = h * w * d\n elif layer.startswith('MaxPool'):\n self.conv_layers += [nn.MaxPool2d(kernel_size=2, stride=2)]\n h, w = int(h / 2.0), int(w / 2.0)\n previous_layer_size = h * w * d\n elif layer.startswith('FC'):\n num_fc_layers_remained -= 1\n current_layer_size = int(layer[2:])\n if num_fc_layers_remained == 0:\n self.fc_layers += [nn.Linear(previous_layer_size,\n current_layer_size)]\n else:\n self.fc_layers += [nn.Linear(previous_layer_size,\n current_layer_size),\n nn.ReLU(inplace=True)]\n previous_layer_size = current_layer_size\n\n conv_layers = self.conv_layers\n fc_layers = self.fc_layers\n self.conv_layers = nn.Sequential(*conv_layers)\n self.fc_layers = nn.Sequential(*fc_layers)", "def build_net(self, inputs):\n with tf.variable_scope(self._scope, self._scope, [inputs]) as sc:\n end_points_collection = sc.name + '_end_points'\n\n with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.batch_norm],\n outputs_collections=end_points_collection):\n net = slim.conv2d(inputs, 32, 3, 1, scope='conv1')\n net = slim.conv2d(net, 32, 3, 1, scope='conv2')\n\n net = slim.conv2d(net, 64, 3, 1, scope='conv3')\n net = slim.conv2d(net, 64, 3, 1, scope='conv4')\n\n net = slim.max_pool2d(net, 2, 2, scope='pool1')\n\n net = slim.conv2d(net, 128, 3, 1, scope='conv5')\n net = slim.conv2d(net, 128, 3, 1, 
scope='conv6')\n\n net = slim.max_pool2d(net, 2, 2, scope='pool2')\n\n net = slim.conv2d(net, 256, 3, scope='conv7')\n net = slim.conv2d(net, 256, 3, scope='conv8')\n\n net = slim.max_pool2d(net, 2, [2, 1], scope='pool3')\n\n net = slim.conv2d(net, 512, 3, scope='conv9')\n net = slim.conv2d(net, 512, 3, scope='conv10')\n\n net = slim.max_pool2d(net, 2, [1, 1], scope='pool4')\n\n net = slim.conv2d(net, 512, 2, padding='VALID', scope='conv11')\n\n net = slim.dropout(net, keep_prob=0.5)\n\n self.end_points = utils.convert_collection_to_dict(end_points_collection)\n self.net = net", "def apply_network(inputs):\n return apply_layer(tf.sigmoid(apply_layer(inputs, 64)), 1)", "def explain(self):\n # build the 2 versions of the model\n model = self.build_model()\n last_conv_model = self.build_cut_model()\n\n for i, label_name in enumerate(self.label_names):\n # This is the algorithm for the last convolution layer's tensor image\n # Get the index of the image that was classified correctly with the most confidence for the class\n predicted_col_proba = np.array(self.predicted_labels)[0][:, i]\n predicted_col_argsort = predicted_col_proba.argsort()[::-1]\n predicted_col = (predicted_col_proba > 0.2).astype(int)\n true_col = self.true_labels[:, 0]\n\n representative_image_index = None\n for most_probable_arg_index in predicted_col_argsort:\n if predicted_col[most_probable_arg_index] == true_col[most_probable_arg_index]:\n representative_image_index = most_probable_arg_index\n break\n\n # Resize the image to fit the neural network and keep the original resized image\n original_img = io.imread('{}/{}/{}'.format(path_to_img_directory, self.ex_format, np.array(self.image_names)[representative_image_index]))\n original_img = cv2.normalize(original_img, None, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32F)\n original_img = cv2.resize(original_img, dsize=(self.ex_input_size, self.ex_input_size), interpolation=cv2.INTER_CUBIC)\n img = np.expand_dims(original_img, axis=0)\n original_img = original_img[:, :, :3]\n\n # Get the output of the neural network for this image as a tensor\n model.predict(np.array(img))\n class_output = model.output[:, i]\n last_conv_layer = model.get_layer(self.ex_last_conv_layer_name1).output\n # if self.model_name == 'vit':\n # last_conv_layer = tf.nn.relu(tf.reshape(last_conv_layer[:, :256, :], (-1, 16, 16, 1024)))\n\n # Get the output for the cut model\n cut_img = last_conv_model.predict(np.array(img))[0]\n if self.model_name == 'vit':\n cut_img = np.reshape(cut_img[:256, :], (16, 16, 1024))\n cut_img = np.mean(cut_img, axis=-1)\n cut_img = cv2.normalize(cut_img, None, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32F)\n if self.model_name == 'vit':\n cut_img[0, 0] = np.mean(cut_img)\n cut_img = cv2.normalize(cut_img, None, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32F)\n cut_img = cv2.resize(cut_img, (self.ex_input_size, self.ex_input_size))\n\n # This is the algorithm of the Grad-CAM model\n # Refine the output of the last convolutional layer according to the class output\n grads = K.gradients(class_output, last_conv_layer)[0]\n if self.model_name == 'vit':\n last_conv_layer = tf.reshape(last_conv_layer[:, :256, :], (-1, 16, 16, 1024))\n last_conv_layer = last_conv_layer / tf.norm(last_conv_layer)\n\n grads = tf.reshape(grads[:, :256, :], (-1, 16, 16, 1024))\n grads = grads / tf.norm(grads)\n\n pooled_grads = K.mean(grads, axis=(0, 1, 2))\n iterate = K.function([model.input], [pooled_grads, last_conv_layer[0]])\n pooled_grads_value, 
conv_layer_output_value = iterate([img])\n for j in range(self.ex_last_conv_layer_filter_number):\n conv_layer_output_value[:, :, j] *= pooled_grads_value[j]\n\n # Create a 16x16 heatmap and scale it to the same size as the original image\n heatmap = np.mean(conv_layer_output_value, axis=-1)\n heatmap = np.maximum(heatmap, 0)\n heatmap /= np.max(heatmap)\n heatmap = cv2.resize(heatmap, (self.ex_input_size, self.ex_input_size))\n heatmap = np.uint8(255 * heatmap)\n heatmap = cv2.applyColorMap(heatmap, cv2.COLORMAP_JET)\n heatmap = cv2.normalize(heatmap, None, alpha=0, beta=1, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_32F)\n superimposed_img = cv2.addWeighted(original_img, 0.7, heatmap, 0.4, 0)\n\n # save the original image\n plt.matshow(original_img)\n plt.axis('off')\n plt.title(label_name, fontdict={'fontsize': 18})\n plt.savefig('{}/{}/{}_{}.png'.format(path_to_explainable, 'original', self.model_name, label_name), bbox_inches='tight', pad_inches=0.1)\n\n # save the cut image\n plt.matshow(cut_img, cmap=plt.get_cmap('Spectral'))\n plt.colorbar(shrink=0.75, ticks=np.linspace(0, 1, 11).tolist())\n plt.axis('off')\n plt.title(label_name, fontdict={'fontsize': 18})\n plt.savefig('{}/{}/{}_{}.png'.format(path_to_explainable, 'cut', self.model_name, label_name), bbox_inches='tight', pad_inches=0.1)\n\n # save the superimposed gradcam image\n plt.matshow(superimposed_img, cmap=plt.get_cmap('Spectral'))\n plt.colorbar(shrink=0.75, ticks=np.linspace(0, 1, 11).tolist())\n plt.axis('off')\n plt.title(label_name, fontdict={'fontsize': 18})\n plt.savefig('{}/{}/{}_{}.png'.format(path_to_explainable, 'gradcam', self.model_name, label_name), bbox_inches='tight', pad_inches=0.1)", "def __init__(self, args):\n\n self.args = args\n\n \"\"\"class args:\n rows=16\n cols=640\n baseinfo=10\n hps = 128\n hpdist = 33\n \"\"\"\n\n inputs = KK.layers.Input(shape=(args.rows,args.cols,args.baseinfo))\n x = KK.layers.Conv2D(64, kernel_size= (1, 6), strides=(1,5), activation='relu')(inputs)\n x = KK.layers.Conv2D(64, (16, 1), activation='relu')(x)\n bottle = KK.layers.Flatten()(x) # This is now the bottleneck\n\n self.model = KK.models.Model(inputs=inputs, outputs=[bottle])\n self.model.summary()\n\n ################################\n # predict hp lengths\n predsHPLEN = []\n for ii in range( args.hps ):\n tmp = KK.layers.Dense(args.hpdist, activation='softmax')(bottle)\n # print(\"tmp\",tmp.shape) # tmp (N, 33)\n predsHPLEN.append(tmp)\n\n # # stack into one array yielding (128,N,33)\n predsHPLEN2 = KK.backend.stack(predsHPLEN)\n print(\"predsHPLEN2.shape\", predsHPLEN2.shape)\n\n # # permute to yield (N,128,33) to compare against truth\n predictionsHPLEN = KK.backend.permute_dimensions(predsHPLEN2,(1,0,2))\n print(\"predictionsHPLEN.shape\",predictionsHPLEN.shape) # predictions.shape (?, 128, 33)\n\n ################################\n # predict hp base IDentity\n predsHPID = []\n for ii in range( args.hps ):\n tmp = KK.layers.Dense(4, activation='softmax')(bottle)\n # print(\"tmp\",tmp.shape) # tmp (N, 4)\n predsHPID.append(tmp)\n\n # # stack into one array yielding (128,N,4)\n predsHPID2 = KK.backend.stack(predsHPID)\n print(\"predsHPID2.shape\", predsHPID2.shape)\n\n # # permute to yield (N,128,4) to compare against truth\n predictionsHPID = KK.backend.permute_dimensions(predsHPID2,(1,0,2))\n print(\"predictionsHPID.shape\",predictionsHPID.shape) # predictions.shape (?, 128, 4)\n\n ################################\n self.model = KK.models.Model(inputs=inputs, outputs=[predictionsHPID,predictionsHPLEN])\n\n 
#self.model.summary()\n # # ValueError: You tried to call `count_params` on stack, but the layer isn't built. You can build it manually via: `stack.build(batch_input_shape)`.\n\n self.model.compile(optimizer=\"adam\", loss=\"categorical_crossentropy\", metrics=[\"categorical_accuracy\",\"kullback_leibler_divergence\"])\n\n # # instrument tensorboard\n # tf.summary.histogram('output', self.output)\n # tf.summary.histogram('softmax_w', softmax_w)\n # tf.summary.histogram('logits', self.logits)\n # tf.summary.histogram('probs', self.probs)\n # tf.summary.scalar('train_loss', self.cost)", "def get_unet0(nClasses, input_height, input_width, nchannel=3):\n\n inputs = Input(shape=(input_height, input_width, nchannel))\n # temp = BatchNormalization()(inputs)\n\n conv1 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(inputs)\n conv1 = BatchNormalization()(conv1)\n conv1 = Activation(\"relu\")(conv1)\n conv1 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(conv1)\n conv1 = BatchNormalization()(conv1)\n conv1 = Activation(\"relu\")(conv1)\n pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n\n conv2 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(pool1)\n conv2 = BatchNormalization()(conv2)\n conv2 = Activation(\"relu\")(conv2)\n conv2 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(conv2)\n conv2 = BatchNormalization()(conv2)\n conv2 = Activation(\"relu\")(conv2)\n pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n\n conv3 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(pool2)\n conv3 = BatchNormalization()(conv3)\n conv3 = Activation(\"relu\")(conv3)\n conv3 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(conv3)\n conv3 = BatchNormalization()(conv3)\n conv3 = Activation(\"relu\")(conv3)\n pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n\n conv4 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(pool3)\n conv4 = BatchNormalization()(conv4)\n conv4 = Activation(\"relu\")(conv4)\n conv4 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(conv4)\n conv4 = BatchNormalization()(conv4)\n conv4 = Activation(\"relu\")(conv4)\n pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)\n\n conv5 = Conv2D(512, (3, 3), padding='same', kernel_initializer='he_uniform')(pool4)\n conv5 = BatchNormalization()(conv5)\n conv5 = Activation(\"relu\")(conv5)\n conv5 = Conv2D(512, (3, 3), padding='same', kernel_initializer='he_uniform')(conv5)\n conv5 = BatchNormalization()(conv5)\n conv5 = Activation(\"relu\")(conv5)\n\n up6 = concatenate([UpSampling2D(size=(2, 2))(conv5), conv4], axis=3)\n conv6 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(up6)\n conv6 = BatchNormalization()(conv6)\n conv6 = Activation(\"relu\")(conv6)\n conv6 = Conv2D(256, (3, 3), padding='same', kernel_initializer='he_uniform')(conv6)\n conv6 = BatchNormalization()(conv6)\n conv6 = Activation(\"relu\")(conv6)\n\n up7 = concatenate([UpSampling2D(size=(2, 2))(conv6), conv3], axis=3)\n conv7 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(up7)\n conv7 = BatchNormalization()(conv7)\n conv7 = Activation(\"relu\")(conv7)\n conv7 = Conv2D(128, (3, 3), padding='same', kernel_initializer='he_uniform')(conv7)\n conv7 = BatchNormalization()(conv7)\n conv7 = Activation(\"relu\")(conv7)\n\n up8 = concatenate([UpSampling2D(size=(2, 2))(conv7), conv2], axis=3)\n conv8 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(up8)\n conv8 = 
BatchNormalization()(conv8)\n conv8 = Activation(\"relu\")(conv8)\n conv8 = Conv2D(64, (3, 3), padding='same', kernel_initializer='he_uniform')(conv8)\n conv8 = BatchNormalization()(conv8)\n conv8 = Activation(\"relu\")(conv8)\n\n up9 = concatenate([UpSampling2D(size=(2, 2))(conv8), conv1], axis=3)\n conv9 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(up9)\n conv9 = BatchNormalization()(conv9)\n conv9 = Activation(\"relu\")(conv9)\n conv9 = Conv2D(32, (3, 3), padding='same', kernel_initializer='he_uniform')(conv9)\n conv9 = BatchNormalization()(conv9)\n conv9 = Activation(\"relu\")(conv9)\n\n conv10 = Conv2D(nClasses, (1, 1), activation='relu', padding='same')(conv9)\n conv10 = Reshape((input_height * input_width, nClasses))(conv10)\n # conv10 = Permute((2, 1))(conv10)\n\n conv11 = Activation('softmax')(conv10)\n\n\n model = Model(inputs=inputs, outputs=conv11)\n\n return model", "def __init__(self, C, num_classes):\n super(AuxiliaryHeadImageNet, self).__init__()\n self.features = nn.Sequential(nn.ReLU(inplace=True), nn.AvgPool2d(5, stride=2, padding=0, count_include_pad=False), nn.Conv2d(C, 128, 1, bias=False), nn.BatchNorm2d(128), nn.ReLU(inplace=True), nn.Conv2d(128, 768, 2, bias=False), nn.ReLU(inplace=True))\n self.classifier = nn.Linear(768, num_classes)", "def build_resnet101(self):\n use_batch_norm = self.use_batch_norm\n\n imgs = tf.placeholder(tf.float32, [self.batch_size]+self.img_shape)\n is_train = tf.placeholder(tf.bool)\n\n conv1_feats = convolution(imgs, 7, 7, 64, 2, 2, 'conv1')\n conv1_feats = batch_norm(conv1_feats, 'bn_conv1', is_train, use_batch_norm)\n conv1_feats = nonlinear(conv1_feats, 'relu')\n pool1_feats = max_pool(conv1_feats, 3, 3, 2, 2, 'pool1')\n\n res2a_feats = self.basic_block(pool1_feats, 'res2a', 'bn2a', is_train, use_batch_norm, 64, 1)\n res2b_feats = self.basic_block2(res2a_feats, 'res2b', 'bn2b', is_train, use_batch_norm, 64)\n res2c_feats = self.basic_block2(res2b_feats, 'res2c', 'bn2c', is_train, use_batch_norm, 64)\n \n res3a_feats = self.basic_block(res2c_feats, 'res3a', 'bn3a', is_train, use_batch_norm, 128) \n temp = res3a_feats\n for i in range(1, 4):\n temp = self.basic_block2(temp, 'res3b'+str(i), 'bn3b'+str(i), is_train, use_batch_norm, 128)\n res3b3_feats = temp\n \n res4a_feats = self.basic_block(res3b3_feats, 'res4a', 'bn4a', is_train, use_batch_norm, 256)\n temp = res4a_feats\n for i in range(1, 23):\n temp = self.basic_block2(temp, 'res4b'+str(i), 'bn4b'+str(i), is_train, use_batch_norm, 256)\n res4b22_feats = temp\n\n res5a_feats = self.basic_block(res4b22_feats, 'res5a', 'bn5a', is_train, use_batch_norm, 512)\n res5b_feats = self.basic_block2(res5a_feats, 'res5b', 'bn5b', is_train, use_batch_norm, 512)\n res5c_feats = self.basic_block2(res5b_feats, 'res5c', 'bn5c', is_train, use_batch_norm, 512)\n\n res5c_feats_flat = tf.reshape(res5c_feats, [self.batch_size, 49, 2048])\n self.conv_feats = res5c_feats_flat\n self.conv_feat_shape = [49, 2048]\n self.num_ctx = 49 \n self.dim_ctx = 2048\n\n self.imgs = imgs\n self.is_train = is_train", "def build_cnn(input_var=None):\n\n # input layer\n network = lasagne.layers.InputLayer(\n shape=(\n None,\n 1,\n 128,\n 129\n ),\n input_var=input_var\n )\n\n # conv\n network = lasagne.layers.Conv2DLayer(\n lasagne.layers.batch_norm(network), # Batch norm on incoming\n num_filters=32, # Number of convolution filters to use\n filter_size=(5, 5),\n stride=(1, 1), # Stride fo (1,1)\n pad='same', # Keep output size same as input\n nonlinearity=lasagne.nonlinearities.leaky_rectify, 
#rectify, # ReLU\n W=lasagne.init.GlorotUniform() # W initialization\n )\n\n # conv\n #network = lasagne.layers.Conv2DLayer(\n #lasagne.layers.batch_norm(network), # Batch norm on incoming\n #num_filters=32, # Number of convolution filters to use\n #filter_size=(5, 5),\n #stride=(1, 1), # Stride fo (1,1)\n #pad='same', # Keep output size same as input\n #nonlinearity=lasagne.nonlinearities.leaky_rectify, #rectify, # ReLU\n #W=lasagne.init.GlorotUniform() # W initialization\n #)\n\n # pool (2x2 max pool)\n network = lasagne.layers.MaxPool2DLayer(\n network, pool_size=(2, 2)\n )\n\n # conv\n network = lasagne.layers.Conv2DLayer(\n lasagne.layers.batch_norm(network), # Batch norm on incoming\n num_filters=32, # Number of convolution filters to use\n filter_size=(3, 3),\n stride=(1, 1), # Stride fo (1,1)\n pad='same', # Keep output size same as input\n nonlinearity=lasagne.nonlinearities.leaky_rectify, #rectify, # ReLU\n W=lasagne.init.GlorotUniform() # W initialization\n )\n\n # conv\n #network = lasagne.layers.Conv2DLayer(\n #lasagne.layers.batch_norm(network), # Batch norm on incoming\n #num_filters=32, # Number of convolution filters to use\n #filter_size=(3, 3),\n #stride=(1, 1), # Stride fo (1,1)\n #pad='same', # Keep output size same as input\n #nonlinearity=lasagne.nonlinearities.leaky_rectify, #rectify, # ReLU\n #W=lasagne.init.GlorotUniform() # W initialization\n #)\n\n # pool (2x2 max pool)\n network = lasagne.layers.MaxPool2DLayer(\n network, pool_size=(2, 2)\n )\n\n # Fully-connected layer of 256 units with 50% dropout on its inputs\n network = lasagne.layers.DenseLayer(\n lasagne.layers.dropout(network, p=.5),\n num_units=256,\n nonlinearity=lasagne.nonlinearities.leaky_rectify, #rectify, # ReLU\n W=lasagne.init.HeUniform() # W initialization\n )\n\n # Finally add a 1-unit softmax output layer\n network = lasagne.layers.DenseLayer(\n network,\n num_units=1,\n nonlinearity=lasagne.nonlinearities.sigmoid\n )\n\n return network", "def __call__(self, x_input):\n reuse = True if self.built else None\n net = load_kaffe_model(self.model_name, x_input, reuse=reuse)\n self.built = True\n self.net = net\n #output = end_points['alexnet_v2/fc8']\n # Strip off the extra reshape op at the output\n output = self.net.get_output()\n probs = output.op.inputs[0]\n return probs", "def __init__(self, n_input, n_output, hidden_layer_size, reg):\n self.reg = reg\n self.fulllayer1 = FullyConnectedLayer(n_input, hidden_layer_size)\n self.reglayer1 = ReLULayer()\n self.fulllayer2 = FullyConnectedLayer(hidden_layer_size, n_output)", "def __init__(self, kernel_size, filters, stage, block):\n super().__init__(name='identity' + str(stage) + block)\n filters1, filters2, filters3 = filters\n if K.image_data_format() == 'channels_last':\n bn_axis = 3\n else:\n bn_axis = 1\n\n conv_name_base = 'res' + str(stage) + block + '_branch'\n bn_name_base = 'bn' + str(stage) + block + '_branch'\n\n self.conv1 = layers.Conv2D(\n filters1, (1, 1),\n kernel_initializer='he_normal',\n name=conv_name_base + '2a')\n self.bn1 = layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')\n self.act1 = layers.Activation('relu')\n\n self.conv2 = layers.Conv2D(\n filters2,\n kernel_size,\n padding='same',\n kernel_initializer='he_normal',\n name=conv_name_base + '2b')\n self.bn2 = layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')\n self.act2 = layers.Activation('relu')\n\n self.conv3 = layers.Conv2D(\n filters3, (1, 1),\n kernel_initializer='he_normal',\n name=conv_name_base + '2c')\n self.bn3 = 
layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')\n\n self.add = layers.Add()\n self.act = layers.Activation('relu')", "def Compute_EdgeBoxesAndCNN_features(demonet='res152',nms_thresh = 0.7,database='IconArt_v1',\n augmentation=False,L2 =False,\n saved='all',verbose=True,filesave='tfrecords',k_regions=300,\n testMode=False,plotProposedBoxes=False):\n\n path_data = '/media/gonthier/HDD/output_exp/ClassifPaintings/'\n path_imgs = path_data + 'EdgeBoxesIllust/'+database +'/'\n \n if plotProposedBoxes:\n print(\"We will only plot the regions of the EdgeBoxes with k_regions = \",k_regions,path_imgs)\n pathlib.Path(path_imgs).mkdir(parents=True, exist_ok=True) \n \n item_name,path_to_img,default_path_imdb,classes,ext,num_classes,str_val,df_label,\\\n path_data,Not_on_NicolasPC = get_database(database)\n \n if augmentation:\n raise NotImplementedError\n N = 50\n else: \n N=1\n if L2:\n raise NotImplementedError\n extL2 = '_L2'\n else:\n extL2 = ''\n if saved=='all':\n savedstr = '_all'\n elif saved=='fc7':\n savedstr = ''\n elif saved=='pool5':\n savedstr = '_pool5'\n \n tf.reset_default_graph() # Needed to use different nets one after the other\n if verbose: print('=== EdgeBoxes net',demonet,'database',database,' ===')\n \n if demonet=='res152':\n weights_path = '/media/gonthier/HDD/models/resnet152_weights_tf.h5'\n model = resnet_152_keras.resnet152_model_2048output(weights_path)\n num_features = 2048\n else:\n raise(NotImplementedError)\n tfconfig = tf.ConfigProto(allow_soft_placement=True)\n tfconfig.gpu_options.allow_growth=True\n # init session\n# sess = tf.Session(config=tfconfig)\n \n features_resnet_dict= {}\n \n sets = ['train','val','trainval','test']\n \n if filesave == 'pkl':\n name_pkl_all_features = path_data+'EdgeBoxes_'+ demonet +'_'+database+'_N'+str(N)+extL2+'_TLforMIL_nms_'+str(nms_thresh)+savedstr+'.pkl'\n pkl = open(name_pkl_all_features, 'wb')\n elif filesave =='tfrecords':\n if k_regions==300:\n k_per_bag_str = ''\n else:\n k_per_bag_str = '_k'+str(k_regions)\n dict_writers = {}\n for set_str in sets:\n name_pkl_all_features = path_data\n if testMode: name_pkl_all_features+= 'TestMode_'\n name_pkl_all_features += 'EdgeBoxes_'+ demonet +'_'+database+'_N'+str(N)+extL2+'_TLforMIL_nms_'+str(nms_thresh)+savedstr+k_per_bag_str+'_'+set_str+'.tfrecords'\n dict_writers[set_str] = tf.python_io.TFRecordWriter(name_pkl_all_features)\n \n model_edgeboxes = 'model/model.yml'\n print('Need of pip install opencv-contrib-python')\n edge_detection = cv2.ximgproc.createStructuredEdgeDetection(model_edgeboxes)\n \n number_of_regions = []\n Itera = 1000\n if testMode:\n Itera = 1\n for i,name_img in enumerate(df_label[item_name]):\n if testMode and i>1:\n break\n if filesave=='pkl':\n if not(k_regions==300):\n raise(NotImplementedError)\n if i%Itera==0:\n if verbose : print(i,name_img)\n if not(i==0):\n pickle.dump(features_resnet_dict,pkl) # Save the data\n features_resnet_dict= {}\n if database in ['IconArt_v1','VOC2007','clipart','comic','Paintings',\\\n 'watercolor','WikiTenLabels','MiniTrain_WikiTenLabels',\\\n 'WikiLabels1000training','CASPApaintings']:\n complet_name = path_to_img + name_img + '.jpg'\n elif database=='PeopleArt':\n complet_name = path_to_img + name_img\n name_sans_ext = os.path.splitext(name_img)[0]\n elif(database=='Wikidata_Paintings') or (database=='Wikidata_Paintings_miniset_verif'):\n name_sans_ext = os.path.splitext(name_img)[0]\n complet_name = path_to_img +name_sans_ext + '.jpg'\n\n if plotProposedBoxes:\n 
plot_im_withBoxes(complet_name,edge_detection,k_regions,path_imgs)\n list_im, rois = get_crops(complet_name,edge_detection,k_regions,demonet,augmentation=False)\n number_of_regions += [len(list_im)]\n fc7 = model.predict(list_im)\n # Need a BGR and between 0 and 255 minus the mean per color \n \n roi_scores = np.ones((len(list_im,)))\n# cls_score, cls_prob, bbox_pred, rois,roi_scores, fc7,pool5 = TL_im_detect(sess, net, im) # Arguments: im (ndarray): a color image in BGR order\n #features_resnet_dict[name_img] = fc7[np.concatenate(([0],np.random.randint(1,len(fc7),29))),:]\n if saved=='fc7':\n features_resnet_dict[name_img] = fc7\n# elif saved=='pool5':\n# features_resnet_dict[name_img] = pool5\n elif saved=='all':\n features_resnet_dict[name_img] = rois,roi_scores,fc7\n \n elif filesave=='tfrecords':\n if i%Itera==0:\n if verbose : print(i,name_img)\n if database in ['IconArt_v1','VOC2007','clipart','comic','Paintings','watercolor'\\\n ,'CASPApaintings','WikiTenLabels','MiniTrain_WikiTenLabels','WikiLabels1000training']:\n complet_name = path_to_img + name_img + '.jpg'\n name_sans_ext = name_img\n elif database=='PeopleArt':\n complet_name = path_to_img + name_img\n name_sans_ext = os.path.splitext(name_img)[0]\n elif(database=='Wikidata_Paintings') or (database=='Wikidata_Paintings_miniset_verif'):\n name_sans_ext = os.path.splitext(name_img)[0]\n complet_name = path_to_img +name_sans_ext + '.jpg'\n\n im = cv2.imread(complet_name)\n \n height = im.shape[0]\n width = im.shape[1]\n\n if plotProposedBoxes:\n plot_im_withBoxes(complet_name,edge_detection,k_regions,path_imgs)\n list_im, rois = get_crops(complet_name,edge_detection,k_regions,demonet,augmentation=False)\n # Boxes are x, y, w, h\n number_of_regions += [len(list_im)]\n fc7 = model.predict(list_im)\n roi_scores = np.ones((len(list_im,)))\n# cls_score, cls_prob, bbox_pred, rois,roi_scores, fc7,pool5 = TL_im_detect(sess, net, im) # Arguments: im (ndarray): a color image in BGR order\n \n if testMode:\n print('Image :',height,width)\n print('Normally ROI (x1,x2,y1,y2) :')\n print(rois)\n \n if(len(fc7) >= k_regions):\n rois = rois[0:k_regions,:]\n roi_scores =roi_scores[0:k_regions,]\n fc7 = fc7[0:k_regions,:]\n else:\n number_repeat = k_regions // len(fc7) +1\n f_repeat = np.repeat(fc7,number_repeat,axis=0)\n roi_scores_repeat = np.repeat(roi_scores,number_repeat,axis=0)\n rois_repeat = np.repeat(rois,number_repeat,axis=0)\n rois = rois_repeat[0:k_regions,:]\n roi_scores =roi_scores_repeat[0:k_regions,]\n fc7 = f_repeat[0:k_regions,:]\n num_regions = fc7.shape[0]\n num_features = fc7.shape[1]\n dim1_rois = rois.shape[1]\n classes_vectors = np.zeros((num_classes,1),dtype=np.float32)\n \n if database=='Paintings':\n for j in range(num_classes):\n if(classes[j] in df_label['classe'][i]):\n classes_vectors[j] = 1\n if database in ['VOC2007','clipart','watercolor','comic','PeopleArt','CASPApaintings']:\n for j in range(num_classes):\n value = int((int(df_label[classes[j]][i])+1.)/2.)\n # En fait ce qui se passe la c'est que tu rescale a la sauvage \n # entre 0 et 1 un truc qui peut etre entre 0 et 1 mais aussi entre -1 et 1\n # C'est chelou\n classes_vectors[j] = value\n if database in ['WikiTenLabels','MiniTrain_WikiTenLabels','WikiLabels1000training','IconArt_v1']:\n for j in range(num_classes):\n value = int(df_label[classes[j]][i])\n classes_vectors[j] = value\n \n #features_resnet_dict[name_img] = fc7[np.concatenate(([0],np.random.randint(1,len(fc7),29))),:]\n if saved=='fc7':\n print('It is possible that you need to replace 
_bytes_feature by _floats_feature in this function')\n print('!!!!!!!!!!!!!!!!!!!!!')\n raise(NotImplementedError)\n # TODO : modifier cela !\n features=tf.train.Features(feature={\n 'height': _int64_feature(height),\n 'width': _int64_feature(width),\n 'num_regions': _int64_feature(num_regions),\n 'num_features': _int64_feature(num_features),\n 'fc7': _bytes_feature(tf.compat.as_bytes(fc7.tostring())),\n 'label' : _bytes_feature(tf.compat.as_bytes(classes_vectors.tostring())),\n 'name_img' : _bytes_feature(str.encode(name_sans_ext))})\n elif saved=='pool5':\n raise(NotImplementedError)\n elif saved=='all':\n feature={\n 'height': _int64_feature(height),\n 'width': _int64_feature(width),\n 'num_regions': _int64_feature(num_regions),\n 'num_features': _int64_feature(num_features),\n 'dim1_rois': _int64_feature(dim1_rois),\n 'rois': _floats_feature(rois),\n 'roi_scores': _floats_feature(roi_scores),\n 'fc7': _floats_feature(fc7),\n 'label' : _floats_feature(classes_vectors),\n 'name_img' : _bytes_feature(str.encode(name_sans_ext))}\n features=tf.train.Features(feature=feature)\n example = tf.train.Example(features=features) \n# print(len(feature['rois']))\n if database=='VOC2007' or database=='PeopleArt':\n if (df_label.loc[df_label[item_name]==name_img]['set']=='train').any():\n dict_writers['train'].write(example.SerializeToString())\n dict_writers['trainval'].write(example.SerializeToString())\n elif (df_label.loc[df_label[item_name]==name_img]['set']=='val').any():\n dict_writers['val'].write(example.SerializeToString())\n dict_writers['trainval'].write(example.SerializeToString())\n elif (df_label.loc[df_label[item_name]==name_img]['set']=='test').any():\n dict_writers['test'].write(example.SerializeToString())\n if (database=='Wikidata_Paintings_miniset') or database=='Paintings':\n if (df_label.loc[df_label[item_name]==name_img]['set']=='train').any():\n dict_writers['train'].write(example.SerializeToString())\n dict_writers['trainval'].write(example.SerializeToString())\n elif (df_label.loc[df_label[item_name]==name_img]['set']=='validation').any():\n dict_writers['val'].write(example.SerializeToString())\n dict_writers['trainval'].write(example.SerializeToString())\n elif (df_label.loc[df_label[item_name]==name_img]['set']=='test').any():\n dict_writers['test'].write(example.SerializeToString())\n if database in ['IconArt_v1','watercolor','clipart','comic','WikiTenLabels',\\\n 'MiniTrain_WikiTenLabels','WikiLabels1000training','CASPApaintings']:\n if (df_label.loc[df_label[item_name]==name_img]['set']=='train').any():\n dict_writers['train'].write(example.SerializeToString())\n dict_writers['trainval'].write(example.SerializeToString())\n elif (df_label.loc[df_label[item_name]==name_img]['set']=='test').any():\n dict_writers['test'].write(example.SerializeToString())\n \n if filesave=='pkl':\n pickle.dump(features_resnet_dict,pkl)\n pkl.close()\n elif filesave=='tfrecords':\n for set_str in sets:\n dict_writers[set_str].close()\n \n print('Mean number of regions per image :',np.mean(number_of_regions),'with k max = ',k_regions)\n \n tf.reset_default_graph()\n \n if testMode:\n sets = ['train','test','trainval','val']\n dim_rois = 4\n for set_str in sets:\n name_pkl_all_features = path_data\n if testMode: name_pkl_all_features+= 'TestMode_'\n name_pkl_all_features += 'EdgeBoxes_'+ demonet +'_'+database+'_N'+str(N)+extL2+'_TLforMIL_nms_'+str(nms_thresh)+savedstr+k_per_bag_str+'_'+set_str+'.tfrecords'\n print(name_pkl_all_features)\n if set_str=='train':\n train_dataset = 
tf.data.TFRecordDataset(name_pkl_all_features)\n sess = tf.Session()\n train_dataset = train_dataset.map(lambda r: parser_w_rois_all_class(r, \\\n num_classes=num_classes,with_rois_scores=True,num_features=num_features,\n num_rois=k_regions,dim_rois=dim_rois))\n mini_batch_size = 1\n dataset_batch = train_dataset.batch(mini_batch_size)\n dataset_batch.cache()\n iterator = dataset_batch.make_one_shot_iterator()\n next_element = iterator.get_next()\n print(next_element)\n nx = sess.run(next_element)\n print(nx)\n name_img = nx[-1][0].decode('utf8')\n if database in ['IconArt_v1','VOC2007','clipart','comic','Paintings',\\\n 'watercolor','WikiTenLabels','MiniTrain_WikiTenLabels',\\\n 'WikiLabels1000training','CASPApaintings']:\n complet_name = path_to_img + name_img + '.jpg'\n name_sans_ext = name_img\n elif database=='PeopleArt':\n complet_name = path_to_img + name_img\n name_sans_ext = os.path.splitext(name_img)[0]\n elif(database=='Wikidata_Paintings') or (database=='Wikidata_Paintings_miniset_verif'):\n name_sans_ext = os.path.splitext(name_img)[0]\n complet_name = path_to_img +name_sans_ext + '.jpg'\n \n im = cv2.imread(complet_name)\n \n blobs, im_scales = get_blobs(im)\n dd = nx[1]/ im_scales[0] \n score = nx[2]\n roi = np.hstack((dd[0],score[0].reshape((-1,1))))\n \n \n class_name = ['']\n vis_detections_list(im, class_name, [roi])\n \n os.remove(name_pkl_all_features)", "def __init__(self, input_dim=(3, 32, 32), hidden_dims_CNN = ((32, 5, 1, 1), (2, 2, 2)),\n hidden_dims_FC = ((1024), (0.5)), num_classes=10, weight_scale=1e-3, \n reg=0.0, dtype=np.float32):\n self.params = {}\n self.fix_params = {}\n self.reg = reg\n self.dtype = dtype\n \n C_input, H_input, W_input = input_dim\n pre_C = C_input \n pre_H = H_input\n pre_W = W_input\n \n num_CNN = len(hidden_dims_CNN)\n num_FC = len(hidden_dims_FC)\n\n for i in range(0, num_CNN):\n W_name = \"W\" + str(i)\n b_name = \"b\" + str(i)\n conv_param_name = \"conv_param\" + str(i)\n gamma_name = \"gamma\" + str(i)\n beta_name = \"beta\" + str(i)\n bn_param_name = \"bn_param\" + str(i)\n pool_param_name = \"pool_param\" + str(i)\n\n if num_CNN == 1:\n num_filters, filter_size, stride, pad = hidden_dims_CNN[0] # (F, filter_size, stride, pad)\n pool_stride, pool_height, pool_width = hidden_dims_CNN[1] # (pooling_stride, pooling_size)\n else:\n num_filters, filter_size, stride, pad = hidden_dims_CNN[i][0] # (F, filter_size, stride, pad)\n pool_stride, pool_height, pool_width = hidden_dims_CNN[i][1] # (pooling_stride, pooling_size)\n \n if weight_scale == -1:\n self.params[W_name] = np.random.randn(num_filters, pre_C, filter_size, filter_size) / np.sqrt(filter_size * filter_size * pre_C)\n else: \n self.params[W_name] = np.random.randn(num_filters, pre_C, filter_size, filter_size) * weight_scale\n self.params[b_name] = np.zeros(num_filters)\n self.fix_params[conv_param_name] = {'stride': stride, 'pad': pad}\n \n self.params[gamma_name] = np.random.randn(num_filters)\n self.params[beta_name] = np.random.randn(num_filters)\n self.fix_params[bn_param_name] = {'mode': 'train'}\n\n self.fix_params[pool_param_name] = {'pool_height': pool_height, 'pool_width': pool_width, 'stride': pool_stride}\n \n pre_H, pre_W = cnn_out_shape(pre_H, pre_W, filter_size, filter_size, stride, pad)\n pre_C = num_filters \n pre_H, pre_W = pool_out_shape(pre_H, pre_W, pool_height, pool_width, pool_stride)\n\n pre_fc_dim = pre_H * pre_W * pre_C\n\n for i in range(0, num_FC):\n W_name = \"W\" + str(i + num_CNN)\n b_name = \"b\" + str(i + num_CNN)\n gamma_name = \"gamma\" + 
str(i + num_CNN)\n beta_name = \"beta\" + str(i + num_CNN)\n bn_param_name = \"bn_param\" + str(i + num_CNN)\n drop_name = \"drop_ratio\" + str(i + num_CNN)\n \n if num_FC == 1 :\n fc_num = hidden_dims_FC[0]\n drop_ratio = hidden_dims_FC[1]\n else:\n fc_num = hidden_dims_FC[i][0]\n drop_ratio = hidden_dims_FC[i][1]\n\n if weight_scale == -1:\n self.params[W_name] = np.random.randn(pre_fc_dim, fc_num) / np.sqrt(pre_fc_dim)\n else:\n self.params[W_name] = np.random.randn(pre_fc_dim, fc_num) * weight_scale\n self.params[b_name] = np.zeros(fc_num)\n\n self.params[gamma_name] = np.random.randn(fc_num)\n self.params[beta_name] = np.random.randn(fc_num)\n self.fix_params[bn_param_name] = {'mode': 'train'}\n\n self.fix_params[drop_name] = {'mode': 'train', 'p': drop_ratio}\n\n pre_fc_dim = fc_num\n\n total_layer = num_CNN + num_FC\n W_name = \"W\" + str(total_layer)\n b_name = \"b\" + str(total_layer)\n if weight_scale == -1:\n self.params[W_name] = np.random.randn(pre_fc_dim, num_classes) / np.sqrt(pre_fc_dim)\n else:\n self.params[W_name] = np.random.randn(pre_fc_dim, num_classes) * weight_scale\n self.params[b_name] = np.zeros(num_classes)\n\n\n self.num_CNN = num_CNN\n self.num_FC = num_FC\n self.total_layer = num_CNN + num_FC\n\n for k, v in self.params.iteritems():\n self.params[k] = v.astype(dtype)", "def net(self):\n model = self.get('forward','model')\n weights = self.get('forward','weights')\n return caffe.Net(model, weights, caffe.TEST)", "def __init__(self, h_units, weight_decay, dropout_rate):\n \n # inherit class constructor attributes from tf.keras.layers.Layer\n super(fc_block, self).__init__()\n \n # add dense layer attribute with L2 Regulariser\n self.dense = tf.keras.layers.Dense(h_units, use_bias = False, kernel_regularizer = \n tf.keras.regularizers.l2(l = weight_decay))\n \n # add batch norm layer attribute\n self.batch_norm = tf.keras.layers.BatchNormalization()", "def arch(self, nn) -> 'final node of the tensor flow graph (y_conv)':\n\n print(self)\n\n # first conv. layer \n # 5x5 filter, 1 input channel, 32 output channels\n W_conv1 = nn.weight_variable([5, 5, 1, 32])\n b_conv1 = nn.bias_variable([32])\n stride1 = 1\n h_conv1 = tf.nn.relu(nn.conv2d(nn.x_image, W_conv1, stride1) + b_conv1)\n \n # first pooling layer (2x2) \n h_pool1 = nn.max_pool_2x2(h_conv1)\n\n # second conv. 
layer \n # 5x5 filter, 32 input channel, 64 output channels\n W_conv2 = nn.weight_variable([5, 5, 32, 64])\n b_conv2 = nn.bias_variable([64])\n stride2 = 1\n h_conv2 = tf.nn.relu(nn.conv2d(h_pool1, W_conv2, stride2) + b_conv2)\n\n # second pooling layer (2x2) \n h_pool2 = nn.max_pool_2x2(h_conv2)\n\n # reshape (flatten) output\n h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64])\n\n # first fully connected layer\n W_fc1 = nn.weight_variable([7 * 7 * 64, 1024])\n b_fc1 = nn.bias_variable([1024])\n h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)\n\n # dropout\n h_fc1_drop = tf.nn.dropout(h_fc1, nn.keep_prob)\n\n # second (final) fully connected layer (softmax)\n W_fc2 = nn.weight_variable([1024, 10])\n b_fc2 = nn.bias_variable([10])\n y_conv=tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)\n\n return y_conv", "def __init__(self, settings):\n super(CaffeNet, self).__init__(settings)\n\n self._range_scale = 1.0 # not needed; image already in [0,255]\n\n \n #ULF[todo]: explain, make this a setting\n self._net_channel_swap = (2,1,0)\n #self._net_channel_swap = None\n if self._net_channel_swap:\n self._net_channel_swap_inv = tuple([self._net_channel_swap.index(ii) for ii in range(len(self._net_channel_swap))])\n else:\n self._net_channel_swap_inv = None\n\n\n # (1) import caffe library\n #\n sys.path.insert(0, os.path.join(settings.caffevis_caffe_root, 'python'))\n import caffe\n print 'debug[caffe]: CaffeNet.__init__: using Caffe in', caffe.__file__\n\n # Check if the imported caffe provides all required functions\n self._check_caffe_version(caffe)\n \n # Set the mode to CPU or GPU.\n # Note: in the latest Caffe versions, there is one Caffe object\n # *per thread*, so the mode must be set per thread!\n # Here we set the mode for the main thread; it is also separately\n # set in CaffeProcThread.\n if settings.caffevis_mode_gpu:\n caffe.set_mode_gpu()\n print 'debug[caffe]: CaffeNet.__init__: CaffeVisApp mode (in main thread): GPU'\n else:\n caffe.set_mode_cpu()\n print 'debug[caffe]: CaffeNet.__init__: CaffeVisApp mode (in main thread): CPU'\n print 'debug[caffe]: CaffeNet.__init__: Loading the classifier (', settings.caffevis_deploy_prototxt, settings.caffevis_network_weights, ') ...'\n\n\n # (2) load the caffe model\n # \n # ULF[hack]: make Caffe silent - there should be a better\n # (i.e. official) way to do so. We only want to suppress\n # the info (like network topology) while still seeing warnings\n # and errors!\n suppress_output = (hasattr(self.settings, 'caffe_init_silent')\n and self.settings.caffe_init_silent)\n\n if suppress_output:\n # open 2 file descriptors\n null_fds = [os.open(os.devnull, os.O_RDWR) for x in xrange(2)]\n # save the current file descriptors to a tuple\n original_fds = os.dup(1), os.dup(2)\n # put /dev/null fds on stdout (1) and stderr (2)\n os.dup2(null_fds[0], 1)\n os.dup2(null_fds[1], 2)\n\n self.net = caffe.Classifier(\n settings.caffevis_deploy_prototxt,\n settings.caffevis_network_weights,\n mean = None, # Set to None for now, assign later # self._data_mean,\n channel_swap = self._net_channel_swap,\n raw_scale = self._range_scale,\n )\n \n if suppress_output:\n # restore file original descriptors for stdout (1) and stderr (2)\n os.dup2(original_fds[0], 1)\n os.dup2(original_fds[1], 2)\n # close the temporary file descriptors\n os.close(null_fds[0])\n os.close(null_fds[1])\n print 'debug[caffe]: CaffeNet.__init__: ... 
loading completed.'\n\n self._init_data_mean()\n self._check_force_backward_true()", "def __init__(self, num_input_channels, k_way, final_layer_size):\n super(MamlCNN, self).__init__()\n self.conv1 = conv_block(num_input_channels, 64)\n self.conv2 = conv_block(64, 64)\n self.conv3 = conv_block(64, 64)\n self.conv4 = conv_block(64, 64)\n self.conv5 = conv_block(64, 64)\n self.conv6 = conv_block(64, 64)\n\n self.logits = nn.Linear(final_layer_size, k_way)", "def _get_resnet_fc_layer(self):\n\t\tlayer_iterator = ww.WeightWatcher().make_layer_iterator(self.model)\n\t\tfc_layer= None\n\t\tfor ww_layer in layer_iterator:\n\t\t\tprint(ww_layer.name)\n\t\t\tif ww_layer.name=='fc':\n\t\t\t\tfc_layer = ww_layer\n\t\t\n\t\treturn fc_layer", "def __init__(self):\n torch.nn.Module.__init__(self)\n ######################### Convolution and pooling layers of VGG-16.\n self.features = torchvision.models.vgg16(pretrained=True).features # fine tune?\n self.features = torch.nn.Sequential(*list(self.features.children())\n [:-22]) # Remove pool2 and rest, lack of computational resource\n # No grad for convVGG\n # for param in self.features.parameters():\n # param.requires_grad = False\n\n #################### Channel Grouping Net\n # self.fc1_ = torch.nn.Linear(128, 128*16)#lack of resource\n # self.fc2_ = torch.nn.Linear(128, 128*16)\n # self.fc3_ = torch.nn.Linear(128, 128*16)\n #\n # torch.nn.init.kaiming_normal_(self.fc1_.weight.data, nonlinearity='relu')\n # if self.fc1_.bias is not None:\n # torch.nn.init.constant_(self.fc1_.bias.data, val=0) # fc层的bias进行constant初始化\n # torch.nn.init.kaiming_normal_(self.fc2_.weight.data, nonlinearity='relu')\n # if self.fc2_.bias is not None:\n # torch.nn.init.constant_(self.fc2_.bias.data, val=0) # fc层的bias进行constant初始化\n # torch.nn.init.kaiming_normal_(self.fc3_.weight.data, nonlinearity='relu')\n # if self.fc3_.bias is not None:\n # torch.nn.init.constant_(self.fc3_.bias.data, val=0) # fc层的bias进行constant初始化\n\n self.fc1 = torch.nn.Linear(128*28*28, 128)\n self.fc2 = torch.nn.Linear(128*28*28, 128)\n self.fc3 = torch.nn.Linear(128*28*28, 128)\n\n\n torch.nn.init.kaiming_normal_(self.fc1.weight.data, nonlinearity='relu')\n if self.fc1.bias is not None:\n torch.nn.init.constant_(self.fc1.bias.data, val=0) # fc层的bias进行constant初始化\n torch.nn.init.kaiming_normal_(self.fc2.weight.data, nonlinearity='relu')\n if self.fc2.bias is not None:\n torch.nn.init.constant_(self.fc2.bias.data, val=0) # fc层的bias进行constant初始化\n torch.nn.init.kaiming_normal_(self.fc3.weight.data, nonlinearity='relu')\n if self.fc3.bias is not None:\n torch.nn.init.constant_(self.fc3.bias.data, val=0) # fc层的bias进行constant初始化\n\n self.layerNorm=nn.LayerNorm([224,224])\n\n # global grad for hook\n self.image_reconstruction = None\n self.register_hooks()\n self.GradWeight=1e-1\n\n # ################### STN input N*3*448*448\n # self.localization = [\n # nn.Sequential(\n # nn.MaxPool2d(4,stride=4),#112\n # nn.ReLU(True),\n #\n # nn.Conv2d(3, 32, kernel_size=5,stride=1,padding=2), # 112\n # nn.MaxPool2d(2, stride=2), # 56\n # nn.ReLU(True),\n #\n # nn.Conv2d(32, 48, kernel_size=3,stride=1,padding=1),\n # nn.MaxPool2d(2, stride=2), # 56/2=28\n # nn.ReLU(True),\n #\n # nn.Conv2d(48, 64, kernel_size=3, stride=1, padding=1),\n # nn.MaxPool2d(2, stride=2), # 28/2=14\n # nn.ReLU(True) #output 64*14*14\n # ).cuda(),\n # nn.Sequential(\n # nn.MaxPool2d(4, stride=4), # 112\n # nn.ReLU(True),\n #\n # nn.Conv2d(3, 32, kernel_size=5, stride=1, padding=2), # 112\n # nn.MaxPool2d(2, stride=2), # 56\n # nn.ReLU(True),\n #\n 
# nn.Conv2d(32, 48, kernel_size=3, stride=1, padding=1),\n # nn.MaxPool2d(2, stride=2), # 56/2=28\n # nn.ReLU(True),\n #\n # nn.Conv2d(48, 64, kernel_size=3, stride=1, padding=1),\n # nn.MaxPool2d(2, stride=2), # 28/2=14\n # nn.ReLU(True) # output 64*14*14\n # ).cuda(),\n # nn.Sequential(\n # nn.MaxPool2d(4, stride=4), # 112\n # nn.ReLU(True),\n #\n # nn.Conv2d(3, 32, kernel_size=5, stride=1, padding=2), # 112\n # nn.MaxPool2d(2, stride=2), # 56\n # nn.ReLU(True),\n #\n # nn.Conv2d(32, 48, kernel_size=3, stride=1, padding=1),\n # nn.MaxPool2d(2, stride=2), # 56/2=28\n # nn.ReLU(True),\n #\n # nn.Conv2d(48, 64, kernel_size=3, stride=1, padding=1),\n # nn.MaxPool2d(2, stride=2), # 28/2=14\n # nn.ReLU(True) # output 64*14*14\n # ).cuda()\n # ]\n # # Regressor for the 3 * 2 affine matrix\n # self.fc_loc = [\n # nn.Sequential(\n # nn.Linear(64 * 14 * 14, 32),\n # nn.ReLU(True),\n # nn.Linear(32, 3 * 2)\n # ).cuda(),\n # nn.Sequential(\n # nn.Linear(64 * 14 * 14, 32),\n # nn.ReLU(True),\n # nn.Linear(32, 3 * 2)\n # ).cuda(),\n # nn.Sequential(\n # nn.Linear(64 * 14 * 14, 32),\n # nn.ReLU(True),\n # nn.Linear(32, 3 * 2)\n # ).cuda()\n # ]\n # # Initialize the weights/bias with identity transformation\n # for fc_locx in self.fc_loc:\n # fc_locx[2].weight.data.zero_()\n # fc_locx[2].bias.data.copy_(torch.tensor([1, 0, 0, 0, 1, 0], dtype=torch.float))\n\n ########################Bilinear CNN output 256 channels\n self.bcnnConv_1=torch.nn.Sequential(*list(torchvision.models.vgg16(pretrained=True).features.children())\n [:-1]) # Remove pool3 and rest.\n self.bcnnConv_2 = torch.nn.Sequential(*list(torchvision.models.vgg16(pretrained=True).features.children())\n [:-1]) # Remove pool3 and rest.\n self.bcnnConv_3 = torch.nn.Sequential(*list(torchvision.models.vgg16(pretrained=True).features.children())\n [:-1]) # Remove pool3 and rest.\n #BCNN Linear classifier.\n self.bfc1 = torch.nn.Linear(512*512, 200)\n self.bfc2 = torch.nn.Linear(512*512, 200)\n self.bfc3 = torch.nn.Linear(512*512, 200)\n torch.nn.init.kaiming_normal_(self.bfc1.weight.data) # 何凯明初始化\n if self.bfc1.bias is not None:\n torch.nn.init.constant_(self.bfc1.bias.data, val=0) # fc层的bias进行constant初始化\n torch.nn.init.kaiming_normal_(self.bfc2.weight.data) # 何凯明初始化\n if self.bfc2.bias is not None:\n torch.nn.init.constant_(self.bfc2.bias.data, val=0) # fc层的bias进行constant初始化\n torch.nn.init.kaiming_normal_(self.bfc3.weight.data) # 何凯明初始化\n if self.bfc3.bias is not None:\n torch.nn.init.constant_(self.bfc3.bias.data, val=0) # fc层的bias进行constant初始化\n\n # self.CBP1 = CompactBilinearPooling(512, 512, 50000)\n # self.CBP2 = CompactBilinearPooling(512, 512, 50000)\n # self.CBP3 = CompactBilinearPooling(512, 512, 50000)", "def __init__(self, input_dim=(3,32,32), num_filters=[32], hidden_layers=[100], \r\n num_classes=10 ,filter_size=7, weight_scale=1e-3, reg=0, dropout=0, \r\n use_batch_norm=False, dtype=np.float32):\r\n self.params={}\r\n self.use_dropout = dropout > 0\r\n self.use_batch_norm = use_batch_norm\r\n self.conv_params = {'stride': 1, 'pad': (filter_size - 1) // 2}\r\n self.pool_params = {'pool_height': 2, 'pool_width': 2, 'stride': 2}\r\n self.num_conv_layers = len(num_filters)\r\n self.num_hidden_layers = len(hidden_layers)\r\n self.bn_params = []\r\n self.dropout_params = []\r\n self.reg = reg\r\n\r\n # Initialize batch normalization parameters if necessary.\r\n num_layers = self.num_conv_layers + self.num_hidden_layers\r\n if self.use_batch_norm:\r\n for i in range(num_layers):\r\n self.bn_params.append({'mode':'train'})\r\n # 
Initialize dropout parameters if necessary\r\n if self.use_dropout:\r\n self.dropout_params = {'mode':'trian', 'p':dropout}\r\n\r\n C, H, W = input_dim\r\n channels, HH, WW = C, H, W\r\n # Initialize the parameters for the Convolutional network.\r\n for i in range(1, self.num_conv_layers+1):\r\n self.params['W{}'.format(i)] = np.random.randn(num_filters[i-1], \r\n channels, filter_size, \r\n filter_size) * weight_scale\r\n self.params['b{}'.format(i)] = np.zeros(num_filters[i-1])\r\n # Keeping track of the Height and Width of the image as we convolve\r\n # it through multiple layers. After pooling make sure the dimensions\r\n # make sense\r\n if (HH <= self.pool_params['pool_height']):\r\n raise Exception('The pool height and input height are equal'.\\\r\n format(self.pool_params['pool_height'], HH))\r\n else:\r\n HH = (HH - self.pool_params['pool_height']) / self.pool_params['stride'] + 1\r\n if (WW <= self.pool_params['pool_width']):\r\n raise Exception('The pool width and input width are equal'.\\\r\n format(self.params['pool_width'], WW))\r\n else:\r\n WW = (WW - self.pool_params['pool_width']) / self.pool_params['stride'] + 1\r\n\r\n\r\n # Updating the number of channels for the new input.\r\n channels = num_filters[i-1]\r\n # Initialize the parameters for the batch normalization if necessary.\r\n if self.use_batch_norm:\r\n self.params['gamma{}'.format(i)] = np.ones(channels)\r\n self.params['beta{}'.format(i)] = np.zeros(channels)\r\n\r\n # Initialize the parameters for the fully connected network.\r\n fc_input_dim = np.prod((HH, WW, channels))\r\n for i in range(1, self.num_hidden_layers+1):\r\n self.params['W{}'.format(i+self.num_conv_layers)] = np.random.randn(fc_input_dim, \r\n hidden_layers[i-1]) * weight_scale\r\n self.params['b{}'.format(i+self.num_conv_layers)] = np.zeros(hidden_layers[i-1])\r\n # Initialize the parameters for batch normalization if necessary.\r\n if self.use_batch_norm:\r\n self.params['gamma{}'.format(i+self.num_conv_layers)] = np.ones(hidden_layers[i-1])\r\n self.params['beta{}'.format(i+self.num_conv_layers)] = np.zeros(hidden_layers[i-1])\r\n fc_input_dim = hidden_layers[i-1]\r\n\r\n # Initialize the parameters for the last layer of the fully connected network.\r\n self.params['W{}'.format(i+self.num_conv_layers+1)] = np.random.randn(hidden_layers[i-1],\r\n num_classes) * weight_scale\r\n self.params['b{}'.format(i+self.num_conv_layers+1)] = np.zeros(num_classes)\r\n\r\n # Convert the dtype for the parameters of the model.\r\n for k, v in self.params.items():\r\n self.params[k] = v.astype(dtype)", "def __init__(self, num_features, lr, train_index2value, test_index2value, target_grp, optimizer, dataset_type, strength):\n super().__init__()\n\n #self.save_hyperparameters()\n\n self.lr = lr\n self.train_index2value = train_index2value\n self.test_index2value = test_index2value\n self.target_grp = target_grp\n self.optimizer = optimizer\n self.dataset_type = dataset_type\n\n if self.dataset_type == 'tabular':\n self.net = nn.Linear(num_features, 1)\n elif self.dataset_type == 'image':\n # construct network\n if strength == 'weak':\n self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=2, kernel_size=(3, 3)),\n nn.MaxPool2d(kernel_size=(2, 2)),\n nn.Flatten())\n self.fc = nn.Linear(338 + 1, 1)\n elif strength == 'normal':\n self.cnn = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=32, kernel_size=(3, 3)),\n nn.MaxPool2d(kernel_size=(2, 2)),\n nn.Flatten())\n self.fc = nn.Linear(5408 + 1, 1)\n elif strength == 'strong':\n self.cnn = 
nn.Sequential(nn.Conv2d(in_channels=1, out_channels=64, kernel_size=(3, 3)),\n nn.MaxPool2d(kernel_size=(2, 2)),\n nn.Flatten())\n self.fc = nn.Linear(10816 + 1, 1)\n else:\n raise Exception(\"Strength of the Adversary CNN not recognized!\")\n else:\n raise Exception(f\"Model was unable to recognize dataset type {self.dataset_type}!\")\n \n # init loss\n self.loss_fct = nn.BCEWithLogitsLoss()", "def __init__(self):\n super(DLStudio.ExperimentsWithCIFAR.Net2, self).__init__()\n self.relu = nn.ReLU()\n strides = []\n patch_size = 2\n ## conv1:\n out_ch, ker_size, conv_stride, pool_stride = 128,5,1,2\n self.conv1 = nn.Conv2d(3, out_ch, (ker_size,ker_size), padding=(ker_size-1)//2) \n self.pool1 = nn.MaxPool2d(patch_size, pool_stride) \n strides += (conv_stride, pool_stride)\n ## conv2:\n in_ch = out_ch\n out_ch, ker_size, conv_stride, pool_stride = 128,3,1,2\n self.conv2 = nn.Conv2d(in_ch, out_ch, ker_size, padding=(ker_size-1)//2)\n self.pool2 = nn.MaxPool2d(patch_size, pool_stride) \n strides += (conv_stride, pool_stride)\n ## conv3: \n ## meant for repeated invocation, must have same in_ch, out_ch and strides of 1\n in_ch = out_ch\n out_ch, ker_size, conv_stride, pool_stride = in_ch,2,1,1\n self.conv3 = nn.Conv2d(in_ch, out_ch, ker_size, padding=1)\n self.pool3 = nn.MaxPool2d(patch_size, pool_stride) \n# strides += (conv_stride, pool_stride)\n ## figure out the number of nodes needed for entry into fc:\n in_size_for_fc = out_ch * (32 // np.prod(strides)) ** 2 ## (A)\n self.in_size_for_fc = in_size_for_fc\n self.fc1 = nn.Linear(in_size_for_fc, 150)\n self.fc2 = nn.Linear(150, 100)\n self.fc3 = nn.Linear(100, 10)", "def arch(self, nn) -> 'final node of the tensor flow graph (y_conv)':\n\n print(self)\n\n # first conv. layer \n # 5x5 filter, 1 input channel, 32 output channels\n W_conv1 = nn.weight_variable([5, 5, 1, 32])\n b_conv1 = nn.bias_variable([32])\n stride1 = 1\n h_conv1 = tf.nn.relu(nn.conv2d(nn.x_image, W_conv1, stride1, 'VALID') + b_conv1) \n # outputs a 24x24x32 image\n \n # first pooling layer (2x2) \n h_pool1 = nn.max_pool_2x2(h_conv1) \n # outputs a 12x12x32 image\n\n # second conv. layer \n # 3x3 filter, 32 input channel, 32 output channels\n W_conv2 = nn.weight_variable([3, 3, 32, 32])\n b_conv2 = nn.bias_variable([32])\n stride2 = 1\n h_conv2 = tf.nn.relu(nn.conv2d(h_pool1, W_conv2, stride2, 'VALID') + b_conv2)\n # outputs a 10x10x32 image\n\n # third conv. 
layer\n # 3x3 filter, 32 input channel, 32 output channels\n W_conv3 = nn.weight_variable([3, 3, 32, 32])\n b_conv3 = nn.bias_variable([32])\n stride3 = 1\n h_conv3 = tf.nn.relu(nn.conv2d(h_conv2, W_conv3, stride3, 'VALID') + b_conv3)\n # outputs a 8x8x32 image\n\n # reshape (flatten) output\n h_conv3_flat = tf.reshape(h_conv3, [-1, 8*8*32])\n\n # first fully connected layer\n W_fc1 = nn.weight_variable([8 * 8 * 32, 1024])\n b_fc1 = nn.bias_variable([1024])\n h_fc1 = tf.nn.relu(tf.matmul(h_conv3_flat, W_fc1) + b_fc1)\n\n # dropout\n h_fc1_drop = tf.nn.dropout(h_fc1, nn.keep_prob)\n\n # second (final) fully connected layer (softmax)\n W_fc2 = nn.weight_variable([1024, 10])\n b_fc2 = nn.bias_variable([10])\n y_conv=tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)\n\n return y_conv", "def cnn_model_fn(features):\n print(\"features shape\", features.shape)\n\n input_layer = tf.reshape(features, [-1, 28, 28, 1])\n\n conv1 = tf.layers.conv2d(inputs=input_layer, filters=64, kernel_size=[3, 3], padding=\"same\", activation=tf.nn.relu)\n print(conv1)\n pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2, padding=\"same\")\n print(pool1)\n conv2 = tf.layers.conv2d(inputs=pool1, filters=128, kernel_size=[3, 3], padding=\"same\", activation=tf.nn.relu)\n print(conv2)\n pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2, padding=\"same\")\n print(pool2)\n conv3 = tf.layers.conv2d(inputs=pool2, filters=256, kernel_size=[3, 3], padding=\"same\", activation=tf.nn.relu)\n print(conv3)\n conv4 = tf.layers.conv2d(inputs=conv3, filters=256, kernel_size=[3, 3], padding=\"same\", activation=tf.nn.relu)\n print(conv4)\n pool3 = tf.layers.max_pooling2d(inputs=conv4, pool_size=[2, 2], strides=2, padding=\"same\")\n print(pool3)\n pool3_flat = tf.reshape(pool3, [-1, 4 * 4 * 256])\n print(pool3_flat)\n fc1 = tf.layers.dense(inputs=pool3_flat, units=1024, activation=tf.nn.relu)\n print(fc1)\n fc2 = tf.layers.dense(inputs=fc1, units=1024, activation=tf.nn.relu)\n print(fc2)\n fc2_bn = tf.nn.batch_normalization(x=fc2, mean=0, variance=1, scale=1, offset=0, variance_epsilon=1e-6)\n print(fc2_bn)\n fc3 = tf.layers.dense(inputs=fc2_bn, units=10)\n print(fc3)\n return fc3", "def build_network(self):\n\n input_placeholder = Input(shape = self.input_shape)\n\n # Stage 1\n x = self.main_path_block(\n input_placeholder,\n 64, (7, 7), 'same',\n 'conv1', 'bn_conv1',\n activation = 'relu',\n strides = (2, 2)\n )\n x = MaxPooling2D((3, 3), strides = (2, 2), padding = 'same')(x)\n\n # Stage 2\n x = self.identity_block(x, 64, 'relu', 2, 'a', False)\n x = self.identity_block(x, 64, 'relu', 2, 'b')\n\n # Stage 3\n x = self.convolutional_block(x, [128, 128, 128], 'relu', 3, 'a')\n x = self.identity_block(x, 128, 'relu', 3, 'b')\n\n # Stage 4\n x = self.convolutional_block(x, [256, 256, 256], 'relu', 4, 'a')\n x = self.identity_block(x, 256, 'relu', 4, 'b')\n\n # Stage 5\n x = self.convolutional_block(x, [512, 512, 512], 'relu', 5, 'a')\n x = self.identity_block(x, 512, 'relu', 4, 'b')\n\n # Fully Connected Layers\n x = BatchNormalization(axis = 3)(x)\n x = Activation('relu')(x)\n x = AveragePooling2D((2, 1), padding = 'valid', strides = (2, 2))(x)\n x = Flatten()(x)\n x = Dense(512)\n x = Dense(\n self.classes, activation = 'softmax',\n name = 'fc_' + str(self.classes),\n kernel_initializer = glorot_uniform(seed = 0)\n )(x)\n\n self.model = Model(input_placeholder, x, name = 'Resnet18')" ]
[ "0.63327324", "0.62027663", "0.61002606", "0.6070533", "0.59601736", "0.59473467", "0.5912948", "0.5911498", "0.58962667", "0.58913976", "0.58264315", "0.58224165", "0.5808085", "0.58010757", "0.5788575", "0.5781931", "0.5776973", "0.57699645", "0.57483983", "0.57286537", "0.57209235", "0.5720423", "0.5713747", "0.5705745", "0.5699857", "0.5698856", "0.56975317", "0.5687108", "0.5684361", "0.5680609", "0.5669319", "0.5654715", "0.56479096", "0.562241", "0.5620957", "0.5611294", "0.5607407", "0.5603152", "0.55951905", "0.5589841", "0.5588083", "0.55877286", "0.5586913", "0.55843574", "0.55815274", "0.55803883", "0.5574771", "0.5572783", "0.5568836", "0.5567572", "0.5562557", "0.55552006", "0.55534595", "0.55486876", "0.55485326", "0.5545483", "0.5541824", "0.5534615", "0.5533759", "0.5533114", "0.5530339", "0.5529269", "0.55229706", "0.5508254", "0.5494694", "0.5488905", "0.5488381", "0.5484103", "0.5482768", "0.5473985", "0.5470668", "0.54673684", "0.5467164", "0.5465541", "0.54633164", "0.5462976", "0.5462092", "0.54619277", "0.54606485", "0.5459439", "0.545723", "0.5454476", "0.5453386", "0.5453326", "0.5452251", "0.5448844", "0.5446767", "0.54407555", "0.54405594", "0.5438879", "0.54383546", "0.5434781", "0.54341054", "0.54307866", "0.5421651", "0.5420113", "0.5419239", "0.54173404", "0.5416654", "0.541537", "0.5412738" ]
0.0
-1
This is some other solution from codewars to test against.
def my_fizz_buzz_cuckoo_clock(time):
    hours, minutes = map(int, time.split(":"))
    sounds = {0: " ".join(["Cuckoo"] * (hours % 12 or 12)),
              15: "Fizz Buzz",
              30: "Cuckoo",
              45: "Fizz Buzz"}
    if minutes in sounds:
        return sounds[minutes]
    return "Fizz" if minutes % 3 == 0 else "Buzz" if minutes % 5 == 0 else "tick"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exercise_b2_53():\r\n pass", "def exercise_b2_106():\r\n pass", "def exercise_b2_113():\r\n pass", "def exercise_b2_107():\r\n pass", "def test_4_4_1_1(self):\n pass", "def exercise_b2_69():\r\n pass", "def exercise_b2_52():\r\n pass", "def exercise_b2_56():\r\n pass", "def exercise_b2_27():\r\n pass", "def exercise_b2_39():\r\n pass", "def exercise_b2_26():\r\n pass", "def exercise_b2_98():\r\n pass", "def exercise_b2_82():\r\n pass", "def exercise_b2_70():\r\n pass", "def exercise_b2_43():\r\n pass", "def substantiate():", "def exercise_b2_93():\r\n pass", "def nulltest():", "def test_create_unexpected_problem(self):\n pass", "def solution(s):", "def think(s):", "def test_T4():", "def test_T4():", "def test_uparforvarg(self):", "def test_probabilistic_parsers():", "def testBeliefs1sk(self):", "def test_fix_code_typical_code():\r\n\r\n pass", "def check():", "def test_03_visit_special(self):", "def test_T01():", "def exercise_b2_95():\r\n pass", "def degibber(self):", "def exo2():", "def test_T0():", "def problem_298():\n pass", "def exercise_b2_86():\r\n pass", "def regular(self):", "def testBeliefs2sk(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def __call__(self) -> None:", "def fn():", "def base():", "def _regr_basic():", "def test_T3():", "def test_T3():", "def unitary_test():", "def test_4():", "def test_replace_identity(self):\n pass", "def test_trailing_data(self):", "def test_string():", "def check(self) -> None:", "def exercise_2b():\n\n return", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def test_get_list8(self):\n pass", "def sth():", "def g():", "def test_optimize_parse():\n assert True", "def test_get_parts(self):\n pass", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def test_identical(self):\n write this test!", "def support(self):", "def util():\n pass", "def util():\n pass", "def testGeneratorType(self):", "def test_vargs(self):", "def func():", "def test_T2():", "def test_T2():", "def test_open_fill(self):", "def smarter():\r\n pass", "def simple():", "def simple():" ]
[ "0.63127756", "0.6088745", "0.607841", "0.60318285", "0.5995737", "0.5885328", "0.586407", "0.577447", "0.57653236", "0.57585347", "0.57469904", "0.57358444", "0.56477886", "0.564617", "0.5639498", "0.55612564", "0.55611384", "0.5469325", "0.5427286", "0.5421196", "0.5418454", "0.54146034", "0.54146034", "0.53854716", "0.53742695", "0.53642774", "0.5359028", "0.5356991", "0.5353228", "0.5324926", "0.531178", "0.52954376", "0.5285454", "0.5254097", "0.52339953", "0.51679623", "0.5158468", "0.5125574", "0.5102975", "0.5102975", "0.5102975", "0.5102975", "0.5102975", "0.5085822", "0.50804144", "0.5030538", "0.5017904", "0.5013748", "0.5013748", "0.50076914", "0.4999001", "0.49868155", "0.49683502", "0.49587232", "0.4952172", "0.49518982", "0.49493307", "0.49493307", "0.49493307", "0.49493307", "0.49493307", "0.49342734", "0.49335015", "0.49235168", "0.49233004", "0.4918259", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4901271", "0.4898787", "0.48978794", "0.48873872", "0.48873872", "0.48860157", "0.4877838", "0.4876113", "0.48728192", "0.48728192", "0.48700532", "0.48656595", "0.4864257", "0.4864257" ]
0.0
-1
The index view, for the home page. Shows Campaigns this UserProfile is in.
def index(request):
    context = dict()
    if request.user.is_authenticated():
        context['campaigns'] = [
            CampaignSerializer(c).serialize()
            for c in request.user.userprofile.campaigns.order_by('pk')]
    return render(request, 'voter_validation/index.html', context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index(request): \n \n all_projects = models.Project.objects.all()\n projects = get_objects_for_user(request.user, 'view_project', all_projects)\n \n fbads_settings = FacebookAdsSettings.objects.first()\n return render_to_response('index.html',{\n 'projects': projects, \n 'fbads_settings': fbads_settings},\n context_instance=RequestContext(request))", "def index(request):\n\n LOGGER.debug('socialprofile_demo.views.index')\n\n response_data = {}\n\n return render_to_response('index.html', response_data, context_instance=RequestContext(request))", "def index(request):\n return render(request, 'commissioned_sites/index.html',\n {'sites': CommissionedSite.objects.all().order_by('-date')},\n context_instance=RequestContext(request))", "def index(page):\r\n per_page = 24\r\n count = cached_users.get_total_users()\r\n accounts = cached_users.get_users_page(page, per_page)\r\n if not accounts and page != 1:\r\n abort(404)\r\n pagination = Pagination(page, per_page, count)\r\n if current_user.is_authenticated():\r\n user_id = current_user.id\r\n else:\r\n user_id = 'anonymous'\r\n top_users = cached_users.get_leaderboard(current_app.config['LEADERBOARD'],\r\n user_id)\r\n return render_template('account/index.html', accounts=accounts,\r\n total=count,\r\n top_users=top_users,\r\n title=\"Community\", pagination=pagination)", "def index(request):\n users = User.objects.filter(is_staff=False, is_active=True).order_by('username')\n return render(request, 'users/view_all_users.html',\n { 'users': users })", "def index():\r\n if current_user.is_authenticated():\r\n user_id = current_user.id\r\n else:\r\n user_id = 'anonymous'\r\n top_users = cached_users.get_leaderboard(current_app.config['LEADERBOARD'],\r\n user_id=user_id)\r\n\r\n return render_template('/stats/index.html', title=\"Community Leaderboard\",\r\n top_users=top_users)", "def index(request):\n home_user = request.user.profile\n \"\"\"num_activities = Activity.objects.count()\"\"\"\n Cactivity = CompletedActivity.objects.filter(user=home_user)\n UActivity = Cactivity.values('activity_id', 'activity__name', 'activity__value', 'activity__group').annotate \\\n (count=Count('activity__name'), earned=Sum('activity__value'))\n TimesCompelted = Cactivity.annotate(count=Count('activity__name'))\n # Generate counts of some of the main objects\n\n context = {\n 'huser': home_user,\n 'Lname' : home_user.user.last_name,\n 'Fname': home_user.user.first_name,\n 'num_activities': 1,\n 'activity_list' : UActivity,\n \"times_completed\" : TimesCompelted\n }\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context=context)", "def index(request):\n\t# Generate counts of some of the main objects\n\tnum_customers = Customers.objects.all().count()\n\tnum_instructors = Instructors.objects.all().count()\n\tnum_membership_plans = MembershipPlans.objects.count() # The 'all()' is implied by default.\n\n\tselectInstructors = SelectInstructors();\n\tjoinQuery = JoinQuery();\n\taggregationQuery = AggregationQuery();\n\tdivisionQuery = DivisionQuery();\n\tnestedAggregationQuery = NestedAggregationQuery();\n\tdeleteOperationCascade = DeleteOperationCascade();\n\tdeleteOperation = DeleteOperation();\n\tupdateNumberOfPeople = UpdateNumberOfPeople();\n\t# Render the HTML template index.html with the data in the context variable\n\treturn 
render(\n\t\trequest,\n\t\t'index.html',\n\t\tcontext={'num_membership_plans':num_membership_plans,'num_instructors':num_instructors,'num_customers':num_customers,'select_instructors':selectInstructors,'join_query':joinQuery,'aggregation_query':aggregationQuery,'division_query':divisionQuery,'nested_aggregation_query':nestedAggregationQuery,'delete_operation_cascade':deleteOperationCascade, 'delete_operation': deleteOperation, 'update_number_of_people': updateNumberOfPeople},\n\t)", "def index():\n user, user_id = get_user()\n # Get this user's course with their roles\n my_courses = []\n if user:\n my_courses = user.get_courses()\n # Get all public courses\n public_courses = Course.get_public()\n\n return render_template('courses/index.html',\n user=user,\n my_courses=my_courses,\n public_courses=public_courses)", "def index(request):\n params = get_user_profile_params(request)\n\n competition = Competition.get_active()\n params['top_competition_id'] = competition.id\n params['minify_js'] = settings.MINIFY_JS\n\n params['first_page_text'] = ''\n config = Config.objects.all()\n if config.count() > 0:\n params['first_page_text'] = config[0].first_page_text\n\n #order email test\n #order = Order.objects.get(pk=25)\n #send_order_email(order.email, order, order.items.all)\n\n return render(request, 'base.html', params)", "def index(request, extra_context={}, user=AnonymousUser()):\r\n\r\n # The course selection work is done in courseware.courses.\r\n domain = settings.FEATURES.get('FORCE_UNIVERSITY_DOMAIN') # normally False\r\n # do explicit check, because domain=None is valid\r\n if domain is False:\r\n domain = request.META.get('HTTP_HOST')\r\n\r\n courses = get_courses(user, domain=domain)\r\n courses = sort_by_announcement(courses)\r\n\r\n context = {'courses': courses}\r\n\r\n context.update(extra_context)\r\n return render_to_response('index.html', context)", "def index(request):\n if request.user.is_authenticated:\n return redirect('/dashboard')\n else:\n context = {'client_id': settings.OPENHUMANS_CLIENT_ID,\n 'oh_proj_page': settings.OH_ACTIVITY_PAGE}\n\n return render(request, 'main/index.html', context=context)", "def index():\n user_list = Users.query.all()\n return render_template('users/index.html'\n ,user_list=user_list\n ,t=t\n ,m=m)", "def index(request):\n # Generate counts of some of the main objects\n num_orgs=Organization.objects.count()\n num_contacts=Contact.objects.count()\n # Get the number of Volunteers\n num_volunteers=ContactTypeTag.objects.filter(tag_type__exact='vo').count()\n num_projects=Project.objects.count() # The 'all()' is implied by default.\n \n # Render the HTML template index.html with the data in the context variable\n return render(\n request,\n 'index.html',\n context={\n 'num_orgs':num_orgs,\n 'num_contacts':num_contacts,\n 'num_volunteers':num_volunteers,\n 'num_projects':num_projects,\n },\n )", "def list_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def index(request):\n if request.user is None:\n return view_all(request, index_call=True)\n else:\n return mine(request)", "def index(request):\n\n dests = Destination.objects.all()\n\n return render(request,'index.html',{'dests': dests})", "def my_index(request):\n try:\n denied = models.ParticipantStatus.objects.get(codename=models.ParticipantStatus.DENIED)\n except:\n denied = -1\n\n competitions_im_creator_of = Competition.objects.filter(creator=request.user).order_by('-pk').select_related(\n 
'creator').annotate(num_participants=Count('participants'))\n competitions_im_admin_of = Competition.objects.exclude(pk__in=[c.pk for c in competitions_im_creator_of]).filter(\n admins__in=[request.user]).order_by('-pk').select_related('creator').annotate(num_participants=Count('participants'))\n my_competitions = list(competitions_im_creator_of) + list(competitions_im_admin_of)\n\n # Invalid select related previously\n published_competitions = models.Competition.objects.filter(published=True).select_related('creator').annotate(num_participants=Count('participants'))\n published_competitions = reversed(sorted(published_competitions, key=lambda c: c.get_start_date))\n context_dict = {\n 'my_competitions': my_competitions,\n # Invalid select related previously\n 'competitions_im_in': list(request.user.participation.all().exclude(status=denied)),\n 'published_competitions': published_competitions,\n }\n return render(request, \"web/my/index.html\", context_dict)", "def contest_won_viewall(request):\n is_loggedin, username = get_session_variables(request)\n contest_list = Contest_won.objects.all()\n\n if contest_list:\t\n contest_participant_list = []\n for contest_won_obj in contest_list:\t\n c_id = contest_won_obj.contest_id\n c_p_objs = Contest_won_participant.objects. \\\n filter(contest_id = c_id)\n contest_participant_list.extend(c_p_objs)\n\n return render_to_response('achievement/contest_viewall.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'contest_list':contest_list, \\\n 'contest_participant_list':contest_participant_list}, \\\n RequestContext(request))\n else:\n return render_to_response('achievement/noview.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'type': 'Contest\\'s won'}, \\\n RequestContext(request))", "def all_accounts(request):\n accounts = Account.objects.all()\n return render(request, 'app/home.html', {'accounts': accounts})", "def get(self):\n query = Campaign.query\n return paginate(Campaign.__tablename__, query, self.schema), HTTPStatus.OK", "def index(request):\n\n context = {'employees': User.objects.select_related('profile').filter(is_staff=True).order_by('first_name')}\n return render(request, 'Employees/index.html', context)", "def all_users(request):\n # order users by last name\n users = UserProfile.objects.all().order_by('last_name')\n return render(request, \"allusers.html\", {'users': users})", "def show_campaigns(request, utm_campaign, **kwargs):\n \n err_msg = ''\n try:\n err_msg = str(kwargs['kwargs']['err_msg'])\n except:\n pass\n \n test_type_override = ''\n try:\n test_type_override = MySQLdb._mysql.escape_string(request.POST['test_type_override'])\n \n if test_type_override == 'Banner':\n test_type_var = FDH._TESTTYPE_BANNER_\n elif test_type_override == 'Landing Page':\n test_type_var = FDH._TESTTYPE_LP_\n elif test_type_override == 'Banner and LP':\n test_type_var = FDH._TESTTYPE_BANNER_LP_\n \n except:\n test_type_var = ''\n pass\n \n try:\n \"\"\" Find the earliest and latest page views for a given campaign \"\"\"\n lptl = DL.LandingPageTableLoader()\n ccrml = DL.CiviCRMLoader()\n \n start_time = ccrml.get_earliest_donation(utm_campaign)\n end_time = ccrml.get_latest_donation(utm_campaign)\n \n one_step = lptl.is_one_step(start_time, end_time, utm_campaign) \n \n if not(one_step): \n start_time = lptl.get_earliest_campaign_view(utm_campaign)\n end_time = lptl.get_latest_campaign_view(utm_campaign) \n\n interval = 1\n \n \"\"\" Create reporting object to retrieve campaign data and write plots to 
image repo on disk \"\"\"\n ir = DR.IntervalReporting(was_run=False, use_labels=False, font_size=20, plot_type='line', query_type='campaign', file_path=projSet.__web_home__ + 'campaigns/static/images/')\n \n \"\"\" Produce analysis on the campaign view data \"\"\" \n ir.run(start_time, end_time, interval, 'views', utm_campaign, {}, one_step=one_step)\n \n \"\"\" \n ESTIMATE THE START AND END TIME OF THE CAMPAIGN\n ===============================================\n \n Search for the first instance when more than 10 views are observed over a sampling period\n \"\"\"\n \n col_names = ir._data_loader_.get_column_names()\n \n views_index = col_names.index('views')\n ts_index = col_names.index('ts')\n \n row_list = list(ir._data_loader_._results_) # copy the query results\n for row in row_list:\n if row[views_index] > 100:\n start_time_est = row[ts_index]\n break\n row_list.reverse()\n for row in row_list:\n if row[views_index] > 100:\n end_time_est = row[ts_index]\n break\n \n \n \"\"\"\n BUILD THE VISUALIZATION FOR THE TEST VIEWS OF THIS CAMAPAIGN\n ============================================================ \n \"\"\"\n \n \"\"\" Read the test name \"\"\"\n ttl = DL.TestTableLoader()\n row = ttl.get_test_row(utm_campaign)\n test_name = ttl.get_test_field(row ,'test_name')\n \n \"\"\" Regenerate the data using the estimated start and end times \"\"\"\n ir = DR.IntervalReporting(was_run=False, use_labels=False, font_size=20, plot_type='line', query_type='campaign', file_path=projSet.__web_home__ + 'campaigns/static/images/')\n ir.run(start_time_est, end_time_est, interval, 'views', utm_campaign, {}, one_step=one_step)\n \n \"\"\" Determine the type of test (if not overridden) and retrieve the artifacts \"\"\"\n test_type, artifact_name_list = FDH.get_test_type(utm_campaign, start_time, end_time, DL.CampaignReportingLoader(query_type=''), test_type_var)\n \n return render_to_response('campaigns/show_campaigns.html', {'utm_campaign' : utm_campaign, 'test_name' : test_name, 'start_time' : start_time_est, 'end_time' : end_time_est, 'one_step' : one_step, \\\n 'artifacts' : artifact_name_list, 'test_type' : test_type, 'err_msg' : err_msg}, context_instance=RequestContext(request)) \n\n except Exception as inst:\n \n logging.error('Failed to correctly produce campaign diagnostics.')\n logging.error(type(inst))\n logging.error(inst.args)\n logging.error(inst)\n \n \"\"\" Return to the index page with an error \"\"\"\n err_msg = 'There is insufficient data to analyze this campaign: %s. Check to see if the <a href=\"/LML/\">impressions have been loaded</a>. 
<br><br>ERROR:<br><br>%s' % (utm_campaign, inst.__str__())\n \n return index(request, kwargs={'err_msg' : err_msg})", "def view(args):\n if args.available:\n printAvailableCampaigns()\n if args.search_help:\n print(getSearchQueryHelp())", "def get_list_of_campaigns(self, limit=0, offset=0):\n logger.info(\"Function call: get_list_of_campaigns\")\n return self.__handle_result(self.__send_request('campaigns', 'GET', {'limit': limit or 0, 'offset': offset or 0}))", "def index(request):\n\n chats = Chat.objects.all().order_by('-created_at')\n\n if request.user.is_authenticated():\n chats = chats.filter(friend_groups__in=request.user.get_profile().\\\n friend_groups.all().values_list('id'))\n else:\n chats = chats.filter(friend_groups__isnull=True)\n\n return render_to_response('index.html', {\n 'chats': chats[:10],\n }, context_instance=RequestContext(request))", "def dashboard(request):\r\n profile = get_object_or_404(Profile, user=request.user)\r\n wallet = Wallet.objects.get(user=request.user)\r\n history = History.objects.get(pk=1)\r\n referrals = Referral.objects.filter(referee=request.user).count()\r\n invoices = Invoice.objects.filter(issuer=request.user).count()\r\n return render(request, 'coin/dashboard.html', {'profile': profile, \r\n 'wallet': wallet, 'history': history, 'referrals': referrals, \r\n 'invoices': invoices})", "def my_dashboard(request):\n #Get the associated contact for our user\n user_con = request.user.contact\n qs_proj_assoc, qs_task_assoc = get_tiered_upcoming(user_con)\n\n #Get the projects associated with the user\n user_proj_table = table_proj.ProjectAssocAjaxTable(qs_proj_assoc)\n #Get the tasks associated with the user\n user_task_table = table_task.TaskAssocAjaxTable(qs_task_assoc)\n\n # Render the HTML template index.html with the data in the context variable\n return render(\n request,\n 'my_dashboard.html',\n context={\n 'user_con':user_con,\n 'user_proj_table':user_proj_table,\n 'user_task_table':user_task_table,\n 'project_source' : 'data-dashboard-project-upcoming',\n 'task_source' : 'data-dashboard-task-upcoming',\n 'input_id' : user_con.pk,\n 'print_url':reverse_lazy('my-dashboard-print'),\n },\n )", "def test_admin_sms_campaign_view_list(self):\n response = self.client.get('/admin/sms_module/smscampaign/')\n self.failUnlessEqual(response.status_code, 200)", "def index(request):\n posts = Post.objects.filter(status=1).order_by('-created_on')[:2]\n\n # Pre populate fields with profile information\n if request.user.is_authenticated:\n profile = UserProfile.objects.get(user=request.user)\n form = ContactForm(initial={\n 'contact_name': profile.default_full_name,\n 'contact_email': profile.default_email,\n })\n else:\n form = ContactForm()\n\n context = {\n 'posts': posts,\n 'form': form,\n }\n\n return render(request, 'home/index.html', context)", "def index(request):\n template_var = base_template_vals(request)\n up = UserProfile.objects.filter(django_user=request.user)\n template_var[\"likes\"] = Like.objects.filter(user=up[0])\n \n try:\n event_list = Event.objects.all().order_by('id')\n except Event.DoesNotExist:\n raise Http404\n output = ', '.join([e.title for e in event_list]) \n template_var[\"events\"] = output\t\n return render_to_response(\"event/index.html\", template_var,\n context_instance=RequestContext(request))", "def list_view(request):\n campgrounds = Campground.objects.all()\n return render(request, 'campgrounds.html')", "def index(request):\n template = 'index.html'\n hospitals = Hospital.objects.all()\n return 
render_to_response(template, {'hospitals': hospitals}, context_instance=RequestContext(request))", "def index(request):\n if _use_new_ui(request):\n return _serve_new_ui(request)\n\n if request.user is None:\n return view_all(request, index_call=True)\n else:\n return mine(request)", "def index():\n try:\n # Retrieve a list of active clients from the BancBox API for \n # the right side bar.\n active_clients = api.get_active_clients()\n except Exception, e:\n active_clients = []\n logger.error('Error retrieving active clients: %s', e)\n return render_template('index.html', active_clients=active_clients)", "def index():\n if (session_get_int(\"user_id\") is not None):\n return render_template(\"dashboard.html\")\n else:\n return render_template(\"index.html\")", "def index(request):\n # TODO: Use django.views.generic.ListView\n courses = Course.objects.order_by('-name')\n context = {\n 'courses': courses,\n }\n return render(request, 'hall_pass/index.html', context)", "def index(request):\n user_data_list = UserData.objects.all().order_by('environment')\n context = {'user_data_list': user_data_list}\n\n return render(request, 'index.html', context)", "def index(self):\n\n return render_template(\n 'home_page/index.html',\n **locals()\n )", "def all_users():\n\n users = crud.get_users()\n\n return render_template('all_users.html', users=users)", "def index(self):\n log.debug('index()')\n return redirect_to('/admin/dashboard')", "def achieve_viewall(request):\n is_loggedin, username = get_session_variables(request)\n contrib_list = []\n article_list = []\n gsoc_list = []\n speaker_list = []\n intern_list = []\n contest_participant_list = []\n icpc_participants_regional_list = []\n icpc_participants_final_list = []\n\n contrib_list_all = Contribution.objects.all()\n contrib_list = Contribution.objects.all()[:5]\n article_list = Article.objects.all()[:5]\n gsoc_list = Gsoc.objects.all()[:5]\n speaker_list = Speaker.objects.all()[:5]\n intern_list = Intern.objects.all()[:5]\n contest_list = Contest_won.objects.all()[:5]\n\n \n contrib_org = {}\n if contrib_list_all:\n for contrib in contrib_list_all:\n if contrib.org_name not in contrib_org.keys():\n contrib_org[contrib.org_name] = 0\n\n for contrib in contrib_list:\n contrib_org[contrib.org_name] += 1\n\n if contest_list:\t\n contest_participant_list = []\n\tfor contest_won_obj in contest_list:\t\n\t c_id = contest_won_obj.contest_id\n\t c_p_objs = Contest_won_participant.objects.filter(contest_id = c_id)\n\t contest_participant_list.extend(c_p_objs)\n \n icpc_list_regionals = ACM_ICPC_detail.objects.filter(level='regional').order_by('ranking')[:2]\n if icpc_list_regionals:\n for icpc_obj in icpc_list_regionals:\n team = icpc_obj.team_name\n member1 = [icpc_obj.participant1_name, \\\n get_username_from_email(icpc_obj.participant1_email)]\n\n member2 = [icpc_obj.participant2_name, \\\n get_username_from_email(icpc_obj.participant2_email)]\n\n member3 = [icpc_obj.participant3_name, \\\n get_username_from_email(icpc_obj.participant3_email)]\n\n icpc_participant_list = [icpc_obj, member1,member2,member3]\n icpc_participants_regional_list.append(icpc_participant_list)\n\n icpc_list_finals = ACM_ICPC_detail.objects.filter(level='finals').order_by('ranking')[:2]\n if icpc_list_finals:\n for icpc_obj in icpc_list_finals:\n team = icpc_obj.team_name\n member1 = [icpc_obj.participant1_name, \\\n get_username_from_email(icpc_obj.participant1_email)]\n\n member2 = [icpc_obj.participant2_name, \\\n get_username_from_email(icpc_obj.participant2_email)]\n\n 
member3 = [icpc_obj.participant3_name, \\\n get_username_from_email(icpc_obj.participant3_email)]\n\n icpc_participant_list = [icpc_obj, member1,member2,member3]\n icpc_participants_final_list.append(icpc_participant_list)\n\n return render_to_response('achievement/achievement_viewall.html',\\\n\t\t{'username':username, \\\n 'is_loggedin':is_loggedin, \\\n 'contrib_list':contrib_list, \\\n 'contrib_org':contrib_org,\\\n 'article_list':article_list, \\\n 'gsoc_list':gsoc_list, \\\n 'speaker_list':speaker_list, \\\n 'intern_list':intern_list, \\\n 'contest_list':contest_list, \\\n 'contest_participant_list':contest_participant_list, \\\n 'icpc_participants_final_list':icpc_participants_final_list, \\\n 'icpc_participants_regional_list':icpc_participants_regional_list}, \\\n RequestContext(request))", "def homepage(request):\n limit = 7\n featured_marketer_list = (\n PimpUser.objects\n .filter(featured=True)\n .filter(usertype=PimpUser.MARKETER)\n .order_by('-date_joined')[:limit]\n )\n featured_cause_list = (\n PimpUser.objects\n .filter(featured=True)\n .filter(usertype=PimpUser.CAUSE)\n .order_by('-date_joined')[:limit]\n )\n news_post_list = (\n NewsPost.objects\n .order_by('-created_at')[:3]\n )\n\n context = {\n 'featured_marketer_list': featured_marketer_list,\n 'featured_cause_list': featured_cause_list,\n 'news_post_list': news_post_list,\n }\n\n return render(\n request,\n 'index.html',\n context\n )", "def get_homepage():\n\n list_of_students = hackbright.list_all_students()\n list_of_projects = hackbright.list_all_projects()\n\n return render_template(\"index.html\",\n list_of_students=list_of_students,\n list_of_projects=list_of_projects)", "def index():\n\n if current_user.is_anonymous:\n flash(\"Login to add, edit and delete items\", category='warning')\n return render_template('index.html', title=\"index\")\n else:\n categories = Category.query.filter(\n current_user.id == Category.user_id).all()\n items = Item.query.filter(current_user.id == Item.user_id).all()\n return render_template(\n 'index.html',\n title=\"index\",\n categories=categories,\n items=items,\n category_id=None)", "def home_app():\n users = User.query.order_by(User.last_name, User.first_name).all()\n\n return render_template('index.html', users=users)", "def index(request):\n table = dynamodb.Table(table_name)\n sis_account_id = request.LTI[\"custom_canvas_account_sis_id\"]\n school_id = sis_account_id.split(\":\")[1]\n school_key = f'SCHOOL#{school_id.upper()}'\n query_params = {\n 'KeyConditionExpression': Key('pk').eq(school_key),\n 'ScanIndexForward': False,\n }\n logger.debug(f'Retrieving jobs for school {school_key}.')\n jobs_for_school = table.query(**query_params)['Items']\n\n # Update created_at (ISO8601) string timestamp to datetime.\n [item.update(created_at=parse_datetime(item['created_at']))\n for item in jobs_for_school]\n\n context = {\n 'jobs_for_school': jobs_for_school\n }\n logger.debug(f'Retrieved jobs for school {school_key}.', extra=context)\n return render(request, \"bulk_site_creator/index.html\", context=context)", "def index(request):\n # # Generate counts of some of the main objects\n # class_iter = Course.objects.all()\n # num_instances = BookInstance.objects.all().count()\n # # Available books (status = 'a')\n # num_instances_available = BookInstance.objects.filter(status__exact='a').count()\n # num_authors = Author.objects.count() # The 'all()' is implied by default.\n\n # Render the HTML template index.html with the data in the context variable\n 
cr=Course.objects.filter(ta={Ta.full_name})\n context = dict()\n # for item in class_iter:\n # context[str(item)] = item.cname\n\n return render(\n request,\n 'index.html',\n # context={'num_books': num_books, 'num_instances': num_instances,\n # 'num_instances_available': num_instances_available, 'num_authors': num_authors},\n context,\n )", "def icpc_viewall(request):\n is_loggedin, username = get_session_variables(request)\n icpc_participants_list = []\n p_list= []\n icpc_list = ACM_ICPC_detail.objects.all().order_by('ranking')\n if icpc_list:\n\n for icpc_obj in icpc_list:\n \n team = icpc_obj.team_name\n member1 = [icpc_obj.participant1_name, \\\n get_username_from_email(icpc_obj.participant1_email)]\n\n member2 = [icpc_obj.participant2_name, \\\n get_username_from_email(icpc_obj.participant2_email)]\n\n member3 = [icpc_obj.participant3_name, \\\n get_username_from_email(icpc_obj.participant3_email)]\n\n icpc_participant_list = [icpc_obj, member1,member2,member3]\n icpc_participants_list.append(icpc_participant_list)\n \n return render_to_response('achievement/icpc_viewall.html', \\\n {'is_loggedin':logged_in(request), \\\n 'username':username, \\\n 'icpc_list':icpc_list,\\\n 'icpc_participants_list':icpc_participants_list}, RequestContext(request))\n\n else:\n return render_to_response('achievement/noview.html', \\\n {'is_loggedin':logged_in(request), \\\n 'username':username, \\\n 'type': 'ACM ICPC Contest'}, \\\n RequestContext(request))", "def base_dashboard(request):\n appointments = None\n\n if request.user.userprofile.is_patient():\n appointments = Appointment.objects.filter(patient=request.user.id).order_by('date')\n elif request.user.userprofile.is_doctor():\n appointments = Appointment.objects.filter(doctor=request.user.id).order_by('date')\n else:\n appointments = Appointment.objects.all().order_by('date')\n\n return render(request, 'base_dashboard.html', {'appointments': appointments, 'the_user': request.user})", "def display_users(cls):\n return cls.user_list", "def dashboard():\n # TODO: Optionally, old proposals should be shown in a read-only mode.\n talks = Talk.query.current.filter(Talk.user == current_user)\n return render_template(\n 'profile/dashboard.html', talks=talks)", "def index(request):\n\n # Generate counts of some of the main objects\n num_blogs = Blog.objects.all().count()\n num_authors = BlogAuthor.objects.count()\n num_comments = BlogComment.objects.count()\n\n context = {\n 'num_blogs': num_blogs,\n 'num_authors': num_authors,\n 'num_comments': num_comments,\n }\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context=context)", "def index(request):\n # Generate counts of some of the main objects\n num_inventions = Invention.objects.all().count()\n inventions = Invention.objects.order_by('-date_of_invention')\n\n inventions_to_random = list(Invention.objects.all())\n random_invention = random.choice(inventions_to_random)\n\n context = {\n 'num_inventions': num_inventions,\n 'inventions': inventions,\n 'random_invention': random_invention\n }\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context=context)", "def index(request):\n return render(request, \"county/index.html\")", "def show_users():\n\n user = User(connection=connection, cursor=cursor)\n\n all_users = user.get_all_users()\n\n context = {\n 'all_users': all_users\n }\n\n return render_template('pages/tables/users.html', **context)", "def 
index(request):\n\tusuario = request.user\n\ttemplate = 'customerbuy/index.html'\n\treturn render_to_response(template,{'usuario': usuario},context_instance=RequestContext(request))", "def index(request):\n if request.user.is_authenticated:\n return HttpResponseRedirect(reverse('surveys-dashboard'))\n\n context = {}\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context=context)", "def list_users():\n\n db_users = User.query.all()\n\n return render_template(\"list_users.html\", headline=\"Blogly Users\", users=db_users)", "def index(request):\n warnings.warn(\n \"icekit_events.views.index is deprecated and will disappear in a \"\n \"future version. If you need this code, copy it into your project.\"\n , DeprecationWarning\n )\n\n occurrences = models.Occurrence.objects.visible()\n context = {\n 'occurrences': occurrences,\n }\n return TemplateResponse(request, 'icekit_events/index.html', context)", "def index(request):\n num_blogs = Blog.objects.all().count()\n num_authors = BlogAuthor.objects.all().count()\n num_unique_authors = Blog.objects.distinct().count()\n num_comments = Comment.objects.all().count()\n return render(request, 'index.html', context={'num_blogs' : num_blogs, 'num_authors' : num_authors,\n 'num_unique_authors' : num_unique_authors, 'num_comments' : num_comments})", "def index(self):\n\n\t\tself.db = DB()\n\t\tactivityTuple = self.db.select_all_from(\"activity\")[1]\n\t\ttmpl = lookup.get_template(\"index.html\")\n\t\treturn (tmpl.render(activity=activityTuple))", "def get(self, request):\n context = {}\n if request.user.is_authenticated:\n instagram_accounts = InstagramAccount.objects.filter(user_id=request.user.pk)\n context['instagram_accounts'] = instagram_accounts\n return render(request, 'home.html', context)", "def intern_viewall(request):\n is_loggedin, username = get_session_variables(request)\n intern_list = Intern.objects.all()\n\n if intern_list:\n return render_to_response('achievement/intern_viewall.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'intern_list':intern_list}, \\\n RequestContext(request))\n else:\n return render_to_response('achievement/noview.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'type': 'Internship'}, \\\n RequestContext(request))", "def index(self, page=1):\n users_obj = User.query.paginate(page)\n return render_template('item/list.html', items=users_obj)", "def users_page(request):", "def user(self):\n try:\n data_definitions = self.get_data_definitions(current_user.scheme_id)\n user_data = self.get_all_user_data(current_user.scheme_id, current_user.k_number)\n\n return render_template(\"user/dashboard_page.html\", title=\"Your Profile\", user_data=user_data)\n\n except Exception as e:\n self._log.exception(\"Could not execute get user logic\")\n return abort(500)", "def index(request):\n cat_filter = request.GET.get('category_filter')\n context = {\"cat_filter\": cat_filter,\n \"gifs_with_cats\": GifToUser.get_user_gifs_for_category(\n request.user, cat_filter=cat_filter),\n \"all_cats\": GifToUserCategory.get_user_categories(request.user)}\n template = loader.get_template('gifs/index.html')\n return HttpResponse(template.render(context, request))", "def list_users():\n users = User.query.order_by(User.last_name, User.first_name).all()\n return render_template('index.html', users=users)", "def index(request):\n # Generate counts of some of the main objects\n projects = Project.objects.all()\n projects = 
projects[0:4]\n press = Press.objects.all().count()\n # Available books (status = 'a')\n # num_instances_available=BookInstance.objects.filter(status__exact='a').count()\n # num_authors=Author.objects.count() # The 'all()' is implied by default.\n\n # Render the HTML template index.html with the data in the context variable\n\n all_services = Service.objects.all()\n services = all_services[0:4]\n\n return render(\n request,\n 'index.html',\n context={'projects': projects, 'press': press, 'services': services})", "def display_homepage(user_id):\n\n user_inv = (UserInv.query.filter_by(user_id=user_id)).all()\n\n return render_template('user-homepage.html',\n user_inv=user_inv)", "def index(request):\n\treturn render(request, 'Toeic/index.html')", "def index(request):\r\n assert isinstance(request, HttpRequest)\r\n return render(\r\n request,\r\n 'app/site/indexSite.html',\r\n context_instance=RequestContext(request,\r\n {\r\n 'title': 'Colmeia | Início',\r\n 'year': datetime.now().year,\r\n })\r\n )", "def gsoc_viewall(request):\n is_loggedin, username = get_session_variables(request)\n gsoc_list = Gsoc.objects.all()\n\n if gsoc_list:\n return render_to_response('achievement/gsoc_viewall.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'gsoc_list':gsoc_list}, \\\n RequestContext(request))\n else:\n return render_to_response('achievement/noview.html', \\\n {'is_loggedin':is_loggedin, \\\n 'username':username, \\\n 'type': 'Gsoc'}, \\\n RequestContext(request))", "def index_view(self) -> str:\n return render_template(\n \"index.html\",\n challenge_groups=self._challenge_groups,\n ctff=current_app,\n )", "def users_in_cohort(request, course_key, cohort_id):\r\n # this is a string when we get it here\r\n course_key = SlashSeparatedCourseKey.from_deprecated_string(course_key)\r\n\r\n get_course_with_access(request.user, 'staff', course_key)\r\n\r\n # this will error if called with a non-int cohort_id. That's ok--it\r\n # shoudn't happen for valid clients.\r\n cohort = cohorts.get_cohort_by_id(course_key, int(cohort_id))\r\n\r\n paginator = Paginator(cohort.users.all(), 100)\r\n page = request.GET.get('page')\r\n try:\r\n users = paginator.page(page)\r\n except PageNotAnInteger:\r\n # return the first page\r\n page = 1\r\n users = paginator.page(page)\r\n except EmptyPage:\r\n # Page is out of range. 
Return last page\r\n page = paginator.num_pages\r\n contacts = paginator.page(page)\r\n\r\n user_info = [{'username': u.username,\r\n 'email': u.email,\r\n 'name': '{0} {1}'.format(u.first_name, u.last_name)}\r\n for u in users]\r\n\r\n return json_http_response({'success': True,\r\n 'page': page,\r\n 'num_pages': paginator.num_pages,\r\n 'users': user_info})", "def user_home(request):\n # crop = Crop.objects.get(user=request.user)\n return render(request, 'user_home.html', locals())", "def index():\n # return render_template('index.html', events=get_calendar_events_today(CALENDAR_URL))\n return render_template('index.html', events=get_calendar_events_limit(CALENDAR_URL), events_sorted=True)", "def home(request):\n context = {\n 'tweets': Tweet.objects.all()\n }\n return render(request, 'index.html', context)", "def index(request):\r\n return render(request, 'team_tasks_managers/index.html')", "def index(request):\n\n # Generate counts of blog posts and blog authors\n\n num_blogs = Blog.objects.all().count()\n num_authors = BlogAuthor.objects.count()\n\n context = {\n 'num_blogs' : num_blogs,\n 'num_authors' : num_authors,\n }\n\n # Render the HTML template index.html with the data in the context variable\n return render(request, 'index.html', context = context)", "def all_memberships(request):\n memberships = Membership.objects.all()\n context = {\n 'memberships': memberships,\n }\n return render(request, \"memberships.html\", context)", "def user_home(request, user_name):\n query = User.objects.filter(username=user_name)\n if query.count() == 0:\n raise Http404(\"Can't find a user named: %s\" % user_name)\n else:\n user = query[0]\n if UserProfile.objects.filter(user=user):\n user_profile = UserProfile.objects.filter(user=user)[0]\n groups = get_user_groups(user)\n return render_to_response('user_home.html', locals())", "def get(self):\n\n usrs = get_mapviewers(24)\n for usr in usrs:\n logging.info(\"Mapviewer: \" + usr.loginuser);\n\n template_values = {\n 'mapviewers': usrs,\n }\n\n logging.info(\"Showusers visited.\")\n template = JINJA_ENVIRONMENT.get_template('showusers.html')\n self.response.write(template.render(template_values))", "def index(request):\n context = {'is_logged_in': request.user.is_authenticated}\n return render(request, 'sacms/index.html', context)", "def home(request):\n\n swms = Swms.objects.all()\n\n context = {\n 'swms': swms,\n }\n\n return render(request, 'dashboard.html', context)", "def get(self, request):\n cards = self.get_queryset().all()\n user = None\n auth = request.user.is_authenticated\n if auth:\n user = request.user\n return render(request, 'index/index.html', {\n 'cards': cards,\n 'user': user\n })", "def home():\r\n\r\n page = 1\r\n per_page = current_app.config.get('APPS_PER_PAGE')\r\n if per_page is None: # pragma: no cover\r\n per_page = 5\r\n d = {'featured': cached_apps.get_featured_front_page(),\r\n 'top_apps': cached_apps.get_top(),\r\n 'top_users': None}\r\n\r\n # Get all the categories with apps\r\n categories = cached_cat.get_used()\r\n d['categories'] = categories\r\n d['categories_apps'] = {}\r\n for c in categories:\r\n tmp_apps, count = cached_apps.get(c['short_name'], page, per_page)\r\n d['categories_apps'][str(c['short_name'])] = tmp_apps\r\n\r\n # Add featured\r\n tmp_apps, count = cached_apps.get_featured('featured', page, per_page)\r\n if count > 0:\r\n featured = model.category.Category(name='Featured', short_name='featured')\r\n d['categories'].insert(0,featured)\r\n d['categories_apps']['featured'] = tmp_apps\r\n\r\n if 
current_app.config['ENFORCE_PRIVACY'] and current_user.is_authenticated():\r\n if current_user.admin:\r\n d['top_users'] = cached_users.get_top()\r\n if not current_app.config['ENFORCE_PRIVACY']:\r\n d['top_users'] = cached_users.get_top()\r\n return render_template('/home/index.html', **d)", "def home(request):\n if not request.user.is_authenticated():\n soccer = Game.objects.filter(sport=Sport.objects.get(sport=\"soccer\"), active=\"true\")\n volleyball = Game.objects.filter(sport=Sport.objects.get(sport=\"volleyball\"), active=\"true\")\n baseball = Game.objects.filter(sport=Sport.objects.get(sport=\"baseball\"), active=\"true\")\n hockey = Game.objects.filter(sport=Sport.objects.get(sport=\"hockey\"), active=\"true\")\n basketball = Game.objects.filter(sport=Sport.objects.get(sport=\"basketball\"), active=\"true\")\n return render(request, 'index.html', {'soccer': soccer, 'hockey': hockey, 'basketball': basketball, 'baseball': baseball, 'volleyball': volleyball})\n\n if request.user.is_staff:\n return HttpResponseRedirect('/admin/')\n\n player = Player.objects.get(user=request.user)\n sports = player.sports.all()\n joined_games = player.game_set.all()\n my_games = Game.objects.filter(owner=request.user)\n profile_pic_url = player.image_url\n return render(request, 'profile.html', {'player': player, 'profile_pic_url': profile_pic_url, 'sports': sports, 'games': my_games | joined_games})", "def show():\n return render_template(\n 'listUsers.html',\n title='List Users',\n message='These are the users in our system'\n )", "def get_drip_campaigns(self):\n return list(DripCampaign.objects(user_id=self.user_id))", "def index():\n\n class_entry_relations = title_context\n\n\n default_classes = sorted(class_entry_relations.keys())\n default_values = class_entry_relations[default_classes[0]]\n return render_template('index.html',\n all_classes=default_classes,\n all_entries=default_values,\n len = len(default_values)\n\n )", "def index(request):\n\n\treturn render(request, 'index.html', {})\n\n\t# uncomment this line vvv and comment the above ^^^ line once we cut off scheduling\n\t#return render(request, 'cannot_schedule_anymore.html', {})", "def _get_campaigns(self, params):\n return self._api.account.get_campaigns(params={**params, **self._state_filter()}, fields=[self.state_pk])", "def index(request):\n\n # Generate counts of the main objects\n\n num_sales=Prodsale.objects.all().count()\n\n # Render the HTML template index.html with the data in the context variable\n\n return render(\n request, 'index.html',\n context={'num_sales':num_sales},\n )", "def index():\n entertainment_news = get_sources('entertainment')\n fashion_news = get_sources('fashion')\n title = 'Vnews'\n return render_template('index.html', title=title, fashion=fashion_news, entertainment=entertainment_news)", "def list_all(request):\n companies = Company.objects.order_by('-created')\n context = dict(companies=companies)\n return render(request, 'companies/all.html', context)", "def overview(request):\n LOGGER.info('Rendering WMT16 HIT overview for user \"{0}\".'.format(\n request.user.username or \"Anonymous\"))\n \n # Re-initialise random number generator.\n seed(None)\n \n # Collect available language pairs for the current user.\n language_codes = set([x[0] for x in LANGUAGE_PAIR_CHOICES])\n language_pairs = request.user.groups.filter(name__in=language_codes)\n \n # Collect available annotation projects for the current user.\n annotation_projects = request.user.project_set.all()\n \n hit_data = []\n total = [0, 0, 0]\n\n 
for language_pair in language_pairs:\n for annotation_project in annotation_projects:\n hit = _compute_next_task_for_user(request.user, annotation_project, language_pair)\n user_status = HIT.compute_status_for_user(request.user, annotation_project, language_pair)\n for i in range(3):\n total[i] = total[i] + user_status[i]\n \n if hit:\n # Convert status seconds back into datetime.time instances.\n for i in range(2):\n user_status[i+1] = seconds_to_timedelta(int(user_status[i+1]))\n \n hit_data.append(\n (hit.get_language_pair_display(), hit.get_absolute_url(),\n hit.hit_id, user_status, annotation_project)\n )\n \n # Convert total seconds back into datetime.timedelta instances.\n total[1] = seconds_to_timedelta(int(total[2]) / float(int(total[0]) or 1))\n \n # Remove microseconds to get a nicer timedelta rendering in templates.\n total[1] = total[1] - timedelta(microseconds=total[1].microseconds)\n \n total[2] = seconds_to_timedelta(int(total[2]))\n \n groups = _identify_groups_for_user(request.user)\n group = None\n if len(groups) > 1:\n LOGGER.debug(u'User \"{0}\" assigned to multiple annotation groups: {1}'.format(\n request.user.username or u'Anonymous',\n u', '.join([x.name for x in groups]))\n )\n group = groups[0]\n \n if group is not None:\n group_name = group.name\n group_status = HIT.compute_status_for_group(group)\n for i in range(2):\n group_status[i+1] = seconds_to_timedelta(int(group_status[i+1]))\n \n else:\n group_status = None\n group_name = None\n \n LOGGER.debug(u'\\n\\nHIT data for user \"{0}\":\\n\\n{1}\\n'.format(\n request.user.username or \"Anonymous\",\n u'\\n'.join([u'{0}\\t{1}\\t{2}\\t{3}'.format(*x) for x in hit_data])))\n\n # Compute admin URL for super users.\n admin_url = None\n if request.user.is_superuser:\n admin_url = reverse('admin:index')\n \n dictionary = {\n 'active_page': \"OVERVIEW\",\n 'hit_data': hit_data,\n 'total': total,\n 'group_name': group_name,\n 'group_status': group_status,\n 'admin_url': admin_url,\n 'title': 'WMT16 Dashboard',\n 'annotation_groups': [x.name for x in groups],\n }\n dictionary.update(BASE_CONTEXT)\n \n LOGGER.info(dictionary.values())\n \n return render(request, 'wmt16/overview.html', dictionary)", "def display_accounts(cls):\n return cls.account_list" ]
[ "0.6278449", "0.5947191", "0.57573825", "0.57182586", "0.5707867", "0.5703257", "0.56390655", "0.5605963", "0.55317235", "0.5515906", "0.5514333", "0.5508371", "0.54833335", "0.5471401", "0.5455781", "0.54364294", "0.5406502", "0.5397928", "0.53924406", "0.53866136", "0.5347947", "0.5315827", "0.5313521", "0.52813923", "0.5277998", "0.5271437", "0.5270981", "0.52542007", "0.5239706", "0.52305603", "0.5220847", "0.5212447", "0.5202224", "0.5152002", "0.5130391", "0.5114246", "0.5110644", "0.51042706", "0.5078861", "0.50786954", "0.50684065", "0.50637984", "0.5060998", "0.5036714", "0.50347584", "0.5017168", "0.5016778", "0.5013453", "0.5012359", "0.50019675", "0.50016207", "0.4991074", "0.49908188", "0.4982552", "0.49672452", "0.49614766", "0.4960523", "0.49542", "0.4951789", "0.49457088", "0.49293897", "0.49247098", "0.4924053", "0.49173525", "0.491699", "0.49165308", "0.49144363", "0.49106506", "0.4905267", "0.49042198", "0.4901741", "0.48987973", "0.48952028", "0.4895117", "0.48945037", "0.48913905", "0.48898765", "0.48866144", "0.4884839", "0.4883926", "0.4881192", "0.48805594", "0.487919", "0.48687667", "0.4864542", "0.48612395", "0.48556048", "0.48529562", "0.48457542", "0.48424256", "0.4840113", "0.48388907", "0.4833237", "0.48323575", "0.48300847", "0.4827285", "0.4827236", "0.4824291", "0.48181108", "0.4805671" ]
0.6828237
0
The logout view. Redirects to home page after.
def logout(request): if request.user.is_authenticated(): auth_logout(request) return HttpResponseRedirect(reverse("voter_validation:index"))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def logout_view(request):\n logout(request)\n return redirect('home')", "def logout_view(request):\n auth_logout(request)\n return redirect('home')", "def logout_view(request):\n \n logout(request)\n return HttpResponseRedirect(reverse(\"index\"))", "def logout_view(request):\n logout(request)\n return HttpResponseRedirect(reverse(\"index\"))", "def logout():\n logout_user()\n return redirect(url_for('default.home'))", "def logout_view(request):\n logout(request)\n return redirect('/')", "def logout_view(request):\n logout(request)\n return redirect('/')", "def logout_view(request):\n\n if request.user.is_authenticated:\n logout(request)\n\n return redirect(reverse('index_view'))", "def GET_logout(self):\r\n self.logout()\r\n return self.redirect('/')", "def logout_page(request):\n logout(request)\n return redirect('/')", "def logoutPage(request):\n\n logout(request)\n return redirect('index')", "def logout():\n logout_user()\n return redirect(url_for('home'))", "def logout_view(request):\n logout(request)\n return redirect('login')", "def logout_view(request):\n logout(request)\n return redirect('login')", "def logout(request):\n auth.logout(request)\n return redirect('index')", "def logout_view(request):\n\n\tlogout(request)\n\treturn HttpResponseRedirect('login')", "def logoutView(request):\n logout(request)\n return redirect('/login')", "def logout():\n do_logout()\n return redirect('/login')", "def logout():\n session_logout()\n return redirect(url_for(\"home\"))", "def logout_view(request):\n\n logout(request)\n return redirect('/login')", "def logout_view(request):\n\n logout(request)\n return redirect('/user/login/')", "def logout_redirect():\n login_session.clear()\n flash('You have logged out')\n return redirect(url_for('show_homepage'))", "def logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')", "def logout_view(request):\n logout(request)\n return HttpResponseRedirect('/')", "def logout_view(request):\n logout(request)\n return HttpResponseRedirect(reverse('dashboard:login'))", "def logout_view(request):\n logout(request) # Logout request user\n messages.info(request, \"You have logged out successfully.\", fail_silently=False)\n return redirect(\"showLoginPage\")", "def logout():\n session.pop('logged_in', None)\n return redirect(url_for('home'))", "def logout():\n logout_user()\n return redirect(\"/\")", "def logout_page(request):\n logout(request)\n return HttpResponseRedirect('/')", "def logout(request):\n alogout(request)\n \n return HttpResponseRedirect(\"/\")", "def auth_logout(request):\n logout(request)\n return HttpResponseRedirect( reverse('startpage') )", "def logout(request):\n\tlogoutt(request)\n\treturn redirect(\"firstpage\")", "def logout():\r\n logout_user()\r\n flash('You were logged out.')\r\n return redirect(url_for('index'))", "def logout():\n session.clear()\n return redirect(url_for('index'))", "def logout():\n logout_user()\n return redirect(url_for('index'))", "def logout():\n logout_user()\n return redirect(url_for('index'))", "def logout():\n logout_user()\n return redirect(url_for('main.index'))", "def account_logout(request):\n logout(request)\n return redirect('/')", "def logout(request):\n auth.logout(request)\n return redirect(settings.SITE_URL)", "def logout():\n session.clear()\n return redirect(url_for(\"home\"))", "def logout():\n logout_user()\n return redirect(url_for('auth.index'))", "def logout():\n session.clear()\n return redirect(url_for('home'))", "def logout(request):\n auth.logout(request)\n 
messages.success(request, 'You are logged out!')\n return redirect(reverse('index'))", "def logout(request):\n auth.logout(request)\n messages.success(request, 'You have successfully been logged out.')\n return redirect(reverse('index'))", "def logout(request):\n auth.logout(request)\n messages.success(request, \"You have successfully been logged out.\")\n return redirect(reverse('index'))", "def logout(self):\n\n # remove session\n session.delete()\n\n return render('login.html')", "def logout(request):\n auth.logout(request)\n messages.success(request, \"You have successfully been logged out!\",\n extra_tags=\"alert-success\")\n return redirect(reverse('index'))", "def logout(self):\r\n session.clear()\r\n return redirect(\"/user/login\")", "def user_logout(request):\r\n logout(request)\r\n return redirect('accounts:login')", "def logout(request):\n auth.logout(request)\n messages.success(request, \"You have successfully been logged out\")\n return redirect('index')", "def logout(request):\n auth.logout(request)\n messages.success(request, 'You have successfully logged out')\n return redirect(reverse('index'))", "def logout_user(request):\n\tlogout(request)\n\treturn HttpResponseRedirect('/')", "def logout_view():\n return url(r'^logout/$', logout, {'template_name': 'miniuser/logout.html'}, name='logout')", "def logout(request):\n headers = forget(request)\n return HTTPFound(location=request.route_url('home_view'), headers=headers)", "def logout(request):\n auth.logout(request)\n messages.success(request, \"You have successfully been logged out\", extra_tags=\"alert-success\")\n return redirect(reverse('index'))", "def logout(self):\r\n # should redirect\r\n check_for_get_code(self, 302, reverse('logout'))", "def logout():\n\n do_logout()\n flash('successfully logged out')\n return redirect(\"/\")", "def logout(request):\n auth.logout(request)\n messages.success(request, \"You have logged out succesfully\")\n return redirect(reverse('index'))", "def logout():\n flash(_('You were logged out'))\n session.pop('user_id', None)\n return redirect(url_for('index'))\n #return redirect(url_for('public_timeline'))", "def logout():\n session.clear()\n return redirect(\"/\")", "def logout():\n\n logout_user()\n return redirect('/')", "def logout(request, error=None):\n auth_logout(request)\n return redirect('app:index')", "def logout():\n \n # using the method from the flask module\n logout_user()\n return redirect(url_for('home'))", "def logout(request):\n django_logout(request)#TODO log out from BASF too (!?)\n return HttpResponseRedirect(reverse_lazy(\"landing_page\"))", "def logout():\n\tsession.pop(\"username\", None)\n\treturn redirect(url_for(\"default\"))", "def s_logout(request):\n logout(request) # use django.contrib.auth.logout , clear all session , redirect to logout\n return redirect('/')", "def log_out(request):\n logout(request)\n return redirect('user_login')", "def logout(request):\n django_logout(request)\n return HttpResponseRedirect('/')", "def logout_student(request):\n logout(request)\n return HttpResponseRedirect(reverse('home'))", "def logout_view(request):\n logout(request)\n return render(request, 'woofer/index.html')", "def user_logout(request):\n\n # Since we know the user is logged in, we can now just log them out.\n\tlogout(request)\n\n # Take the user back to the homepage. 
\n\treturn HttpResponseRedirect(reverse('website:index'))", "def logout():\n # Log user out if they are authenticated\n if current_user.is_authenticated:\n logout_user()\n # Redirect to index page\n flash(\"Successfully logged out.\", category=\"success\")\n # Redirect back to index\n return redirect(url_for('main.index'))", "def logout():\n session.clear()\n return redirect(url_for(\"index\"))", "def logout():\n session.clear()\n return redirect(url_for(\"index\"))", "def logout():\n session.clear()\n return redirect(url_for(\"index\"))", "def logout():\n logout_user()\n return redirect(url_for(\".login\"))", "def logout():\n flash(u'Zostałeś wylogowany')\n session.pop('user_id', None)\n return redirect(url_for('index'))", "def logoutCurrentUser(request):\n logout(request)\n return redirect('indexPage')", "def logout_view():\n return url(r'^logout/$', LogoutView.as_view(template_name='miniuser/logout.html'), name='logout')", "def logout():\n session.pop(\"user\")\n return redirect(url_for(\"home\"))", "def logout():\n logout_user()\n flash(\"Successfully signed out\", category='info')\n return redirect(url_for('url.index'))", "def logout(request):\n auth_logout(request)\n messages.success(request, 'You are now logged out')\n return redirect('/')", "def logout():\n session.clear()\n return redirect(url_for('index'))", "def logout():\n session.pop('user', None)\n return redirect(url_for('index'))", "def logout():\n session.pop('user', None)\n return redirect(url_for('index'))", "def logout():\n logout_user()\n flash('Successfully logged out.')\n return redirect(request.referrer)", "def logout_view(request):\n auth.logout(request)\n messages.info(request, _('You have logged out successfully.'), extra_tags='alert alert-info')\n return render(request, 'logout_complete.html')", "def logout():\n\n session.clear()\n return redirect(url_for('index'))", "def logout():\n\n session.clear()\n return redirect(url_for('index'))", "def logoutuser(request):\n logout(request)\n return redirect('login')", "def logout():\n\n # remove the username from the session if it is there\n out_user = current_user.get_id()\n logout_user()\n logger.info(out_user + ' has been logged out.')\n return redirect(url_for('home'))", "def logout():\n\n if do_logout():\n flash('Logout Successful.', 'success')\n\n return redirect('/login')", "def logout():\n\n do_logout()\n flash(\"Successfully logged out\", \"success\")\n return redirect('/')", "def get(self, request):\n logout(request)\n return redirect(\"/\")", "def logout():\n session.clear()\n return redirect(\"/showlog\")", "def logout():\n\n do_logout()\n flash(f\"You are now logged out!\", \"success\")\n return redirect('/')", "def logout():\n logout_user()\n flash('You have successfully been logged out.')\n # redirect to the login page\n return redirect(url_for('view.login'))", "def logout(self) -> Union[redirect, HTMLBody]:\n\t\tif self.is_authorized:\n\t\t\tsession.clear()\n\t\t\tflash(\"Successfully logged out.\")\n\t\t\treturn render_template(\"index.jinja2\")\n\t\tflash(\"You must login before logging out.\")\n\t\treturn redirect(url_for(\"login\"))", "def logoutUser(request):\n\n logout(request)\n return redirect('loginpage')", "def logout():\n\n logout_user()\n return redirect(url_for('login'))" ]
[ "0.8744168", "0.8659496", "0.8525579", "0.8507581", "0.8489514", "0.8458301", "0.8458301", "0.84309167", "0.84134465", "0.83970946", "0.8394286", "0.8385555", "0.83608264", "0.83608264", "0.83596367", "0.8345407", "0.8336058", "0.83315647", "0.83303416", "0.8329741", "0.83283705", "0.8294551", "0.82840526", "0.82840526", "0.8275323", "0.8268875", "0.82398176", "0.82339036", "0.82318133", "0.8231378", "0.82286763", "0.8214554", "0.8197308", "0.8195826", "0.81866413", "0.81866413", "0.8184838", "0.8161194", "0.81581724", "0.8148838", "0.8138842", "0.81277066", "0.8126657", "0.81252337", "0.8124634", "0.81200767", "0.81151867", "0.8113337", "0.8111746", "0.8110318", "0.8108925", "0.8096077", "0.80949175", "0.8089595", "0.80841935", "0.80739665", "0.807221", "0.8058011", "0.8055578", "0.80489373", "0.8046632", "0.80448043", "0.8043686", "0.80324", "0.8019934", "0.8006556", "0.80038637", "0.80024755", "0.80017316", "0.79940003", "0.7988628", "0.79810876", "0.7976428", "0.7976428", "0.7976428", "0.7967457", "0.7967403", "0.7964069", "0.7951747", "0.7950239", "0.7940465", "0.7934086", "0.7930952", "0.79303294", "0.79303294", "0.7916134", "0.79136586", "0.79118305", "0.79118305", "0.7907741", "0.79055953", "0.79016", "0.7901307", "0.78922695", "0.78794503", "0.78630143", "0.78586775", "0.7846096", "0.78460824", "0.784526" ]
0.7844669
100
Shows validation UI for a given campaign, if this UserProfile is authorized to do data entry for the specified Campaign. This is also the endpoint for searching for Voters as part of validation. If doing a search, assume that a sufficient number of the specified fields is present (taken care of in frontend form validation).
def validate(request, campaign_id): if not request.user.userprofile.in_campaign(campaign_id): return HttpResponseRedirect(reverse("voter_validation:index")) campaign_id = int(campaign_id) campaign = get_object_or_404(Campaign, id=campaign_id) # Get the number of signatures validated by the current user for this # campaign, and also for the past 24 hours. val_sigs_set = ValidationRecord.objects.filter( validator=request.user.userprofile, campaign=campaign) val_sigs_24h = val_sigs_set.filter( last_updated__gte=datetime.now(SERVER_TIME_ZONE) - timedelta(hours=24)) context = { "campaign_name": campaign.name, "campaign_id": campaign_id, "val_sigs": val_sigs_set.count(), "val_sigs_24h": val_sigs_24h.count(), } # Search if specified in POST search = request.POST.get("search", "false") if search.lower() == "true": name = request.POST.get("name", None) address = request.POST.get("address", None) res_zip = request.POST.get("zip", None) # Pass in campaign_id so we can check the Voter was previously validated voters = voter_search(name, address, res_zip, campaign_id=campaign_id) context.update({ "name": name, "address": address, "zip": res_zip, "results": voters, }) return render(request, "voter_validation/validation.html", context)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def index(request):\n context = dict()\n if request.user.is_authenticated():\n context['campaigns'] = [\n CampaignSerializer(c).serialize() for c in\n request.user.userprofile.campaigns.order_by('pk')]\n return render(request, 'voter_validation/index.html', context)", "def view(args):\n if args.available:\n printAvailableCampaigns()\n if args.search_help:\n print(getSearchQueryHelp())", "def validate(self, data):\n company = data['company']\n if self.context['request'].user.has_perm(\"view_company\", company):\n return data\n else:\n raise PermissionDenied()", "def show_campaigns(request, utm_campaign, **kwargs):\n \n err_msg = ''\n try:\n err_msg = str(kwargs['kwargs']['err_msg'])\n except:\n pass\n \n test_type_override = ''\n try:\n test_type_override = MySQLdb._mysql.escape_string(request.POST['test_type_override'])\n \n if test_type_override == 'Banner':\n test_type_var = FDH._TESTTYPE_BANNER_\n elif test_type_override == 'Landing Page':\n test_type_var = FDH._TESTTYPE_LP_\n elif test_type_override == 'Banner and LP':\n test_type_var = FDH._TESTTYPE_BANNER_LP_\n \n except:\n test_type_var = ''\n pass\n \n try:\n \"\"\" Find the earliest and latest page views for a given campaign \"\"\"\n lptl = DL.LandingPageTableLoader()\n ccrml = DL.CiviCRMLoader()\n \n start_time = ccrml.get_earliest_donation(utm_campaign)\n end_time = ccrml.get_latest_donation(utm_campaign)\n \n one_step = lptl.is_one_step(start_time, end_time, utm_campaign) \n \n if not(one_step): \n start_time = lptl.get_earliest_campaign_view(utm_campaign)\n end_time = lptl.get_latest_campaign_view(utm_campaign) \n\n interval = 1\n \n \"\"\" Create reporting object to retrieve campaign data and write plots to image repo on disk \"\"\"\n ir = DR.IntervalReporting(was_run=False, use_labels=False, font_size=20, plot_type='line', query_type='campaign', file_path=projSet.__web_home__ + 'campaigns/static/images/')\n \n \"\"\" Produce analysis on the campaign view data \"\"\" \n ir.run(start_time, end_time, interval, 'views', utm_campaign, {}, one_step=one_step)\n \n \"\"\" \n ESTIMATE THE START AND END TIME OF THE CAMPAIGN\n ===============================================\n \n Search for the first instance when more than 10 views are observed over a sampling period\n \"\"\"\n \n col_names = ir._data_loader_.get_column_names()\n \n views_index = col_names.index('views')\n ts_index = col_names.index('ts')\n \n row_list = list(ir._data_loader_._results_) # copy the query results\n for row in row_list:\n if row[views_index] > 100:\n start_time_est = row[ts_index]\n break\n row_list.reverse()\n for row in row_list:\n if row[views_index] > 100:\n end_time_est = row[ts_index]\n break\n \n \n \"\"\"\n BUILD THE VISUALIZATION FOR THE TEST VIEWS OF THIS CAMAPAIGN\n ============================================================ \n \"\"\"\n \n \"\"\" Read the test name \"\"\"\n ttl = DL.TestTableLoader()\n row = ttl.get_test_row(utm_campaign)\n test_name = ttl.get_test_field(row ,'test_name')\n \n \"\"\" Regenerate the data using the estimated start and end times \"\"\"\n ir = DR.IntervalReporting(was_run=False, use_labels=False, font_size=20, plot_type='line', query_type='campaign', file_path=projSet.__web_home__ + 'campaigns/static/images/')\n ir.run(start_time_est, end_time_est, interval, 'views', utm_campaign, {}, one_step=one_step)\n \n \"\"\" Determine the type of test (if not overridden) and retrieve the artifacts \"\"\"\n test_type, artifact_name_list = FDH.get_test_type(utm_campaign, start_time, end_time, 
DL.CampaignReportingLoader(query_type=''), test_type_var)\n \n return render_to_response('campaigns/show_campaigns.html', {'utm_campaign' : utm_campaign, 'test_name' : test_name, 'start_time' : start_time_est, 'end_time' : end_time_est, 'one_step' : one_step, \\\n 'artifacts' : artifact_name_list, 'test_type' : test_type, 'err_msg' : err_msg}, context_instance=RequestContext(request)) \n\n except Exception as inst:\n \n logging.error('Failed to correctly produce campaign diagnostics.')\n logging.error(type(inst))\n logging.error(inst.args)\n logging.error(inst)\n \n \"\"\" Return to the index page with an error \"\"\"\n err_msg = 'There is insufficient data to analyze this campaign: %s. Check to see if the <a href=\"/LML/\">impressions have been loaded</a>. <br><br>ERROR:<br><br>%s' % (utm_campaign, inst.__str__())\n \n return index(request, kwargs={'err_msg' : err_msg})", "def validate(self, data):\n company = data['company']\n invoice = data.get(\"invoice\")\n if not self.context['request'].user.has_perm(\"view_company\", company) or not all(\n self.context['request'].user.has_perm(\"view_media\", media) for media in invoice):\n raise PermissionDenied()\n return data", "def validate(self, data):\n company = data['company']\n invoice = data.get(\"invoice\")\n if not self.context['request'].user.has_perm(\"view_company\", company) or not all(\n self.context['request'].user.has_perm(\"view_media\", media) for media in invoice):\n raise PermissionDenied()\n return data", "def campground_checker_view(request):\n # If POST request, retrieve data from API\n if request.method == 'POST':\n form = forms.CampgroundForm(request.POST)\n if form.is_valid():\n start_date = form.cleaned_data['start_date']\n end_date = form.cleaned_data['end_date']\n camp_ids = form.cleaned_data['camp_ids']\n camp_id_list = camp_ids.split()\n try:\n results, start_string, end_string = check.master_scraping_routine(camp_id_list, start_date, end_date)\n return render(request, 'availability_results.html', {'start_date': start_string,\n 'end_date': end_string,\n 'results': results})\n except:\n return render(request, 'no_results_found.html')\n else:\n return 'No success'\n # If GET or other type of request, load empty form\n else:\n form = forms.CampgroundForm()\n return render(request, 'availability.html', {'form': form})", "def validate_schema(self, data, **kwargs):\n if \"role\" not in data and \"visible\" not in data:\n raise ValidationError(_(\"Missing fields 'role' and/or 'visible'.\"))", "def __call__(self, data):\n data_combiner = DataCombiner(self.instance, data)\n company = data_combiner.get_value(self.company_field)\n contact = data_combiner.get_value(self.contact_field)\n\n if contact.company != company:\n raise ValidationError({\n self.contact_field: self.message,\n })", "def update_c_mandatory_fields(request, campaign_id):\n # print(request.POST)\n campaign = Campaign.objects.get(id=campaign_id)\n form = CampaignForm(request.POST, instance = campaign)\n # print(form)\n if form.is_valid():\n form.save()\n return redirect('add_campaign_spec', id=campaign_id)\n else:\n # return redirect('clonecampaign', id=campaign_id)\n print(form.errors)\n return redirect(reverse('edit_campaign', kwargs={'campaign_id':campaign_id}))", "def validate(self) -> bool:\n required = self.crud.validate(required=True)\n if required:\n raise ValueError(\n f\"Validation error. 
Required destination fields are not present in the crosswalk: {required}\"\n )", "def get_campaign_command(client: Client, campaign_id: str) -> CommandResults | str:\n try:\n raw_response = client.get_campaign(campaign_id)\n except ValueError:\n return 'Campaign Id not found'\n\n campaign_general_fields = ['id', 'name', 'description', 'startDate', 'notable']\n campaign_fields = ['families', 'techniques', 'actors', 'brands', 'malware']\n\n outputs = {}\n outputs['campaignMembers'] = dict_safe_get(raw_response, ['campaignMembers'])\n outputs['info'] = {key: value for key, value in raw_response.items() if key in campaign_general_fields}\n outputs.update({key: value for key, value in raw_response.items() if key in campaign_fields})\n fields_readable_output = \"\"\n for field in campaign_fields:\n fields_readable_output += \"\\n\" + tableToMarkdown(field.capitalize(),\n dict_safe_get(outputs, [field]), headers=['id', 'name'],\n headerTransform=pascalToSpace\n )\n\n campaign_info_output = tableToMarkdown('Campaign Information',\n outputs['info'],\n headers=['id', 'name', 'description', 'startDate', 'notable'],\n headerTransform=pascalToSpace\n )\n campaign_members_output = tableToMarkdown('Campaign Members',\n outputs['campaignMembers'],\n headers=['id', 'threat', 'type'],\n headerTransform=pascalToSpace\n )\n\n readable_output = campaign_info_output + \"\\n\" + campaign_members_output + fields_readable_output\n\n return CommandResults(\n readable_output=readable_output,\n outputs_prefix='Proofpoint.Campaign',\n outputs=outputs,\n outputs_key_field='id',\n raw_response=raw_response\n )", "def test_get_campaign_by_id_passes(self):\n response = self.client.get(f\"{self.endpoint_url}{self.test_campaign.id}/\")\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_body, {\"id\": CAMPAIGN_ID, \"name\": CAMPAIGN_NAME})", "def validate(self, data):\n # if data['is_private'] and data['contestants']:\n # raise serializers.ValidationError(\"Can not be private and compete for an award.\")\n return data", "def validate(self):\n\n form = CallEventForm(self.data)\n if not form.is_valid():\n self.errors = form.errors\n map_dict_fields(self.errors, const.DB_FIELDS, const.API_FIELDS)", "def testGetCampaign(self):\n if self.__class__.campaign1 is None:\n self.testSaveCampaign()\n self.assert_(isinstance(self.__class__.service.GetCampaign(\n self.__class__.campaign1['id']), tuple))", "def run_validation(self, data=empty):\n\n if data is not empty:\n unknown = set(data) - set(self.fields)\n if unknown:\n errors = ['Unknown field: {}'.format(f) for f in unknown]\n raise ValidationError({api_settings.NON_FIELD_ERRORS_KEY: errors})\n return super().run_validation(data)", "def validate(self):\n return self.validator.validate(self.fields)", "def run_validation(self, data=empty):\n\n # no idea why there is no such built in feature in DRF\n if data is not empty:\n unknown = set(data) - set(self.fields)\n if unknown:\n errors = ['Unknown field: {}'.format(f) for f in unknown]\n raise ValidationError({api_settings.NON_FIELD_ERRORS_KEY: errors})\n return super().run_validation(data)", "def test_view_form_valid_sales_method(self, google):\n google.return_value = GeocoderMock()\n\n form_data = super(BaseSearchPageViewTestCase, self).get_data_sales()\n\n view = super(BaseSearchPageViewTestCase, self).initialize(BaseSearchPageView(), None)\n\n view.form_class = SearchForm\n\n form = SearchForm(form_data)\n\n form.is_valid()\n\n response = view.form_valid(form)\n\n (url, 
query) = super(BaseSearchPageViewTestCase, self).parse_url(response)\n\n form_data = super(BaseSearchPageViewTestCase, self).get_data_extra(form_data)\n\n for key, value in form_data.iteritems():\n self.assertTrue(key in query and query[key] == str(value))\n\n # Check we are dealing with a redirect and path as expected as sales/search\n self.assertIsInstance(response, HttpResponseRedirect)\n self.assertEqual(url.path, '/sales/search/')", "def send_validation_request(self):\r\n self.send_request(send_function=self._assemble_and_send_validation_request)", "def test_create_new_campaign_by_admin_passes(self):\n response = self.client.post(\n self.endpoint_url,\n json={\n \"logo\": None,\n \"name\": NEW_CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n headers={\"Authorization\": self.session_token},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 201)\n self.assertEqual(response_body, {\"campaignId\": 2})", "def test_update_existent_campaign_by_admin_passes(self):\n response = self.client.patch(\n f\"{self.endpoint_url}{self.test_campaign.id}/\",\n json={\n \"logo\": None,\n \"name\": NEW_CAMPAIGN_NAME,\n \"organisations\": [],\n \"url\": None,\n },\n headers={\"Authorization\": self.admin_token},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_body[\"Success\"], \"Campaign 1 updated\")", "def test_update_existent_campaign_by_unauthenticated_user_fails(self):\n response = self.client.patch(\n f\"{self.endpoint_url}{self.test_campaign.id}/\",\n json={\n \"logo\": None,\n \"name\": NEW_CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n )\n self.assertEqual(response.status_code, 401)", "def show_validator(self, show_validator):\n\n self._show_validator = show_validator", "def show_validator(self, show_validator):\n\n self._show_validator = show_validator", "def cingValidation(self): \n \n self.cingRun()\n \n self.analyseCingResults()", "def check(self, args):\n self.parent.footer.set_text(\"Checking data...\")\n self.parent.refreshScreen()\n # Get field information\n responses = dict()\n\n for index, fieldname in enumerate(self.fields):\n if fieldname != \"blank\":\n responses[fieldname] = self.edits[index].get_edit_text()\n\n password = responses[\"FUEL_ACCESS/password\"]\n confirm_password = responses.pop(\"CONFIRM_PASSWORD\")\n\n if self.parent.save_only:\n return responses\n\n # Validate each field\n errors = []\n warnings = []\n\n # Passwords must match\n if password != confirm_password and \\\n password != self.defaults['FUEL_ACCESS/password']['value']:\n errors.append(\"Passwords do not match.\")\n\n # Password must not be empty\n if len(password) == 0:\n errors.append(\"Password must not be empty.\")\n\n # Password needs to be in ASCII character set\n try:\n if password.decode('ascii'):\n pass\n except UnicodeDecodeError:\n errors.append(\"Password contains non-ASCII characters.\")\n\n # Passwords should be at least 8 symbols\n if len(password) < 8:\n warnings.append(\"8 symbols\")\n\n # Passwords should contain at least one digit\n if re.search(r\"\\d\", password) is None:\n warnings.append(\"one digit\")\n\n if re.search(r\"[A-Z]\", password) is None:\n warnings.append(\"one uppercase letter\")\n\n if re.search(r\"[a-z]\", password) is None:\n warnings.append(\"one lowercase letter\")\n\n if re.search(r\"[!#$%&'()*+,-@./[\\\\\\]^_`{|}~\" + r'\"]', password) \\\n is None:\n warnings.append(\"one special character\")\n\n if 
len(errors) > 0:\n log.error(\"Errors: %s %s\" % (len(errors), errors))\n modulehelper.ModuleHelper.display_failed_check_dialog(self, errors)\n return False\n\n if len(warnings) > 0:\n self.parent.footer.set_text(\"Warning: Password should have \"\n \"at least %s.\" % (warnings[0]))\n else:\n self.parent.footer.set_text(\"No errors found.\")\n\n return responses", "def validator(self, *args, **kwargs):\n if 'framework_slug' not in kwargs:\n current_app.logger.error(\"Required parameter `framework_slug` is undefined for the calling view.\")\n abort(500, \"There was a problem accessing this page of your application. Please try again later.\")\n\n if current_user.is_authenticated and current_user.supplier_id:\n supplier_framework = self.data_api_client.get_supplier_framework_info(\n current_user.supplier_id, kwargs['framework_slug']\n )['frameworkInterest']\n\n if supplier_framework['applicationCompanyDetailsConfirmed'] is not True:\n return abort(400, \"You cannot access this part of your application until you have confirmed your \"\n \"company details.\")\n\n return True", "def validate_get_openings_result(self, result):\n\n\n requiredFields = {'company', 'title', 'url', 'locations'}\n #optionalFields = {'department', 'description'}\n\n for r in result:\n if not requiredFields.issubset(set(result.keys())):\n return False\n\n return True", "def validate(self, data: Dict[str, Any]):\n data[\"profile\"] = self.context.get(\"profile\")\n\n if data.get(\"separate_commission_for_seller_and_buyer\") or data.get(\"only_seller_commission\"):\n if not data.get(\"seller_commission_value\") or not data.get(\"seller_commission_applied_to\") or not data.get(\"seller_commission_type\"):\n raise serializers.ValidationError(texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED.format(entity=\"Seller's\"))\n\n if data[\"seller_commission_type\"] == CommissionAndDiscountType.PERCENT and (not data.get(\"seller_price_for_commission\") or not data.get(\"seller_price_for_commission_applied_to\")):\n raise serializers.ValidationError(\n texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED_WHEN_PERCENT.format(entity=\"Seller's\"))\n\n if data.get(\"separate_commission_for_seller_and_buyer\") or data.get(\"only_buyer_commission\"):\n if not data.get(\"buyer_commission_value\") or not data.get(\"buyer_commission_applied_to\") or not data.get(\"buyer_commission_type\"):\n raise serializers.ValidationError(texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED.format(entity=\"Buyer's\"))\n\n if data[\"buyer_commission_type\"] == CommissionAndDiscountType.PERCENT and (not data.get(\"buyer_price_for_commission\") or not data.get(\"buyer_price_for_commission_applied_to\")):\n raise serializers.ValidationError(\n texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED_WHEN_PERCENT.format(entity=\"Buyer's\"))\n\n if not data.get(\"separate_commission_for_seller_and_buyer\") and not data.get(\"only_seller_commission\") and not data.get(\"only_buyer_commission\"):\n if not data.get(\"seller_commission_value\") or not data.get(\"seller_commission_applied_to\") or not data.get(\"seller_commission_type\"):\n raise serializers.ValidationError(\n texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED.format(entity=\"\"))\n\n if data[\"seller_commission_type\"] == CommissionAndDiscountType.PERCENT and (not data.get(\"seller_price_for_commission\") or not data.get(\"seller_price_for_commission_applied_to\")):\n raise serializers.ValidationError(\n 
texts.VALIDATION_DATA_FOR_COMMISSION_CALCULATION_REQUIRED_WHEN_PERCENT.format(entity=\"\"))\n\n return data", "def test_validate_when_user_found(self, view, mget_user):\n assert view.validate() is None", "def validate(self):\n if self.id == None:\n return False\n return self.validator.validate(self.fields)", "def contribute(request, campaign_id, template='campaign/campaign_contribution_form.html'):\r\n campaign = get_object_or_404(Campaign.objects.active(), pk=campaign_id)\r\n if not campaign.is_free:\r\n # Disable direct credit card based contribution\r\n request.user.message_set.create(message=_('That payment option is not available for this campaign.'))\r\n return HttpResponseRedirect(reverse('view_campaign', kwargs={'campaign_id':campaign_id}))\r\n err_msg = None\r\n try:\r\n qualifies, reasons = campaign.is_user_qualified(request.user)\r\n user_profile=request.user.get_profile()\r\n data = None\r\n if qualifies and request.user.first_name and request.user.last_name:\r\n # Skip the form and directly register this event attendee.\r\n data = {'first_name':request.user.first_name, \r\n 'last_name':request.user.last_name,\r\n 'birth_date':user_profile.birth_date}\r\n if data or request.method == 'POST':\r\n if request.method == 'POST':\r\n data = request.POST\r\n form = forms.DirectContributionForm(data=data, campaign=campaign, user_profile=user_profile)\r\n if form.is_valid():\r\n contribution = form.save(commit=True)\r\n _log.info('Contribution processed %s', contribution)\r\n if contribution.qty > 1:\r\n request.user.message_set.create(message=_('Your %s contributions totalling $.2f have been processed. Thank you.' % (contribution.qty, contribution.amount)))\r\n elif not campaign.is_free:\r\n request.user.message_set.create(message=_('Your contribution of $.2f has been processed. Thank you.' % contribution.amount))\r\n else:\r\n request.user.message_set.create(message=_('You have successfully joined this free campaign. 
Thank you.'))\r\n return HttpResponseRedirect(reverse('view_campaign', kwargs={'campaign_id':campaign_id}))\r\n else:\r\n form = forms.DirectContributionForm(campaign=campaign, user_profile=user_profile)\r\n ctx = {'campaign':campaign, 'c':campaign, 'form':form}\r\n except CampaignError, e:\r\n request.user.message_set.create(message=e.message)\r\n return HttpResponseRedirect(reverse('view_campaign', kwargs={'campaign_id':campaign.pk}))\r\n return render_view(request, template, ctx)", "def get_adcampaign(self, campaign_id, fields, batch=False):\n path = '%s' % campaign_id\n args = {'fields': fields}\n return self.make_request(path, 'GET', args, batch=batch)", "def test_context_data_with_valid_search_and_no_results(self):\n response = self.client.get(self.get_url(), {'description': 'test'})\n context = response.context\n self.assertIn('form', context)\n self.assertTrue(context['has_results'])\n self.assertIsInstance(context['results_table'], tables.SourceTraitTableFull)", "def check_for_required_fields(cls, fields=[], dataDict={}):\n\n validateRequired = Validate.required(fields=fields, dataDict=dataDict)\n if validateRequired['status'] == False:\n res = jsonify(\n {'status': 400, 'error': validateRequired['message'], 'data': []})\n return abort(make_response(res, 400))\n return True", "def test_get_existent_campaigns_returns_campaigns_list(self):\n test_campaign = return_canned_campaign()\n test_campaign.create()\n response = self.client.get(self.endpoint_url)\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(\n response_body, {\"campaigns\": [{\"id\": 1, \"name\": \"Test Campaign\"}]}\n )", "def validate_arguments(self, contest):\n #\n # validation if the contest argument\n self.validate_variable(Contest, contest)", "def test_admin_sms_campaign_view_list(self):\n response = self.client.get('/admin/sms_module/smscampaign/')\n self.failUnlessEqual(response.status_code, 200)", "def edit_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), data=kwargs.pop('body'), params=kwargs)", "def test_request_form_successful(self):\n response = self.client.get(reverse(\n 'form', kwargs={'slug': self.agency.slug}))\n self.assertContains(response, self.agency.name)", "def get_campaign(self, campaignId, **kwargs) -> ApiResponse:\n return self._request(fill_query_params(kwargs.pop('path'), campaignId), params=kwargs)", "def view_edit_validate(request):\n template_file = \"validate_edit_or_validate.html\"\n # get all not validated curations\n curations = models.Curation.objects.\\\n filter(validated_by=None).\\\n order_by('created').all()\n template = {'curations': curations}\n return render_to_response(template_file, template,\n context_instance=RequestContext(request))", "def validate_data(self, data):\n try:\n event_id = data[\"event\"]\n guests = data[\"guests\"]\n section = data.get(\"section\", None)\n except KeyError:\n return False, \"\", \"\", \"\"\n try:\n event = Event.objects.get(pk=event_id)\n except Event.DoesNotExist:\n return False, \"\", \"\", \"\"\n if not isinstance(guests, list):\n return False, \"\", \"\", \"\"\n if section is not None:\n try:\n event.venue.sections.get(pk=section)\n except Section.DoesNotExist:\n return False, \"\", \"\", \"\"\n return True, event, guests, section", "def __call__(self, data=None):\n if not self.instance.assignees.count():\n raise ValidationError({\n 'assignees': [self.no_assignees_message],\n })\n\n if not self.instance.assignees.filter(is_lead=True).count():\n raise 
ValidationError({\n 'assignee_lead': [self.no_lead_assignee_message],\n })\n\n if not self.instance.assignees.aggregate(sum=models.Sum('estimated_time'))['sum']:\n raise ValidationError({\n 'assignee_time': [self.no_estimated_time_message],\n })", "def validate(self, visibility_field):\r\n if self.original_query.filter is wtypes.Unset:\r\n self.filter_expr = None\r\n else:\r\n self.filter_expr = json.loads(self.original_query.filter)\r\n self._validate_filter(self.filter_expr)\r\n self._replace_isotime_with_datetime(self.filter_expr)\r\n self._convert_operator_to_lower_case(self.filter_expr)\r\n self._normalize_field_names_for_db_model(self.filter_expr)\r\n\r\n self._force_visibility(visibility_field)\r\n\r\n if self.original_query.orderby is wtypes.Unset:\r\n self.orderby = None\r\n else:\r\n self.orderby = json.loads(self.original_query.orderby)\r\n self._validate_orderby(self.orderby)\r\n self._convert_orderby_to_lower_case(self.orderby)\r\n self._normalize_field_names_in_orderby(self.orderby)\r\n\r\n if self.original_query.limit is wtypes.Unset:\r\n self.limit = None\r\n else:\r\n self.limit = self.original_query.limit\r\n\r\n if self.limit is not None and self.limit <= 0:\r\n msg = _('Limit should be positive')\r\n raise ClientSideError(msg)", "def validate(self, data):\n request = self.context.get('request')\n data['poster'] = request.user\n\n return validate_complete_address(data)", "def validate(self):\r\n def validation_assert(predicate):\r\n \"\"\" Throw a ValidationError if false. \"\"\"\r\n if not predicate:\r\n raise ProfileDistribution.ValidationError()\r\n\r\n validation_assert(isinstance(self.feature, str))\r\n validation_assert(self.feature in DISPLAY_NAMES)\r\n validation_assert(isinstance(self.feature_display_name, str))\r\n validation_assert(self.type in ['EASY_CHOICE', 'OPEN_CHOICE'])\r\n validation_assert(isinstance(self.data, dict))\r\n if self.type == 'EASY_CHOICE':\r\n validation_assert(isinstance(self.choices_display_names, dict))", "def do_validate(self, request, _object):\n\n pass", "def validate_contact(data, err_obj):\n\n error_count = 0\n\n if not data.get('contact_type').strip():\n error_count += 1\n err_obj['contact_type'] = 'Contact Type Required'\n return\n\n phone_or_email_only = False\n if data.get('contact_type') == 'Phone/Email Only':\n phone_or_email_only = True\n\n if not data.get('contact_priority').strip():\n error_count += 1\n err_obj['contact_priority'] = 'Priority Required'\n\n if phone_or_email_only:\n if not data.get('contact_description'):\n err_obj['contact_description'] = 'Contact Description Required.'\n error_count += 1\n\n # if not data.get('contact_mobile').strip() and not data.get('contact_phone').strip() and not data.get('contact_email').strip() and not data.get('contact_description'):\n # error_count += 3\n # err_obj['contact_phone'] = 'Mobile or Phone or Email Required'\n # err_obj['contact_mobile'] = 'Mobile or Phone or Email Required'\n # err_obj['contact_email'] = 'Mobile or Phone or Email Required'\n\n else:\n if not data.get('contact_first_name').strip():\n error_count += 1\n err_obj['contact_first_name'] = 'Please enter a first name'\n\n if not data.get('contact_last_name').strip():\n error_count += 1\n err_obj['contact_last_name'] = 'Please enter a last name'\n\n if not data.get('contact_postcode').strip():\n error_count += 1\n err_obj['contact_postcode'] = 'Please enter a postcode'\n\n if not data.get('contact_address_1').strip():\n error_count += 1\n err_obj['contact_address_1'] = 'Please enter an address line 1'\n\n if 
not data.get('contact_mobile').strip() and not data.get('contact_phone').strip():\n error_count += 1\n err_obj['contact_phone'] = 'Mobile or Phone Required'\n err_obj['contact_mobile'] = 'Mobile or Phone Required'\n\n if data.get('contact_mobile').strip():\n if not validate_mobile_number(data['contact_mobile'], err_obj, 'contact_mobile'):\n error_count += 1\n\n if data.get('contact_phone').strip():\n if not validate_phone_number(data['contact_phone'], err_obj, 'contact_phone'):\n error_count += 1\n\n if data.get('contact_email').strip():\n if not validate_email_addr(data['contact_email'], err_obj, 'contact_email'):\n error_count += 1\n\n if error_count:\n return\n\n return True", "def test_create_new_campaign_by_unauthenticated_user_fails(self):\n response = self.client.post(\n self.endpoint_url,\n json={\n \"logo\": None,\n \"name\": CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n )\n self.assertEqual(response.status_code, 401)", "def get_campaign(self, campaign_id: str) -> dict:\n return self.http_request(\"GET\", f'/campaign/{campaign_id}')", "def campaign_id(request):\n\n user = None\n response = ApiJsonResponse()\n try:\n user = MyUser.objects.get(pk=request.user.pk)\n except ObjectDoesNotExist:\n return Response({\n \"msg\": _('MSG_USER_NOT_EXIST'),\n \"status\": 404\n }, status=404)\n try:\n company = Company.objects.get(owner=user)\n except:\n return Response({\n \"msg\": _('MSG_COMPANY_NOT_EXIST'),\n \"status\": 404\n }, status=404)\n try:\n promotions = Promotion.objects.filter(company=company)\n except ObjectDoesNotExist:\n response.set_data(\"[]\")\n response.set_result_code(200)\n response.set_result_msg(\"MSG_PROMOTIONS_NOT_FOUNDED\")\n return JsonResponse(response.get_dict())\n list_of_promotions = []\n for promotion in promotions:\n list_of_promotions.append({'name': promotion.campaign_name, 'id': promotion.pk})\n return Response({\n \"msg\": _('MSG_PROMOTION_FOUNDED'),\n \"list_of_promotions\": list_of_promotions,\n \"status\": 200\n }, status=200)", "def test_update_existent_campaign_by_non_admin_fails(self):\n response = self.client.patch(\n f\"{self.endpoint_url}{self.test_campaign.id}/\",\n json={\n \"logo\": None,\n \"name\": NEW_CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n headers={\"Authorization\": self.non_admin_token},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 403)\n self.assertEqual(\n response_body[\"Error\"], \"CampaignsRestAPI PATCH: User not a Org Manager\"\n )\n self.assertEqual(response_body[\"SubCode\"], \"UserNotPermitted\")", "def check_vulnerability_in_result(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for vulnerability in vulnerabilities:\n assert \"cvss\" in vulnerability\n assert \"is_private\" in vulnerability\n assert \"vendor_cve_ids\" in vulnerability", "def get(self, campaign_id):\n campaign = Campaign.query.filter_by(mailchimp_id=campaign_id).first()\n if campaign is None:\n return {\"message\": \"Campaign could not be found.\"}, HTTPStatus.NOT_FOUND\n return self.schema.dump(campaign), HTTPStatus.OK", "def test_params_passed(self, mock_field_validator, mock_error_builder):\n\n ChargeTypeValidator.validate(CHARGE_TYPE)\n\n calls = [\n call(CHARGE_TYPE, 'charge-type', 'Charge type', mock_error_builder(),\n summary_message='Choose one option', inline_message='Choose one option'),\n call().is_required()\n ]\n 
mock_field_validator.assert_has_calls(calls)", "def getresult(request):\r\n\r\n form = forms.SearchForm(request.POST)\r\n form.find_minmax_criteria(request.POST)\r\n\r\n if form.is_valid():\r\n\r\n headers, results = search.query(form.to_criteria(), form.cleaned_data['sector'], form.cleaned_data['exchange'], form.cleaned_data['show_result'])\r\n\r\n # show result in response\r\n return render_to_response('search/result.html', {\r\n 'headers': headers,\r\n 'results': results\r\n })\r\n\r\n else:\r\n # show error message in response\r\n return render_to_response('search/result-error.html', {\r\n 'message': 'Please enter details correctly.',\r\n 'form': form\r\n })", "def validate(self):\n if self.tba_key is None:\n self.log.error(\n \"You are missing the TBA-Key field. Please check https://github.com/team4099/scouting-data-ingest#tba for more information.\"\n )\n return False\n\n self.check_internet_connection()\n\n if self.year is None:\n self.log.error(\n \"You are missing the Year field. Please add one in the style shown below.\"\n )\n year_example = \"\"\"\n {\n \"Year\": \"2020\"\n }\n \"\"\"\n console.print(Syntax(year_example, \"json\"))\n console.print(\n \"Reference https://github.com/team4099/scouting-data-ingest#configuration for more information.\"\n )\n return False\n\n if self.google_credentials is None:\n self.log.error(\n \"You are missing the Google-Credentials field. Please check https://github.com/team4099/scouting-data-ingest#google-service-account-credentials-file for more information.\"\n )\n return False\n elif not os.path.isfile(f\"config/{self.google_credentials}\"):\n self.log.error(\n \"The file listed in the Google-Credentials field does not exist in the config folder. Please place it inside the config folder.\"\n )\n return False\n else:\n try:\n gc = gspread.service_account(f\"./config/{self.google_credentials}\")\n except ValueError as e:\n self.log.error(\n \"The file listed in the Google-Credentials Field is improper. See below for details.\"\n )\n self.log.error(e)\n return False\n\n if self.spreadsheet is None:\n self.log.error(\n \"You are missing the Spreadsheet field. Please check https://github.com/team4099/scouting-data-ingest#spreadsheet for more information.\"\n )\n return False\n else:\n try:\n gc.open(f\"{self.spreadsheet}\").get_worksheet(0)\n except gspread.exceptions.SpreadsheetNotFound:\n self.log.error(\n \"The file listed in the Spreadsheets field has not been shared with the service account. Please make sure it is.\"\n )\n return False\n\n if self.db_user is None:\n self.log.error(\n \"You are missing the Database User field. Please check https://github.com/team4099/scouting-data-ingest#mysql for more information.\"\n )\n return False\n\n if self.db_pwd is None:\n self.log.error(\n \"You are missing the Database Password field. Please check https://github.com/team4099/scouting-data-ingest#mysql for more information.\"\n )\n return False\n\n try:\n create_engine(\n f\"mysql+pymysql://{self.db_user}:{self.db_pwd}@{self.db_host}/scouting\"\n )\n except pymysql.err.OperationalError:\n self.log.error(\n \"Your Database user name and/or password is not correct. Please verify them.\"\n )\n\n if self.event is None:\n self.log.error(\n \"You are missing the Event field. 
Please check https://github.com/team4099/scouting-data-ingest#event for more information.\"\n )\n return False\n\n if (\n requests.get(\n f\"https://www.thebluealliance.com/api/v3/event/{self.year}{self.event}\",\n headers={\"X-TBA-Auth-Key\": self.tba_key},\n ).status_code\n == 404\n ):\n self.log.error(\n \"The event listed in the TBA-Key field is not valid. Please ensure the event key and year are correct.\"\n )\n return False\n\n if self.simulation:\n if self.simulator_url is None:\n self.log.error(\n \"You are missing the Simulator URL field. Please check https://github.com/team4099/scouting-data-ingest#tba for more information.\"\n )\n return False\n\n try:\n simulator_status = requests.get(\n f\"{self.simulator_url}/matches\"\n ).status_code\n except (\n ConnectionRefusedError,\n urllib3.exceptions.NewConnectionError,\n requests.exceptions.ConnectionError,\n ):\n self.log.error(\n \"The simulator may not be running or it's at a different url than the one provided.\"\n )\n return False\n\n if simulator_status == 401:\n self.log.error(\n \"The simulator may not be running. Please make sure it is and that it is up-to-date.\"\n )\n return False\n\n if self.simulator_spreadsheet is None:\n self.log.error(\n \"You are missing the Simulator Spreadsheet field. Please check https://github.com/team4099/scouting-data-ingest#spreadsheet for more information.\"\n )\n return False\n else:\n try:\n gc.open(f\"{self.simulator_spreadsheet}\").get_worksheet(0)\n except gspread.exceptions.SpreadsheetNotFound:\n self.log.error(\n \"The file listed in the Simulator Spreadsheet field has not been shared with the service account. Please make sure it is. Please also make sure the name entered is correct.\"\n )\n return False\n\n return True", "def is_field_available(request):\n if request.method == \"GET\":\n get = request.GET.copy()\n if get.has_key('username'):\n name = get['username']\n if User.objects.filter(username__iexact=name):\n return HttpResponse(False)\n else:\n return HttpResponse(True)\n if get.has_key('email'):\n email = get['email']\n if User.objects.filter(email__iexact=email):\n return HttpResponse(False)\n else:\n return HttpResponse(True)\n\n return HttpResponseServerError(_(\"Requires username or email to test\"))", "async def _show_form(self, errors=None):\n data_schema = vol.Schema(\n {\n vol.Optional(\n CONF_LATITUDE, default=self.hass.config.latitude\n ): cv.latitude,\n vol.Optional(\n CONF_LONGITUDE, default=self.hass.config.longitude\n ): cv.longitude,\n vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS): cv.positive_int,\n }\n )\n\n return self.async_show_form(\n step_id=\"user\", data_schema=data_schema, errors=errors or {}\n )", "def check_for_exposed(context):\n json_data = context.response.json()\n if \"exploitable_vulnerabilities_count\" in json_data:\n raise Exception(\"Field exploitable_vulnerabilities_count Exposed in\"\n \" Free user result\")\n if \"vendor_package_link\" in json_data:\n raise Exception(\"Field vendor_package_link has been exposed for free user\")", "def test_view_form_valid_lettings_method(self, google):\n google.return_value = GeocoderMock()\n\n form_data = super(BaseSearchPageViewTestCase, self).get_data_lettings()\n\n view = super(BaseSearchPageViewTestCase, self).initialize(BaseSearchPageView(), None)\n\n view.form_class = SearchForm\n\n form = SearchForm(form_data)\n\n form.is_valid()\n\n response = view.form_valid(form)\n\n (url, query) = super(BaseSearchPageViewTestCase, self).parse_url(response)\n\n form_data = super(BaseSearchPageViewTestCase, 
self).get_data_extra(form_data)\n\n for key, value in form_data.iteritems():\n self.assertTrue(key in query and query[key] == str(value))\n\n # Check we are dealing with a redirect and path as expected is lettings/search\n self.assertIsInstance(response, HttpResponseRedirect)\n self.assertEqual(url.path, '/lettings/search/')", "def validate_view(self):\n if self.view and self.get_custom_view() is None:\n raise ValidationError(\"Custom view couldn't be loaded: {}\".\n format(self.view))", "def list_campaigns(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def __call__(self, data=None):\n data_combiner = DataCombiner(self.instance, data)\n\n meta = self.instance._meta\n errors = defaultdict(list)\n\n # direct required fields\n for field_name in self.REQUIRED_FIELDS:\n field = meta.get_field(field_name)\n\n if isinstance(field, models.ManyToManyField):\n value = data_combiner.get_value_to_many(field_name)\n else:\n value = data_combiner.get_value(field_name)\n\n if not value:\n errors[field_name] = [self.message]\n\n # extra validators\n extra_errors = self._run_extra_validators(data)\n for field, field_errors in extra_errors.items():\n errors[field] += field_errors\n\n if errors:\n raise ValidationError(errors)", "def validate(self, consentDict):\n if(consentDict == None):\n return self.composeResult(API.STATUS_CODE.FAILED, errorMessage=\"Invalid consent dictionary object (null) passed!\")\n\n missingAttrList = []\n tableAttr = CustomerConsentDataTable.TABLE.ATTRIBUTE\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.CUSTOMER_MK)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.SOURCE_MARKET)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.COUNTRY)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.CONTACT_TYPE)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.CONSENT_STATUS)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, tableAttr.CONSENT_DATETIME)\n\n contactType = consentDict.get(tableAttr.CONTACT_TYPE)\n contactTypeKey = self.contactTypeKeys.get(contactType)\n self.__validate_checkAndAddToMissingAttrList(consentDict, missingAttrList, contactTypeKey)\n\n if(len(missingAttrList) > 0):\n errorMessage = f\"Consent is missing mandatory attribute(s): {missingAttrList}!\"\n return self.composeResult(API.STATUS_CODE.FAILED, errorMessage=errorMessage)\n\n return self.composeResult(API.STATUS_CODE.SUCCESS, data=consentDict)", "def test_context_data_with_valid_search_and_no_results(self):\n response = self.client.get(self.get_url(self.study.pk), {'description': 'test'})\n context = response.context\n self.assertIn('form', context)\n self.assertTrue(context['has_results'])\n self.assertIsInstance(context['results_table'], tables.SourceTraitTableFull)", "def validate(self, data):\n if data.has_key('site'):\n if FieldSightXF.objects.filter(\n xf__id=data['xf'], is_staged=False, is_scheduled=True, site=data['site']).exists():\n raise serializers.ValidationError(\"Form Already Exists, Duplicate Forms Not Allowded\")\n elif data.has_key('project'):\n if FieldSightXF.objects.filter(\n xf__id=data['xf'], is_staged=False, is_scheduled=True, project=data['project']).exists():\n raise serializers.ValidationError(\"Form Already Exists, Duplicate Forms Not Allowded\")\n return data", "def API_company(request):\n query = request.GET\n if any(key for key 
in query if key not in API_COMPANY_VALIDKEYS):\n #print([(key,key not in API_COMPANY_VALIDKEYS) for key in query])\n return django.http.HttpResponseBadRequest(\"Invalid query\")\n if \"search\" in query:\n return API_companysearch(request)\n elif \"po\" in query:\n return API_companypo(request)\n return django.http.Http404()", "def clean_form(self):\n\n for condition in self.conditional_rules:\n\n all_conditions_passed = True\n\n # Only check if the field is required\n field_required = condition['required']\n if field_required:\n action = condition['action']\n\n for cond in condition['conditions']:\n condition_rule = cond['rule']\n condition_value = cond['value']\n condition_field_value = self.cleaned_data.get(cond['field_name'], None)\n\n all_conditions_passed = conditions_passed(condition_rule, condition_value, condition_field_value)\n if not all_conditions_passed:\n break\n\n if not all_conditions_passed and action == consts.FIELD_ACTION_SHOW:\n if condition['field_name'] in self._errors:\n del self._errors[condition['field_name']]\n elif all_conditions_passed and action == consts.FIELD_ACTION_HIDE:\n if condition['field_name'] in self._errors:\n del self._errors[condition['field_name']]\n\n return self.cleaned_data", "def validate(self, view, data):\n\n with self._lock:\n return view and (data.get('id') in self.request_ids[view.id()])", "def test_context_data_with_valid_search_and_no_results(self):\n response = self.client.get(self.get_url(), {'description': 'test'})\n context = response.context\n self.assertIn('form', context)\n self.assertTrue(context['has_results'])\n self.assertIsInstance(context['results_table'], tables.SourceDatasetTableFull)", "def post(self):\n json_data = request.get_json()\n json_data[\"sender_id\"] = current_user.id\n try:\n new_campaign = self.schema.load(json_data)\n except ValidationError as err:\n return {\"message\": err.messages}, HTTPStatus.BAD_REQUEST\n if Campaign.query.filter_by(mailchimp_id=new_campaign.mailchimp_id).first() is not None:\n return {\"message\": \"Campaign already exists.\"}, HTTPStatus.CONFLICT\n db.session.add(new_campaign)\n db.session.commit()\n return self.schema.dump(new_campaign), HTTPStatus.CREATED", "def _check_required_fields(self):\n assert self.title\n assert self.format", "def validate_all_fields(self):\n\n if self.validate_byr() and \\\n self.validate_iyr() and \\\n self.validate_eyr() and \\\n self.validate_hgt() and \\\n self.validate_hcl() and \\\n self.validate_ecl() and \\\n self.validate_pid() and \\\n self.validate_cid():\n return True\n return False", "def send_validation_requested(cls, validation_request, activity):\n\n template_id = settings.SENDINBLUE[\n \"REFERRAL_ANSWER_VALIDATION_REQUESTED_TEMPLATE_ID\"\n ]\n\n contact = validation_request.validator\n\n # Get the first unit from referral linked units the user is a part of.\n # Having a user in two different units both assigned on the same referral is a very\n # specific edge case and picking between those is not an important distinction.\n referral = validation_request.answer.referral\n unit = referral.units.filter(members__id=activity.actor.id).first()\n\n # Get the path to the referral detail view from the unit inbox\n link_path = FrontendLink.unit_referral_detail(\n unit=unit.id, referral=referral.id\n )\n\n data = {\n \"params\": {\n \"case_number\": referral.id,\n \"created_by\": activity.actor.get_full_name(),\n \"link_to_referral\": f\"{cls.location}{link_path}\",\n \"referral_users\": referral.get_users_text_list(),\n \"title\": referral.title or 
referral.object,\n \"topic\": referral.topic.name,\n \"unit_name\": unit.name,\n },\n \"replyTo\": cls.reply_to,\n \"templateId\": template_id,\n \"to\": [{\"email\": contact.email}],\n }\n\n cls.send(data)", "def __ui_choose_search_criteria_for_activities(self):\n print(\"By which criteria do you want to search activities?\\n\"\n \" 1. By date\\n\"\n \" 2. By description\\n\")\n user_choice = input(\"Type your option: \").strip()\n if user_choice == \"1\":\n self.__ui_search_activities_by_date()\n elif user_choice == \"2\":\n self.__ui_search_activities_by_description()\n else:\n print(\"Invalid option!\\n\")\n return", "def test_fields_presence(self):\n form = DCEventRequestForm()\n fields_left = set(form.fields.keys())\n fields_right = set([\n 'name', 'email', 'affiliation', 'location', 'country',\n 'conference', 'preferred_date', 'language', 'workshop_type',\n 'approx_attendees', 'attendee_domains', 'attendee_domains_other',\n 'data_types', 'data_types_other', 'attendee_academic_levels',\n 'attendee_data_analysis_level', 'cover_travel_accomodation',\n 'understand_admin_fee', 'fee_waiver_request',\n 'travel_reimbursement', 'travel_reimbursement_other',\n 'comment', 'privacy_consent', 'captcha',\n ])\n self.assertEqual(fields_left, fields_right)", "def clean(self):\n data = super().clean()\n if (str(self.instance.resource_type.id) in data['license'].resource_types\n and data['access_policy'] == data['license'].access_policy):\n return data\n\n raise forms.ValidationError('Invalid policy license combination.')", "def test_create_new_campaign_by_non_admin_fails(self):\n non_admin = create_canned_user()\n non_admin_token = generate_encoded_token(non_admin.id)\n response = self.client.post(\n self.endpoint_url,\n json={\n \"logo\": None,\n \"name\": CAMPAIGN_NAME,\n \"organisations\": [self.test_org.id],\n \"url\": None,\n },\n headers={\"Authorization\": non_admin_token},\n )\n response_body = response.get_json()\n self.assertEqual(response.status_code, 403)\n self.assertEqual(\n response_body[\"Error\"], \"CampaignsAllAPI POST: User not a Org Manager\"\n )\n self.assertEqual(response_body[\"SubCode\"], \"UserNotPermitted\")", "def test_context_data_with_valid_search_and_no_results(self):\n response = self.client.get(self.get_url(), {'description': 'test'})\n context = response.context\n self.assertIn('form', context)\n self.assertTrue(context['has_results'])\n self.assertIsInstance(context['results_table'], tables.HarmonizedTraitTable)", "def is_country_selection_criteria_field_present_in_vendor_profile_destinations_page(self):\n return self.is_specific_selection_criteria_filter_present(self.vendor_profile_destinations_page_div_id, self.country_label_name)", "def test_fields_presence(self):\n form = SWCEventRequestForm()\n fields_left = set(form.fields.keys())\n fields_right = set([\n 'name', 'email', 'affiliation', 'location', 'country',\n 'conference', 'preferred_date', 'language', 'workshop_type',\n 'approx_attendees', 'attendee_domains', 'attendee_domains_other',\n 'attendee_academic_levels', 'attendee_computing_levels',\n 'cover_travel_accomodation', 'understand_admin_fee',\n 'travel_reimbursement', 'travel_reimbursement_other',\n 'admin_fee_payment', 'comment', 'captcha', 'privacy_consent',\n ])\n self.assertEqual(fields_left, fields_right)", "def validate(self, data):\n\t\tvalidated_data = super(BoxSerializer, self).validate(data)\n\t\tuser = self.context['request'].user\n\t\tcheck_constraint_util = CheckConstraintsUtil(user, validated_data, self.instance) \n\t\treturn 
check_constraint_util.check_constraints()", "def serve(self, request, *args, **kwargs):\n\n template = self.get_template(request)\n\n if request.method == 'POST':\n\n form = self.get_form(request.POST, page=self, user=request.user)\n\n if form.is_valid():\n self.process_form_submission(form)\n return HttpResponseRedirect(self.url + '?thank=you')\n\n else:\n\n thanks = request.GET.get('thank', False)\n if thanks:\n form = None\n template = self.get_landing_page_template(request)\n if self.thanks_page_title:\n self.title = self.thanks_page_title\n else:\n form = self.get_form(page=self, user=request.user)\n\n context = self.get_context(request)\n context['form'] = form\n if form:\n context['conditional_rules'] = json.dumps(form.conditional_rules)\n\n return render(\n request,\n template,\n context\n )", "def _get_req_data(kwargs):\n if request.data:\n req = json.loads(request.data, encoding='utf-8')\n else:\n req = request.args or request.form\n\n if 'coid' not in kwargs:\n required_args = [\n 'name',\n 'consrc'\n ]\n\n for arg in required_args:\n if arg not in req or req[arg] == '':\n return True, make_json_response(\n status=410,\n success=0,\n errormsg=gettext(\n \"Could not find the required parameter ({}).\"\n ).format(arg)\n ), req\n return False, '', req", "def test_positive_validation_decision(self, form_field_name, user_data):\n self.assertValidationDecision(\n {form_field_name: user_data},\n {form_field_name: ''}\n )", "def is_country_selection_criteria_field_present_in_vendor_profile_page(self):\n return self.is_specific_selection_criteria_filter_present(self.vendor_profile_page_div_id, self.country_label_name)", "def validate(self) -> None:\n\n if self.field not in self.model.table_fields:\n raise ValueError(f\"Value field {self.field} not present in {self.model.table}\")\n\n if self.pivot:\n if self.pivot not in self.model.table_fields:\n raise ValueError(\n f\"Pivot field {self.pivot} not present in {self.model.table}\"\n )\n\n if self.connector:\n if self.connector not in self.model.table_fields:\n raise ValueError(\n f\"Connector field {self.connector} not present in {self.model.table}\"\n )\n\n for field in self.selectors:\n if field not in self.model.table_fields:\n raise ValueError(f\"Selector field {field} not present in {self.model.table}\")", "def has_permission(self, request, view):\n return has_permission(request.user, Permissions.CAN_ADVANCE_SEARCH)", "def validate_club(self, club):\n request = self.context['request']\n # pylint: disable=no-member\n profile = UserProfile.objects.get(user=request.user)\n if club not in profile.get_club_privileges():\n raise serializers.ValidationError(\n \"You are not authorized to create workshops for this club\")\n return club", "def validate(self, instance=None, REQUEST=None,\n errors=None, data=None, metadata=None, predicates=()):\n if REQUEST:\n fieldset = REQUEST.form.get('fieldset', None)\n else:\n fieldset = None\n fields = []\n\n if fieldset is not None:\n schemata = instance.Schemata()\n fields = [(field.getName(), field)\n for field in schemata[fieldset].fields()\n if not [pred for pred in predicates if not pred(field)]\n ]\n\n else:\n if data:\n fields.extend([(field.getName(), field)\n for field in self.filterFields(isMetadata=0, *predicates)])\n if metadata:\n fields.extend([(field.getName(), field)\n for field in self.filterFields(isMetadata=1, *predicates)])\n\n if REQUEST:\n form = REQUEST.form\n else:\n form = None\n for name, field in fields:\n error = 0\n value = None\n widget = field.widget\n if form:\n result = 
widget.process_form(instance, field, form,\n empty_marker=_marker)\n else:\n result = None\n if result is None or result is _marker:\n accessor = field.getEditAccessor(instance) or field.getAccessor(instance)\n if accessor is not None:\n value = accessor()\n else:\n # can't get value to validate -- bail\n continue\n else:\n value = result[0]\n\n res = field.validate(instance=instance,\n value=value,\n errors=errors,\n REQUEST=REQUEST)\n if res:\n errors[field.getName()] = res\n return errors", "def test_get_non_existent_campaigns_returns_empty_list(self):\n response = self.client.get(self.endpoint_url)\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_body, {\"campaigns\": []})", "def validate(self, request):\n values = {\n 'robot_match_comments':request.POST['robot_match_comments'],\n 'did_foul':'did_foul' in request.POST,\n 'did_technical_foul':'did_technical_foul' in request.POST,\n 'foul_description':request.POST['foul_description'],\n 'did_shoot':'did_shoot' in request.POST,\n 'auto_1':request.POST['auto_1'],\n 'auto_2':request.POST['auto_2'],\n 'auto_3':request.POST['auto_3'],\n 'auto_miss':request.POST['auto_miss'],\n 'teleop_1':request.POST['teleop_1'],\n 'teleop_2':request.POST['teleop_2'],\n 'teleop_3':request.POST['teleop_3'],\n 'teleop_5':request.POST['teleop_5'],\n 'teleop_miss':request.POST['teleop_miss'],\n 'shooting_description':request.POST['shooting_description'],\n 'did_climb':'did_climb' in request.POST,\n 'climb_start':request.POST['climb_start'],\n 'climb_finish':request.POST['climb_finish'],\n 'level_reached':request.POST.get('level_reached'),\n 'frisbees_dumped':request.POST['frisbees_dumped'],\n 'climbing_description':request.POST['climbing_description'],\n 'did_human_load':'did_human_load' in request.POST,\n 'did_ground_load':'did_ground_load' in request.POST,\n 'auto_frisbees_ground_loaded':\\\n request.POST['auto_frisbees_ground_loaded'],\n 'loading_description':request.POST['loading_description'],\n }\n if ((values['did_foul'] or values['did_technical_foul']) and\n not values['foul_description']):\n new_values = self.__dict__.copy()\n new_values.update(values)\n raise ValidationError(\n 'Please enter a description of the foul(s) the robot committed',\n new_values\n )\n if values['did_shoot']:\n try:\n values['auto_1'] = int(values['auto_1'])\n values['auto_2'] = int(values['auto_2'])\n values['auto_3'] = int(values['auto_3'])\n values['auto_miss'] = int(values['auto_miss'])\n values['teleop_1'] = int(values['teleop_1'])\n values['teleop_2'] = int(values['teleop_2'])\n values['teleop_3'] = int(values['teleop_3'])\n values['teleop_5'] = int(values['teleop_5'])\n values['teleop_miss'] = int(values['teleop_miss'])\n except ValueError:\n raise ValidationError(\n 'You must enter a number for all of the shooting numbers',\n self.__dict__.copy().update(values)\n )\n if values['did_climb']:\n try:\n values['climb_start'] = int(values['climb_start'])\n values['climb_finish'] = int(values['climb_finish'])\n try:\n values['level_reached'] = int(values['level_reached'])\n except TypeError:\n new_values = self.__dict__.copy()\n new_values.update(values)\n raise ValidationError(\n 'You must select a level the robot climbed too',\n new_values\n )\n values['frisbees_dumped'] = int(values['frisbees_dumped'])\n except ValueError:\n new_values = self.__dict__.copy()\n new_values.update(values)\n raise ValidationError(\n 'All climbing related numbers must be numbers',\n new_values\n )\n if values['did_ground_load']:\n 
try:\n values['auto_frisbees_ground_loaded'] = int(\n values['auto_frisbees_ground_loaded'])\n except ValueError:\n new_values = self.__dict__.copy()\n new_values.update(values)\n raise ValidationError(\n 'All numbers of frisbees ground loaded must be numbers',\n new_values\n )\n return values", "def validate(self, data):\r\n budgets = Budget.objects.filter(\r\n campaign__pk=self.context['view'].kwargs['campaign_pk']\r\n )\r\n covered = budgets.values_list('start_date', 'end_date')\r\n for each in covered:\r\n sd_fail = (\r\n data['start_date'] >= each[0] and\r\n data['start_date'] <= each[1]\r\n )\r\n ed_fail = (\r\n data['end_date'] >= each[0] and\r\n data['end_date'] <= each[1]\r\n )\r\n over_fail = (\r\n each[0] >= data['start_date'] and\r\n each[1] <= data['end_date']\r\n )\r\n if sd_fail:\r\n raise serializers.ValidationError({\r\n 'start_date': \"Start date conflicts with existing budget\"\r\n })\r\n if ed_fail:\r\n raise serializers.ValidationError({\r\n 'end_date': \"End date conflicts with existing budget\"\r\n })\r\n if over_fail:\r\n raise serializers.ValidationError(\r\n \"Date conflicts with existing budget\"\r\n )\r\n return data", "def clean(self):\n cleaned_data = super(AuthorizenetSurveyPurchaseForm, self).clean()\n if cleaned_data.get(\"purchase_code\"):\n return cleaned_data\n\n for f in [\"card_number\", \"card_expiry\", \"card_ccv\"]:\n if not cleaned_data.get(f):\n self.add_error(f, \"Required for card payments\")\n return cleaned_data", "def test_census_area_create_agency_restriction(client, user_staff, superuser):\n client.force_login(user_staff)\n url = reverse('census-areas-create')\n staff_response = client.get(url)\n assert staff_response.status_code == 200\n\n field = staff_response.context['form'].fields['restrict_by_agency']\n assert field.initial is True\n assert isinstance(field.widget, HiddenInput)\n\n client.force_login(superuser)\n sup_response = client.get(url)\n assert sup_response.status_code == 200\n\n field = sup_response.context['form'].fields['restrict_by_agency']\n assert field.initial is True\n assert isinstance(field.widget, CheckboxInput)", "def test_context_data_with_valid_search_and_some_results(self):\n factories.SourceTraitFactory.create(i_description='lorem ipsum')\n response = self.client.get(self.get_url(), {'description': 'lorem'})\n qs = searches.search_source_traits(description='lorem')\n context = response.context\n self.assertIn('form', context)\n self.assertTrue(context['has_results'])\n self.assertIsInstance(context['results_table'], tables.SourceTraitTableFull)\n self.assertQuerysetEqual(qs, [repr(x) for x in context['results_table'].data])" ]
[ "0.5706519", "0.5386401", "0.5315525", "0.530953", "0.51058286", "0.51058286", "0.48647565", "0.4818601", "0.47097164", "0.46946898", "0.46497676", "0.4634922", "0.46242386", "0.45637044", "0.449959", "0.44929436", "0.44728902", "0.44613764", "0.44438702", "0.4442745", "0.44330058", "0.4426822", "0.4408353", "0.43987724", "0.43857944", "0.43857944", "0.43846133", "0.43533403", "0.4345847", "0.43383792", "0.43270802", "0.43234354", "0.43206164", "0.43179658", "0.42991236", "0.42925334", "0.42923397", "0.4290313", "0.42749974", "0.42627463", "0.42603013", "0.4256027", "0.42371538", "0.42315382", "0.42258146", "0.42215025", "0.42154354", "0.4211992", "0.42083305", "0.42079517", "0.42048466", "0.419826", "0.41880703", "0.41840306", "0.41818509", "0.4161982", "0.41619077", "0.41527897", "0.41422802", "0.4139899", "0.4136058", "0.41333628", "0.4132349", "0.41303486", "0.4127781", "0.4127455", "0.4119001", "0.41152492", "0.4115241", "0.41133142", "0.4113177", "0.41109475", "0.4098598", "0.40985823", "0.4094262", "0.4093622", "0.40880835", "0.4077336", "0.40758267", "0.40706965", "0.4070667", "0.406262", "0.40612164", "0.40598866", "0.40581584", "0.4052234", "0.40511668", "0.404885", "0.4047613", "0.40466687", "0.40380368", "0.4037637", "0.40376276", "0.40332305", "0.40329728", "0.40325627", "0.402533", "0.40234828", "0.40219554", "0.40188754" ]
0.62158144
0
this test only works if ANs fall within separate bins,
def test_iter_bins_API_input_missing_bin(pqo_STRING, args_dict, foreground, background, enrichment_method):
    # foreground, background, enrichment_method = fixture_fg_bg_iter_bins
    fg = format_for_REST_API(foreground[foreground.notnull()])
    bg = format_for_REST_API(background.loc[background.background.notnull(), "background"])
    in_ = format_for_REST_API(background.loc[background.intensity.notnull(), "intensity"])
    # ui = userinput.REST_API_input(pqo=pqo_STRING, foreground_string=fg, background_string=bg, background_intensity=in_, num_bins=NUM_BINS, enrichment_method=enrichment_method)
    args_dict_temp = args_dict.copy()
    args_dict_temp.update({"foreground":fg, "background":bg, "intensity":in_, "num_bins":NUM_BINS, "enrichment_method":enrichment_method})
    ui = userinput.REST_API_input(pqo_STRING, args_dict=args_dict_temp)
    counter = 0
    for ans, weight_fac in ui.iter_bins():
        # every weighting factor is a float
        assert isinstance(weight_fac, float) or isinstance(weight_fac, int)
        counter += 1
    # since integers instead of floats are being used for test data, the number of unique bins can be determined by sets
    num_min_iterations_expected = len({int(ele) for ele in ui.foreground["intensity"].tolist()})
    assert counter >= num_min_iterations_expected
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_bins(self):\n\n \n for filename in ['data/population_padang_1.asc', \n 'data/test_grid.asc']: \n \n R = read_coverage(filename)\n \n min, max = R.get_extrema() #use_numeric=True)\n \n for N in [2,3,5,7,10,16]:\n linear_intervals = R.get_bins(N=N, quantiles=False) \n \n assert linear_intervals[0] == min\n assert linear_intervals[-1] == max \n \n d = (max-min)/N\n for i in range(N):\n assert numpy.allclose(linear_intervals[i], min + i*d) \n \n \n quantiles = R.get_bins(N=N, quantiles=True)\n\n A = R.get_data(nan=True).flat[:] \n \n mask = numpy.logical_not(numpy.isnan(A)) # Omit NaN's\n l1 = len(A)\n A = A.compress(mask) \n l2 = len(A)\n \n if filename == 'data/test_grid.asc':\n # Check that NaN's were removed\n \n assert l1 == 35\n assert l2 == 30\n \n \n # Assert that there are no NaN's \n assert not numpy.alltrue(numpy.isnan(A))\n \n number_of_elements = len(A)\n average_elements_per_bin = number_of_elements/N\n \n # Count elements in each bin and check\n\n i0 = quantiles[0]\n for i1 in quantiles[1:]:\n count = numpy.sum((i0 < A) & (A < i1))\n if i0 == quantiles[0]:\n refcount = count\n \n \n if i1 < quantiles[-1]:\n # Number of elements in each bin must vary by no more than 1\n assert abs(count - refcount) <= 1 \n assert abs(count - average_elements_per_bin) <= 3\n \n \n else:\n # The last bin is allowed vary by more\n pass\n \n i0 = i1", "def test_bins(self):\n\n for filename in ['%s/population_padang_1.asc' % TESTDATA,\n '%s/test_grid.asc' % TESTDATA]:\n\n R = read_layer(filename)\n rmin, rmax = R.get_extrema()\n\n for N in [2, 3, 5, 7, 10, 16]:\n linear_intervals = R.get_bins(N=N, quantiles=False)\n\n assert linear_intervals[0] == rmin\n assert linear_intervals[-1] == rmax\n\n d = (rmax - rmin) / N\n for i in range(N):\n assert numpy.allclose(linear_intervals[i], rmin + i * d)\n\n quantiles = R.get_bins(N=N, quantiles=True)\n A = R.get_data(nan=True).flat[:]\n\n mask = numpy.logical_not(numpy.isnan(A)) # Omit NaN's\n l1 = len(A)\n A = A.compress(mask)\n l2 = len(A)\n\n if filename == '%s/test_grid.asc' % TESTDATA:\n # Check that NaN's were removed\n assert l1 == 35\n assert l2 == 30\n\n # Assert that there are no NaN's\n assert not numpy.alltrue(numpy.isnan(A))\n\n number_of_elements = len(A)\n average_elements_per_bin = number_of_elements / N\n\n # Count elements in each bin and check\n i0 = quantiles[0]\n for i1 in quantiles[1:]:\n count = numpy.sum((i0 < A) & (A < i1))\n if i0 == quantiles[0]:\n refcount = count\n\n if i1 < quantiles[-1]:\n # Number of elements in each bin must vary by no\n # more than 1\n assert abs(count - refcount) <= 1\n assert abs(count - average_elements_per_bin) <= 3\n else:\n # The last bin is allowed vary by more\n pass\n\n i0 = i1", "def test_output_range(self):\n byt = bytscl(self.array1)\n outside = (byt < 0) | (byt > 255)\n total = numpy.sum(outside)\n self.assertEqual(total, 0)", "def test_numpy_bins(self):\n # Load the data from the fixture\n data = load_occupancy(return_dataset=True)\n X, y = data.to_numpy()\n\n visualizer = BalancedBinningReference()\n visualizer.fit(y)\n visualizer.finalize()\n self.assert_images_similar(visualizer, tol=0.5)", "def bin_stats(x,y,xbins,stat='average'):\n nbins=len(xbins)\n if stat=='average' or stat=='mean': func=mean\n elif stat=='median': func=median\n elif stat=='rms' or stat=='std' : func=std\n elif stat=='std_robust' or stat=='rms_robust': func=std_robust\n elif stat=='mean_robust': func=mean_robust\n elif stat=='median_robust': func=median_robust\n elif stat=='sum': func=sum\n results=[]\n for i 
in range(nbins):\n if i<nbins-1:\n good=(greater_equal(x,xbins[i])\n *less(x,xbins[i+1]))\n else: good=(greater_equal(x,xbins[-1]))\n if sum(good)>1.: results.append(func(compress(good,y)))\n else:\n results.append(0.)\n print('Bin starting at xbins[%i] has %i points' % (i,sum(good)))\n return array(results)", "def azAverage(self,rads,vals,nbins=50):\n\t\ttry:\n\t\t\tavVals = []\n\t\t\tbins = np.linspace(0,self.annulus,nbins)\n\t\t\tfor i, bin in enumerate(bins[:-1]):\n\t\t\t\tav = np.max(vals[(rads>bins[i]) & (rads<=bins[i+1])])\n\t\t\t\tavVals.append(av)\n\t\texcept:\n\t\t\t#if bin size is too small, and some bins have no particles, make bins bigger\n\t\t\tnbins=25\t\t\t\n\t\t\tavVals = []\n\t\t\tbins = np.linspace(0,self.annulus,nbins)\n\t\t\tfor i, bin in enumerate(bins[:-1]):\n\t\t\t\ttry:\n\t\t\t\t\tav = np.max(vals[(rads>bins[i]) & (rads<=bins[i+1])])\n\t\t\t\texcept:\n\t\t\t\t\tav = 0\n\t\t\t\tavVals.append(av)\n\t\treturn bins[:-1], avVals", "def __ptBin(self,pt):\n if len(self._ptbins)>0:\n return reduce(lambda x,y:x+y,map(lambda x:pt>x,self._ptbins))\n else:\n return 0", "def test_nonmonotonic_bins(self):\n\n with pytest.raises(ValueError) as verr:\n avg.median2D(self.testInst, np.array([0., 300., 100.]), 'longitude',\n np.array([0., 24., 13.]), 'mlt',\n ['dummy1', 'dummy2', 'dummy3'], auto_bin=False)\n\n estr = 'bins must be monotonically increasing or decreasing'\n assert str(verr).find(estr) >= 0\n\n return", "def bininator(magbins, dlt, mags, err):\n meds = np.zeros_like(magbins)\n for i in range(len(magbins)):\n ind = (mags > magbins[i] - dlt) & (mags <= magbins[i] + dlt)\n ind = ind & (err < 10.)\n meds[i] = np.median(err[ind])\n return meds", "def test_nonmonotonic_bins(self):\n\n with pytest.raises(ValueError) as verr:\n avg.median1D(self.testInst, [0, 13, 5], self.test_label,\n self.test_data, auto_bin=False)\n\n estr = 'bins must be monotonically increasing or decreasing'\n assert str(verr).find(estr) >= 0\n\n return", "def test_bin_edges(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np:\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame({'A': [0, 1, 2, 3, 4, 3, 2, 1, 1, 1]})\n df2 = pd.DataFrame({'A': [2, 3, 4, 5, 7, 4, 6, 5, 7, 8]})\n\n # building test histograms\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist5 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist6 = hg.Bin(num=201, low=0.0, high=1.005)\n\n # fill them\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n import numpy as np\n np.testing.assert_array_equal(hist2.bin_edges(), [0., 1., 2., 3., 4., 5.])\n np.testing.assert_array_equal(hist3.bin_edges(), [2., 3., 4., 5., 6., 7., 8., 9.])\n np.testing.assert_array_equal(hist4.bin_edges(), [0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n np.testing.assert_array_equal(hist5.bin_edges(), [0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n\n np.testing.assert_array_equal(hist2.bin_edges(low=2.1, high=11.9), [\n 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12.])\n np.testing.assert_array_equal(hist3.bin_edges(low=1.1, high=6), [1., 2., 3., 4., 5., 6.])\n np.testing.assert_array_equal(hist4.bin_edges(low=2.1, high=11.9), [\n 2., 3., 4., 5., 6., 7., 8., 9., 10.])\n np.testing.assert_array_equal(hist5.bin_edges(low=1.1, high=5.4), [1., 2., 3., 4., 5., 6.])\n\n assert len(hist6.bin_edges()) == 202\n 
assert len(hist6.bin_edges(low=0.2089, high=0.9333)) == 147\n assert len(hist6.bin_edges(low=0.205, high=0.935)) == 147", "def bincalc(nbin=0.1,bmin=5,bmax=2000):\n\n logbmin=np.log10(bmin)\n logbmax=np.log10(bmax)\n\n logbins=np.arange(logbmin,logbmax,nbin)\n\n bins=10**logbins\n\n #bins=np.linspace(bmin,bmax,60)\n return (bins)", "def test_bins(self):\n min_val = 0\n max_val = 1\n buckets = 10\n values_per_bucket = 10\n\n import numpy\n\n data = list(numpy.linspace(min_val, max_val, buckets * values_per_bucket))\n bins = numpy.linspace(min_val, max_val + sys.float_info.epsilon, buckets + 1)\n digitized = numpy.digitize(data, bins)\n counts = numpy.bincount(digitized)\n self.assertEqual(buckets + 1, len(counts))\n self.assertEqual(0, counts[0])\n for bucket in counts[1:]:\n self.assertEqual(values_per_bucket, bucket)", "def test_bin_entries(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np:\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame(\n {'A': [0, 1, 2, 3, 4, 3, 2, 1, 1, 1], 'C': ['f1', 'f3', 'f4', 'f3', 'f4', 'f2', 'f2', 'f1', 'f3', 'f4']})\n df2 = pd.DataFrame(\n {'A': [2, 3, 4, 5, 7, 4, 6, 5, 7, 8], 'C': ['f7', 'f3', 'f5', 'f8', 'f9', 'f2', 'f3', 'f6', 'f7', 'f7']})\n\n # building 1d-, 2d-, and 3d-histogram (iteratively)\n hist0 = hg.Categorize(unit('C'))\n hist1 = hg.Categorize(unit('C'))\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n hist5 = hg.Bin(num=10, low=0.0, high=10., quantity=unit('A'))\n\n # fill them\n hist0.fill.numpy(df1)\n hist1.fill.numpy(df2)\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n labels0 = hist0.bin_labels()\n labels1 = hist1.bin_labels()\n centers2 = hist2.bin_centers()\n centers3 = hist3.bin_centers()\n centers = hist4.bin_centers()\n\n import numpy as np\n np.testing.assert_array_equal(hist0.bin_entries(), [2., 2., 3., 3.])\n np.testing.assert_array_equal(hist1.bin_entries(), [1., 2., 1., 1., 3., 1., 1.])\n np.testing.assert_array_equal(hist0.bin_entries(labels=labels1), [2., 3., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist1.bin_entries(labels=labels0), [0., 1., 2., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(), [1., 4., 2., 2., 1.])\n np.testing.assert_array_equal(hist3.bin_entries(), [1., 1., 2., 2., 1., 2., 1.])\n np.testing.assert_array_equal(hist4.bin_entries(), [1., 4., 2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist5.bin_entries(), [0., 0., 1., 1., 2., 2., 1., 2., 1., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(xvalues=centers3), [2., 2., 1., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(xvalues=centers2), [0., 0., 1., 1., 2.])\n np.testing.assert_array_equal(hist2.bin_entries(xvalues=centers), [\n 1., 4., 2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(xvalues=centers), [\n 0., 0., 1., 1., 2., 2., 1., 2., 1., 0.])\n\n np.testing.assert_array_equal(hist2.bin_entries(low=2.1, high=11.9), [\n 2., 2., 1., 0., 0., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist3.bin_entries(low=1.1, high=5.4), [0., 1., 1., 2., 2.])\n np.testing.assert_array_equal(hist4.bin_entries(low=2.1, high=11.9), [2., 2., 1., 0., 0., 0., 0., 0.])\n np.testing.assert_array_equal(hist5.bin_entries(low=1.1, high=5.4), [0., 1., 1., 2., 2.])", "def hist_bin_opt (x, minbin=20, maxbin=600, 
spacing=10, N_trials=1):\n bin_checks = np.arange(minbin, maxbin, spacing)\n # bin_checks = np.linspace(150, 300, 16)\n costs = np.zeros(len(bin_checks))\n i = 0\n # this might be vectorizable in np\n for n_bins in bin_checks:\n # use np.histogram to do the numerical minimization\n pdf, bin_edges = np.histogram(x, n_bins)\n # calculate bin width\n # some discrepancy here but should be fine\n w_bin = np.unique(np.diff(bin_edges))\n if len(w_bin) > 1: w_bin = w_bin[0]\n # calc mean and var\n kbar = np.mean(pdf)\n kvar = np.var(pdf)\n # calc cost\n costs[i] = (2.*kbar - kvar) / (N_trials * w_bin)**2.\n i += 1\n # find the bin size corresponding to a minimization of the costs\n bin_opt_list = bin_checks[costs.min() == costs]\n bin_opt = bin_opt_list[0]\n return bin_opt", "def metric_iaf_binned(self, x, bins=10): \n data = np.asarray(x['data'])\n iaf = [10.0] * data.shape[0]\n for ch, ch_data in enumerate(data):\n pxx, freqs = mlab.psd(ch_data, Fs=128.0, NFFT=256)\n alpha_mask = np.abs(freqs - 10) <= 2.0\n alpha_pxx = 10*np.log10(pxx[alpha_mask])\n alpha_pxx = scipy.signal.detrend(alpha_pxx)\n # iaf[ch] = alpha_pxx.shape\n iaf[ch] = freqs[alpha_mask][np.argmax(alpha_pxx)]\n iaf = float(np.mean(iaf))\n bin = int(np.digitize((iaf,), np.linspace(8,12, bins+1)))\n return (bin, iaf)", "def bins_match (a, b):\n return 0 == (\n np.sum ((a.xbins - b.xbins)**2)\n + np.sum ((a.ybins - b.ybins)**2) )", "def conceptcover(bin_arr, limit=1, uncovered=0.1):\n arr = np.copy(bin_arr)\n arr_sum = np.sum(arr)\n result = []\n while True:\n k = kernel(arr)\n i = intent(bin_arr, k)\n e = extent(bin_arr, i)\n if len(e)*len(i) < limit or (e, i) in result: break\n result.append((e, i))\n arr = removed(arr, e, i)\n if np.sum(arr)/arr_sum < uncovered: break\n return result", "def __check(self):\n if len(self._data)!=len(self._ptbins)+1: \n raise IndexError('Pt bins mismatch')\n for ptbin in self._data:\n if len(ptbin)!=len(self._etabins)+1:\n raise IndexError('Eta bins mismatch')", "def bineval(ratings, gold, pred, increment=50, bins = [0, 400, 500, 600, 700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1600, 1700, 1800, 1900, 2000, 2100, 2200, 2300, 2400, 2500, 2600, 2700, 2800]):\n\n pw_accs = [(r, int(b==c)) for r,b,c in sorted(zip(ratings, gold, pred))]\n \n binned_accs = []\n current_acc = 0\n denom = 0\n bin_id = 0\n \n for r, acc in pw_accs:\n while r > bins[bin_id] + increment:\n bin_id += 1\n if denom == 0:\n binned_accs.append(float(\"NaN\"))\n else:\n binned_accs.append(current_acc/float(denom))\n current_acc = 0\n denom = 0\n current_acc += acc\n denom += 1\n\n if denom == 0:\n binned_accs.append(float(\"NaN\"))\n else:\n binned_accs.append(current_acc/float(denom))\n\n for b, acc in zip(bins, binned_accs):\n print b, acc", "def test_pandas_bins(self):\n # Load the data from the fixture\n data = load_occupancy(return_dataset=True)\n X, y = data.to_pandas()\n\n visualizer = BalancedBinningReference()\n visualizer.fit(y)\n visualizer.finalize()\n self.assert_images_similar(visualizer, tol=0.5)", "def bin_statistics(data,bin_against,bin_edges,data_signal=[]):\n\n assert isinstance(data, pd.DataFrame), 'data must be of type pd.DataFram' \n try: bin_against = np.asarray(bin_against) \n except: 'bin_against must be of type np.ndarray'\n try: bin_edges = np.asarray(bin_edges)\n except: 'bin_edges must be of type np.ndarray' \n\n # Determine variables to analyze\n if len(data_signal)==0: # if not specified, bin all variables\n data_signal=data.columns.values\n else:\n assert isinstance(data_signal, list), 'must be 
of type list'\n\n # Pre-allocate list variables\n bin_stat_list = []\n bin_std_list = []\n\n # loop through data_signal and get binned means\n for signal_name in data_signal:\n # Bin data\n bin_stat = binned_statistic(bin_against,data[signal_name],\n statistic='mean',bins=bin_edges)\n # Calculate std of bins\n std = []\n stdev = pd.DataFrame(data[signal_name])\n stdev.set_index(bin_stat.binnumber,inplace=True)\n for i in range(1,len(bin_stat.bin_edges)):\n try:\n temp = stdev.loc[i].std(ddof=0)\n std.append(temp[0])\n except:\n std.append(np.nan)\n bin_stat_list.append(bin_stat.statistic)\n bin_std_list.append(std)\n \n # Convert to DataFrames\n bin_mean = pd.DataFrame(np.transpose(bin_stat_list),columns=data_signal)\n bin_std = pd.DataFrame(np.transpose(bin_std_list),columns=data_signal)\n\n # Check for nans \n if bin_mean.isna().any().any():\n print('Warning: some bins may be empty!')\n\n return bin_mean, bin_std", "def test_range_argument(self):\n # we test with range from 0 to 2, with 3 bins -> [0, 0.66, 1.33, 2] exp. bins\n bins_range = (0, 2)\n bins_exp = np.array([0, 2 / 3, 1 + 1 / 3, 2])\n hist_exp = np.array([1, 2, 0])\n\n bin_edges, hist, _, _ = hist_w_unc(\n self.input,\n bins=self.n_bins,\n bins_range=bins_range,\n normed=False,\n )\n\n # check if we end up with the same bin edges anyway\n np.testing.assert_array_almost_equal(bins_exp, bin_edges)\n np.testing.assert_array_almost_equal(hist_exp, hist)", "def test_binops(self):", "def __call__(self, n_bins, segment, elements):\n\n # n_bins\n assert type(n_bins) is int\n assert n_bins > 0\n\n # segment\n assert type(segment) is list or type(segment) is tuple\n assert len(segment) == 2\n assert np.isscalar(segment[0]) and np.isscalar(segment[1])\n assert segment[0] < segment[1]\n\n # elements\n assert type(elements) is np.ndarray, f\"elements should be an np.ndarray, instead of {type(elements)}\"\n assert elements.dtype == np.number\n\n raise NotImplemented", "def test_range_argument_ignored(self):\n bins_range = (1, 2)\n\n bin_edges, hist, _, _ = hist_w_unc(\n self.input,\n bins=self.bin_edges,\n bins_range=bins_range,\n normed=False,\n )\n\n # check if we end up with the same bin edges anyway\n np.testing.assert_array_almost_equal(self.bin_edges, bin_edges)\n np.testing.assert_array_almost_equal(self.hist, hist)", "def test_irregular(self):\n import numpy as np\n import histogrammar\n\n h = histogrammar.IrregularlyBin([0, 10, 20, 40, 100])\n h.fillnumpy([-5, 5, 5, 50, 10, 100, 1000, 50, 50])\n\n np.testing.assert_array_equal(h.bin_entries(), [1., 2., 1., 0., 3., 2.])\n np.testing.assert_array_equal(h.bin_edges(), [float('-inf'), 0., 10., 20., 40., 100., float('inf')])\n np.testing.assert_array_equal(h.bin_centers(), [float('-inf'), 5., 15., 30., 70., float('inf')])\n assert h.num_bins() == 6\n assert h.n_bins == 6\n np.testing.assert_almost_equal(h.mpv, 70.)\n\n np.testing.assert_array_equal(h.bin_entries(10, 40), [1., 0.])\n np.testing.assert_array_equal(h.bin_edges(10, 40), [10., 20., 40.])\n np.testing.assert_array_equal(h.bin_centers(10, 40), [15., 30.])\n assert h.num_bins(10, 40) == 2\n\n np.testing.assert_array_equal(h.bin_entries(5, 110), [2., 1., 0., 3., 2.])\n np.testing.assert_array_equal(h.bin_edges(5, 110), [0., 10., 20., 40., 100., float('inf')])\n np.testing.assert_array_equal(h.bin_centers(5, 110), [5., 15., 30., 70., float('inf')])\n assert h.num_bins(5, 110) == 5", "def _get_optimal_threshold(arr, num_bins=1001, num_quantized_bins=255):\n if not isinstance(arr, np.ndarray):\n raise 
TypeError('get_optimal_threshold only supports input type of np.ndarray,'\n ' while received type=%s' % (str(type(arr))))\n min_val = np.min(arr)\n max_val = np.max(arr)\n th = max(abs(min_val), abs(max_val))\n\n hist, hist_edges = np.histogram(arr, bins=num_bins, range=(-th, th))\n zero_bin_idx = num_bins // 2\n num_half_quantized_bins = num_quantized_bins // 2\n assert np.allclose(hist_edges[zero_bin_idx] + hist_edges[zero_bin_idx + 1],\n 0, rtol=1e-5, atol=1e-7)\n\n thresholds = np.zeros(num_bins // 2 + 1 - num_quantized_bins // 2)\n divergence = np.zeros_like(thresholds)\n quantized_bins = np.zeros(num_quantized_bins, dtype=np.int32)\n # i means the number of bins on half axis excluding the zero bin.\n for i in range(num_quantized_bins // 2,\n num_bins // 2 + 1):\n p_bin_idx_start = zero_bin_idx - i\n p_bin_idx_stop = zero_bin_idx + i + 1\n thresholds[i - num_half_quantized_bins] = hist_edges[p_bin_idx_stop]\n sliced_nd_hist = hist[p_bin_idx_start:p_bin_idx_stop]\n\n # generate reference distribution p\n p = sliced_nd_hist.copy()\n assert p.size % 2 == 1\n assert p.size >= num_quantized_bins\n # put left outlier count in p[0]\n left_outlier_count = np.sum(hist[0:p_bin_idx_start])\n p[0] += left_outlier_count\n # put right outlier count in p[-1]\n right_outlier_count = np.sum(hist[p_bin_idx_stop:])\n p[-1] += right_outlier_count\n # is_nonzeros[k] indicates whether hist[k] is nonzero\n is_nonzeros = (sliced_nd_hist != 0).astype(np.int32)\n\n # calculate how many bins should be merged to generate quantized distribution q\n num_merged_bins = p.size // num_quantized_bins\n # merge hist into num_quantized_bins bins\n for j in range(num_quantized_bins):\n start = j * num_merged_bins\n stop = start + num_merged_bins\n quantized_bins[j] = sliced_nd_hist[start:stop].sum()\n quantized_bins[-1] += sliced_nd_hist[num_quantized_bins * num_merged_bins:].sum()\n # expand quantized_bins into p.size bins\n q = np.zeros(p.size, dtype=np.float32)\n for j in range(num_quantized_bins):\n start = j * num_merged_bins\n if j == num_quantized_bins - 1:\n stop = -1\n else:\n stop = start + num_merged_bins\n norm = is_nonzeros[start:stop].sum()\n if norm != 0:\n q[start:stop] = float(quantized_bins[j]) / float(norm)\n q[sliced_nd_hist == 0] = 0\n p = _smooth_distribution(p)\n # There is a chance that q is an invalid probability distribution.\n try:\n q = _smooth_distribution(q)\n except ValueError:\n divergence[i - num_half_quantized_bins] = float(\"inf\")\n else:\n divergence[i - num_half_quantized_bins] = stats.entropy(p, q)\n quantized_bins[:] = 0\n\n min_divergence_idx = np.argmin(divergence)\n min_divergence = divergence[min_divergence_idx]\n opt_th = thresholds[min_divergence_idx]\n return min_val, max_val, min_divergence, opt_th", "def bins_match (a, b):\n return np.sum ((a.bins - b.bins)**2) == 0", "def __call__(self, n_bins, segment, elements):\n\n # n_bins\n assert type(n_bins) is int\n assert n_bins > 0\n\n # segment\n assert type(segment) is list or type(segment) is tuple\n assert len(segment) == 2\n assert np.isscalar(segment[0]) and np.isscalar(segment[1])\n assert segment[0] < segment[1]\n\n # elements\n assert type(elements) is np.ndarray, f\"elements should be an np.ndarray, instead of {type(elements)}\"\n assert elements.dtype == np.number\n\n sorted_elements = np.sort(elements)\n\n bin_card = int(floor(elements.shape[0]/n_bins))\n\n bin_boundaries = [segment[0]]\n\n for i in range(1, n_bins):\n boundary_l = sorted_elements[i*bin_card - 1]\n boundary_r = sorted_elements[i * bin_card]\n boundary 
= (boundary_l+boundary_r)/2\n\n bin_boundaries.append(boundary)\n\n bin_boundaries.append(segment[1])\n\n return np.array(bin_boundaries)", "def __init__(self, x, bin_edges, Nsamp):\n raw_vals, bin_edges = np.histogram(x, bins=bin_edges, normed=False)\n self.bin_edges = bin_edges\n self.bin_widths = np.diff(self.bin_edges)\n self.bin_centers = 0.5*(self.bin_edges[:-1] + self.bin_edges[1:])\n \n P, low, high = np.array([BinomialErrors(v, Nsamp) for v in raw_vals]).T\n self.raw_vals = P\n self.raw_low = low\n self.raw_high = high\n self.complete_vals = None\n self.malm_vals = None\n return", "def binarize(adata, copy=False):\n threshold, upper, lower = 1.0, 1.0, 0.0\n admatrix = adata.X\n admatrix = np.where(admatrix>threshold, upper, lower)\n if copy:\n adata2 = adata.copy()\n adata2.X = admatrix\n return(adata2)\n else:\n adata.X = admatrix", "def amp_bin(raw, depth, low, high):\n\n max_in_depth = 2 ** depth\n bin_range = np.linspace(low, high, max_in_depth)\n data = []\n for b in raw:\n i = 0\n while i <= (max_in_depth - 2):\n if (bin_range[i] <= b < bin_range[i+1]):\n data.append(i)\n i += 1\n break\n elif (b <= low):\n data.append(0)\n break\n elif (b >= high):\n data.append(max_in_depth - 1)\n break\n else:\n i += 1\n return np.array(data)", "def test_num_bins(self):\n with Pandas() as pd:\n if pd is None:\n return\n with Numpy() as np: # noqa\n if numpy is None:\n return\n sys.stderr.write(\"\\n\")\n\n df1 = pd.DataFrame({'A': [0, 2, 4, 5, 7, 9, 11, 13, 13, 15]})\n df2 = pd.DataFrame({'A': [2, 4, 4, 6, 8, 7, 10, 14, 17, 19]})\n\n # building 1d-, 2d-, and 3d-histogram (iteratively)\n hist2 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist3 = hg.SparselyBin(origin=0.0, binWidth=1.0, quantity=unit('A'))\n hist4 = hg.Bin(num=20, low=0.0, high=20., quantity=unit('A'))\n hist5 = hg.Bin(num=20, low=0.0, high=20., quantity=unit('A'))\n hist6 = hg.Bin(num=201, low=0.0, high=1.005)\n\n # fill them\n hist2.fill.numpy(df1)\n hist3.fill.numpy(df2)\n hist4.fill.numpy(df1)\n hist5.fill.numpy(df2)\n\n assert hist2.num_bins() == 16\n assert hist3.num_bins() == 18\n assert hist4.num_bins() == 20\n assert hist5.num_bins() == 20\n assert hist6.num_bins() == 201\n\n assert hist2.num_bins(low=10, high=25) == 15\n assert hist3.num_bins(low=10, high=25) == 15\n assert hist4.num_bins(low=10, high=25) == 10\n assert hist5.num_bins(low=10, high=25) == 10\n assert hist6.num_bins(low=0.2089, high=0.9333) == 146\n\n assert hist2.num_bins(low=-10, high=28) == 38\n assert hist3.num_bins(low=-10, high=28) == 38\n assert hist4.num_bins(low=-10, high=28) == 20\n assert hist5.num_bins(low=-10, high=28) == 20\n assert hist6.num_bins(low=0.205, high=0.935) == 146", "def __etaBin(self,eta):\n if len(self._etabins)>0:\n return reduce(lambda x,y:x+y,map(lambda x:abs(eta)>x,self._etabins))\n else:\n return 0", "def test08(self):\n a = np.arange(1, 11)\n b = bcolz.carray(a)\n ul = [v for v in a if v <= 5]\n u = b.where(a <= 5)\n wl = [v for v in a if v <= 6]\n w = b.where(a <= 6)\n self.assertEqual(ul, list(u))\n self.assertEqual(wl, list(w))", "def autobin_stats(x,y,n_bins=8,stat='average',n_points=None):\n \n if not ascend(x):\n ix=argsort(x)\n x=take(x,ix)\n y=take(y,ix)\n n=len(x)\n if n_points==None: \n #This throws out some points\n n_points=n/n_bins\n else: \n n_bins=n/n_points\n #if there are more that 2 points in the last bin, add another bin\n if n%n_points>2: n_bins=n_bins+1\n \n if n_points<=1:\n print('Only 1 or less points per bin, output will be sorted input vector with rms==y')\n return 
x,y\n xb,yb=[],[]\n \n #print 'stat', stat\n if stat=='average' or stat=='mean': func=mean\n elif stat=='median': func=median\n elif stat=='rms' or stat=='std' : func=std\n elif stat=='std_robust' or stat=='rms_robust': func=std_robust\n elif stat=='mean_robust': func=mean_robust\n elif stat=='median_robust': func=median_robust\n elif stat=='p2p': func=p2p # --DC\n elif stat=='min': func=min # --DC\n elif stat=='max': func=max # --DC\n \n for i in range(n_bins):\n xb.append(mean(x[i*n_points:(i+1)*n_points]))\n if func==std and n_points==2:\n print('n_points==2; too few points to determine rms')\n print('Returning abs(y1-y2)/2. in each bin as rms')\n yb.append(abs(y[i*n_points]-y[i*n_points+1])/2.)\n else:\n yb.append(func(y[i*n_points:(i+1)*n_points]))\n if i>2 and xb[-1]==xb[-2]: \n yb[-2]=(yb[-2]+yb[-1])/2.\n xb=xb[:-1]\n yb=yb[:-1]\n return array(xb),array(yb)", "def affect(self, bin_boundaries, element):\n\n # bin_boundaries\n assert type(bin_boundaries) is np.ndarray\n\n # element\n assert isinstance(element, (int, float, np.number)), \\\n \"element = {} should be of a numeric type, not {}.\".format(element, type(element))\n assert bin_boundaries[0] <= element <= bin_boundaries[-1]\n\n n_bins = len(bin_boundaries) - 1\n m = floor(element * n_bins) if floor(element * n_bins) < n_bins else n_bins - 1\n\n return m", "def __init__(self, bins):\n self.bins = bins", "def test_hgbat(self):\n fun = get_problem('hgbat', self.dimension, -100, 100)\n self.assertAlmostEqual(fun(self.array10), 61.91502622129181, delta=60)", "def compute_acc_bin(conf_thresh_lower, conf_thresh_upper, conf, pred, true):\n filtered_tuples = [x for x in zip(pred, true, conf) if x[2] > conf_thresh_lower and x[2] <= conf_thresh_upper]\n if len(filtered_tuples) < 1:\n return 0,0,0\n else:\n correct = len([x for x in filtered_tuples if x[0] == x[1]]) # How many correct labels\n len_bin = len(filtered_tuples) # How many elements falls into given bin\n avg_conf = sum([x[2] for x in filtered_tuples]) / len_bin # Avg confidence of BIN\n accuracy = float(correct)/len_bin # accuracy of BIN\n return accuracy, avg_conf, len_bin", "def compute_acc_bin(conf_thresh_lower, conf_thresh_upper, conf, pred, true):\n filtered_tuples = [x for x in zip(pred, true, conf) if x[2] > conf_thresh_lower and x[2] <= conf_thresh_upper]\n if len(filtered_tuples) < 1:\n return 0,0,0\n else:\n correct = len([x for x in filtered_tuples if x[0] == x[1]]) # How many correct labels\n len_bin = len(filtered_tuples) # How many elements falls into given bin\n avg_conf = sum([x[2] for x in filtered_tuples]) / len_bin # Avg confidence of BIN\n accuracy = float(correct)/len_bin # accuracy of BIN\n return accuracy, avg_conf, len_bin", "def II_eq_counts(tobin_series, num_bins):\n num_pbin = int(len(tobin_series) / num_bins)\n obs_list = tobin_series.sort_values().tolist()\n upper_bounds = [obs_list[(i + 1) * num_pbin] for i in range(num_bins)]\n lower_bounds = [0]\n lower_bounds += upper_bounds[:-1]\n return pd.IntervalIndex.from_arrays(lower_bounds, upper_bounds), upper_bounds", "def bin_data(bins, data2bin, bindata, mode='mean', nbinned=False):\n assert mode in ['mean', 'median', 'std', 'max', 'min'], \"mode not recognized: {}\".format(mode)\n digitized = np.digitize(bindata, bins)\n binned = np.zeros(len(bins)) * np.nan\n if nbinned: \n numbinned = np.zeros(len(bins))\n\n if mode == 'mean':\n for i, _ in enumerate(bins):\n binned[i] = np.nanmean(data2bin[np.logical_and(np.isfinite(bindata), digitized == i+1)])\n if nbinned:\n numbinned[i] = 
np.count_nonzero(np.logical_and(np.isfinite(data2bin), digitized == i+1))\n elif mode == 'median':\n for i, _ in enumerate(bins):\n binned[i] = np.nanmedian(data2bin[np.logical_and(np.isfinite(bindata), digitized == i+1)])\n if nbinned:\n numbinned[i] = np.count_nonzero(np.logical_and(np.isfinite(data2bin), digitized == i+1))\n elif mode == 'std':\n for i, _ in enumerate(bins):\n binned[i] = np.nanstd(data2bin[np.logical_and(np.isfinite(bindata), digitized == i+1)])\n if nbinned:\n numbinned[i] = np.count_nonzero(np.logical_and(np.isfinite(data2bin), digitized == i+1))\n elif mode == 'max':\n for i, _ in enumerate(bins):\n binned[i] = np.nanmax(data2bin[np.logical_and(np.isfinite(bindata), digitized == i+1)])\n if nbinned:\n numbinned[i] = np.count_nonzero(np.logical_and(np.isfinite(data2bin), digitized == i+1))\n elif mode == 'min':\n for i, _ in enumerate(bins):\n binned[i] = np.nanmin(data2bin[np.logical_and(np.isfinite(bindata), digitized == i+1)])\n if nbinned:\n numbinned[i] = np.count_nonzero(np.logical_and(np.isfinite(data2bin), digitized == i+1))\n else:\n raise ValueError('mode must be mean, median, std, max, or min')\n \n if nbinned:\n return np.array(binned), np.array(numbinned)\n else:\n return np.array(binned)", "def binarize(X, *, threshold=..., copy=...):\n ...", "def process_pain(x, lb, ub):\n x = x.abs()\n x.loc[(x > ub)] = 8\n x.loc[(x < lb) | (x > ub)] = np.nan\n return x", "def calculate_bin_edges(n_bins, geo):\n #Gefittete offsets: x,y,factor: factor*(x+x_off)\n #[6.19, 0.064, 1.0128]\n \n #print \"Reading detector geometry in order to calculate the detector dimensions from file \" + fname_geo_limits\n #geo = np.loadtxt(fname_geo_limits)\n\n # derive maximum and minimum x,y,z coordinates of the geometry input [[first_OM_id, xmin, ymin, zmin], [last_OM_id, xmax, ymax, zmax]]\n geo_limits = np.nanmin(geo, axis = 0), np.nanmax(geo, axis = 0)\n #print ('Detector dimensions [[first_OM_id, xmin, ymin, zmin], [last_OM_id, xmax, ymax, zmax]]: ' + str(geo_limits))\n\n x_bin_edges = np.linspace(geo_limits[0][1] - 9.95, geo_limits[1][1] + 9.95, num=n_bins[0] + 1) #try to get the lines in the bin center 9.95*2 = average x-separation of two lines\n y_bin_edges = np.linspace(geo_limits[0][2] - 9.75, geo_limits[1][2] + 9.75, num=n_bins[1] + 1) # Delta y = 19.483\n z_bin_edges = np.linspace(geo_limits[0][3] - 4.665, geo_limits[1][3] + 4.665, num=n_bins[2] + 1) # Delta z = 9.329\n\n #offset_x, offset_y, scale = [6.19, 0.064, 1.0128]\n #x_bin_edges = (x_bin_edges + offset_x )*scale\n #y_bin_edges = (y_bin_edges + offset_y )*scale\n\n #calculate_bin_edges_test(geo, y_bin_edges, z_bin_edges) # test disabled by default. Activate it, if you change the offsets in x/y/z-bin-edges\n\n return x_bin_edges, y_bin_edges, z_bin_edges", "def _loop_over_entries(x_bins, y_bins, used_eff, n_out_bins):\n sys.stdout.write('finding eff at rejection... ')\n sys.stdout.flush()\n\n valid_x = (x_bins >= 0) & (x_bins < n_out_bins) \n valid_y = (y_bins >= 0) & (y_bins < n_out_bins)\n\n valid_indices = np.flatnonzero(valid_x & valid_y)\n\n x_bins = x_bins[valid_indices]\n y_bins = y_bins[valid_indices]\n used_eff = used_eff[valid_indices]\n\n eff_array = np.ones((n_out_bins,n_out_bins)) * -1\n for x_bin, y_bin, z in zip(x_bins, y_bins, used_eff): \n # y_bin comes first because that's what imshow wants... 
\n eff_array[y_bin,x_bin] = max(z, eff_array[y_bin,x_bin])\n\n sys.stdout.write('done\\n')\n return eff_array", "def __call__(self, n_bins, segment, elements):\n\n # n_bins\n assert type(n_bins) is int\n assert n_bins > 0\n\n # segment\n assert type(segment) is list or type(segment) is tuple\n assert len(segment) == 2\n assert np.isscalar(segment[0]) and np.isscalar(segment[1])\n assert segment[0] < segment[1]\n\n # elements\n assert type(elements) is np.ndarray, f\"elements should be an np.ndarray, instead of {type(elements)}\"\n assert elements.dtype == np.number\n\n return np.array([segment[0] + i / n_bins * (segment[1] - segment[0])\n for i in range(n_bins)]\n + [float(segment[1])])", "def bin_data(y, num_bins, std_away):\n mean = np.mean(y)\n std = np.std(y)\n pitch_shifts = np.arange(-num_bins, num_bins + 1)\n thresholds = (std * std_away) * pitch_shifts + mean\n\n result = []\n for point in y:\n if point < thresholds[0]:\n result.append(pitch_shifts[0] - 1)\n elif point > thresholds[-1]:\n result.append(pitch_shifts[-1] + 1)\n else:\n for i in range(len(thresholds) - 1):\n if point >= thresholds[i] and point < thresholds[i + 1]:\n result.append(i - num_bins)\n return np.array(result)", "def _determine_histogram_bins(self, ma_maps):\n if isinstance(ma_maps, list):\n ma_values = self.masker.transform(ma_maps)\n elif isinstance(ma_maps, np.ndarray):\n ma_values = ma_maps.copy()\n else:\n raise ValueError(f\"Unsupported data type '{type(ma_maps)}'\")\n\n # Determine bins for null distribution histogram\n # Remember that numpy histogram bins are bin edges, not centers\n # Assuming values of 0, .001, .002, etc., bins are -.0005-.0005, .0005-.0015, etc.\n INV_STEP_SIZE = 100000\n step_size = 1 / INV_STEP_SIZE\n max_ma_values = np.max(ma_values, axis=1)\n # round up based on resolution\n max_ma_values = np.ceil(max_ma_values * INV_STEP_SIZE) / INV_STEP_SIZE\n max_poss_ale = self.compute_summarystat(max_ma_values)\n # create bin centers\n hist_bins = np.round(np.arange(0, max_poss_ale + (1.5 * step_size), step_size), 5)\n self.null_distributions_[\"histogram_bins\"] = hist_bins", "def _bin(self, X):\n H = np.linspace(0, 1, self.Nbin)\n return np.maximum(1 - (abs(X[..., None] - H)) / (H[1] - H[0]) , 0)", "def mu_law_bins(num_bins):\n #all edges\n bins_edge = np.linspace(-1, 1, num_bins + 1)\n #center of all edges\n bins_center = np.linspace(-1 + 1.0 / num_bins, 1 - 1.0 / num_bins, num_bins)\n #get the right edges\n bins_trunc = bins_edge[1:]\n #if sample >= right edges, it might be assigned to the next bin, add 0.1 to avoid this\n bins_trunc[-1] += 0.1\n #convert edges and centers to mu-law scale\n bins_edge_mu = np.multiply(np.sign(bins_trunc), (num_bins ** np.absolute(bins_trunc) - 1) / (num_bins - 1))\n bins_center_mu = np.multiply(np.sign(bins_center), (num_bins ** np.absolute(bins_center) - 1) / (num_bins - 1))\n \n return (bins_edge_mu, bins_center_mu)", "def test_bin_larvaemutattion():\n \n larvae = np.array([[0, 0, 0, 0, 0, 0, 0, 0],\n [1, 1, 1, 1, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 1, 1, 1]])\n \n pos = np.array([[0, 3, 5]])\n mode = 'bin'\n \n larvaemutation_function = get_larvaemutation_function(mode)\n larvaemutated = larvaemutation_function(larvae, pos, seed=13)\n \n goodsol = np.array([[1, 0, 0, 0, 0, 0, 0, 0],\n [1, 1, 1, 0, 0, 0, 0, 0],\n [0, 0, 0, 0, 1, 0, 1, 1]])\n\n np.testing.assert_almost_equal(larvaemutated, goodsol)", "def modelOnBetaGrid(sample,bins,N,l,u):\r\n\r\n betaGrid=np.linspace(l,u,N)\r\n traces=[]\r\n WAIC=dict()\r\n index=0\r\n\r\n for beta in betaGrid:\r\n 
trace=intensityLogGauss(sample,bins,beta)\r\n traces.append(trace['intensity'])\r\n WAIC[index]=trace\r\n index+=1\r\n\r\n df=pm.compare(WAIC,ic='WAIC')\r\n\r\n return betaGrid,df,traces", "def ANN_binned_tagged_jets_hist(datalist, model, discriminant_cuts, CSV_cuts, bins, nbins, mode=\"pT_jet\",Save=False,addFeature=False):\n title = \"binned_tagged_jets_vs_\"+mode\n\tdiscriminant = \"ANN\"\n AllJetsHistlist = []\n CSVHistlist = []\n DiscriminantHistlist = []\n if mode == \"pT_hadron\":\n feature = 2\n elif mode == \"pT_jet\":\n feature = 3\n elif mode == \"decay_vx\":\n feature = 4\n for n,data in enumerate(datalist):\n\t\tdatatitle = data[3]\n print \"working on\",datatitle\n ran = data[4]\n\t\tCSV = data[2]\n\t\tpT = data[1]\n\t\tx_data = data[0]\n AllJetsHistlist.append(rt.TH1D(datatitle+\"_AllJets\",datatitle+\"_\"+title,nbins,ran[0],ran[1]))\n AllJetsHistlist[n].SetLineColor(4)\n CSVHistlist.append(rt.TH1D(datatitle+\"_CSV\",datatitle+\"_\"+title,nbins,ran[0],ran[1]))\n CSVHistlist[n].SetLineColor(3)\n DiscriminantHistlist.append(rt.TH1D(datatitle+\"_Discriminant\",datatitle+\"_\"+title,nbins,ran[0],ran[1]))\n DiscriminantHistlist[n].SetLineColor(2)\n\t\n\t\tif addFeature == False:\n\t\t\tpred_y = model.predict(ANN_functional_shape(x_data))\n\t\telif addFeature == \"pT\":\n\t\t\tpred_y = model.predict(ANN_functional_shape(x_data)+[pT/200])\n\t\telif addFeature == \"PV\":\n\t\t\tassert x_data.shape[1] == 21, \"wrong x_data format: PV cannot be found\"\n\t\t\tpred_y = model.predict(ANN_functional_shape(x_data)+[x_data[:,-1]/10.])\n\t\telse:\n\t\t\tprint \"invalid feature input\"\n\t\t\treturn None\n\t\tbin_numbers = ANN_bin_selection(pT,bins)\n\n\t for i,pT_value in enumerate(pT):\n\t if bin_numbers[i] == -100: continue\n\t\t\tAllJetsHistlist[n].Fill(pT_value)\n\t if pred_y[i] >= discriminant_cuts[bin_numbers[i]]: DiscriminantHistlist[n].Fill(pT_value)\n\t if CSV[i] >= CSV_cuts[bin_numbers[i]]: CSVHistlist[n].Fill(pT_value)\n\n canvaslist = []\n legendlist = []\n Tfilelist = []\n for n,data in enumerate(datalist):\n\t\tdatatitle = data[3]\n canvaslist.append(rt.TCanvas(datatitle+\"_canvas\",\"canvas\",600,600))\n canvaslist[n].SetTitle(datatitle+\"_\"+title)\n rt.gStyle.SetOptStat(0)\n legendlist.append(rt.TLegend(0.9,0.9,0.65,0.75))\n legendlist[n].AddEntry(AllJetsHistlist[n], \"All jets\")\n legendlist[n].AddEntry(CSVHistlist[n], \"CSV\")\n legendlist[n].AddEntry(DiscriminantHistlist[n], discriminant)\n AllJetsHistlist[n].GetXaxis().SetTitle(mode)\n AllJetsHistlist[n].GetYaxis().SetTitle('# jets')\n AllJetsHistlist[n].GetYaxis().SetTitleOffset(1.5)\n #AllJetsHistlist[n].Draw()\n #CSVHistlist[n].Draw(\"SAME\")\n #DiscriminantHistlist[n].Draw(\"SAME\")\n #legendlist[n].Draw()\n if Save:\n #canvaslist[n].SaveAs(title+\"_\"+datatitle+discriminant+\".png\")\n Tfilelist.append(rt.TFile(\"Thesis_Plots/root_files/\"+title+\"_\"+datatitle+discriminant+\".root\",\"recreate\"))\n print \"saved histogram as Thesis_Plots/root_files/\"+title+\"_\"+datatitle+discriminant+\".root\"\n AllJetsHistlist[n].Write()\n CSVHistlist[n].Write()\n DiscriminantHistlist[n].Write()", "def bin_definition(n_bins_gammaness, n_bins_theta2):\n max_gam = 0.9\n max_th2 = 0.05 * u.deg * u.deg\n min_th2 = 0.005 * u.deg * u.deg\n\n gammaness_bins = np.linspace(0, max_gam, n_bins_gammaness)\n theta2_bins = np.linspace(min_th2, max_th2, n_bins_theta2)\n\n return gammaness_bins, theta2_bins", "def __init__(self, bin_edges, **kwargs):\n super().__init__(**kwargs)\n self.bin_edges = bin_edges\n self.num_bins = bin_edges.size + 
1\n self.sigma = Parameter(1.0, transform=positive())", "def test_bin_qual_scores(self):\r\n\r\n qual_data = {'seq1': [10, 20, 30, 40], 'seq2': [11, 21, 31],\r\n 'seq3': [12, 22]}\r\n\r\n expected_bins = [[10, 11, 12], [20, 21, 22], [30, 31], [40]]\r\n\r\n actual_bins = bin_qual_scores(qual_data)\r\n\r\n # Because of arbritrary dictionary order, need to sort results\r\n for bin in range(len(actual_bins)):\r\n actual_bins[bin].sort()\r\n\r\n self.assertEqual(actual_bins, expected_bins)", "def sat_in_range(arr, args):\n # find number of saturated pixels, args[0] is the pixel saturation limit\n sat_pixels = (arr > args[0]).sum()\n # args[1] is a limit of saturated pixels\n res = sat_pixels < args[1]\n result = ct.Result(res, 'saturation_in_range')\n return result", "def can_binarize(a, r, phrase_index):\n if r.arity() <= 2:\n return 1\n if r.arity() > 3:\n raise ValueError(\"4-ary rules and above not supported yet\")\n\n fvars = [x for x in r.fpos if type(x) is tuple]\n for (fi,fj) in phrase_index:\n if fi <= fvars[0][0] and fvars[1][1] <= fj <= fvars[2][0]:\n return 1\n if fvars[0][1] <= fi <= fvars[1][0] and fvars[2][1] <= fj:\n return 1\n\n return 0", "def bin_discretize(self, variables=[], bins=3,\n min_const_samples_bin_size=1.0/3):\n self.edges=np.zeros((self.arity.size,bins+1))\n for i in variables:\n un_cnt=np.unique(self.data[:,i],return_counts=True)\n constvals=un_cnt[0][un_cnt[1]>self.data.shape[0]*min_const_samples_bin_size]\n mask=np.ones(self.data.shape[0],dtype=bool)\n if constvals.size>0:\n for j,cv in enumerate(constvals):\n mask*=(self.data[:,i]!=cv)\n self.data[self.data[:,i]==cv,i]=j\n\n size=np.sum(mask)/bins\n sorted_i=np.argsort(self.data[mask,i])\n edges=[self.data[mask,i][sorted_i[int(size*num)-1]] for num in range(1,bins)]\n self.edges[i]=[self.data[mask,i][sorted_i[0]]]+edges+[self.data[mask,i][sorted_i[-1]]]\n self.data[mask,i]=np.searchsorted(edges,self.data[mask,i])+constvals.size\n self.arity[i]=len(edges)+1+constvals.size", "def test_bin_right(self):\n list_val=[0,1,2,3,4,5,6]\n low=0\n high= len(list_val)-1\n target=1\n self.assertEqual(bin_search(1,0,len(list_val)-1, list_val),1)", "def __len__(self):\n return 9 # logsfr_ratios has 6 bins", "def test_run_a_scan_on_sdp_subarray_in_low():", "def computeAC(bins):\n halfn = len(bins)/2\n Npos = sum(bins[halfn:])\n Nneg = sum(bins[:halfn])\n return float(Npos-Nneg)/(Npos+Nneg)", "def freedman_diaconis_bins(self, arr):\n # From https://stats.stackexchange.com/questions/798/\n if len(arr) < 2:\n return 1\n # Calculate the iqr ranges.\n self.iqr(arr)\n # Calculate the h\n h = 2 * (self.q3 - self.q1) / (len(arr) ** (1 / 3))\n # fall back to sqrt(a) bins if iqr is 0\n if h == 0:\n return int(np.sqrt(arr.size))\n else:\n return int(np.ceil((arr.max() - arr.min()) / h))", "def test_bin_data_type(self):\n\n with pytest.raises(TypeError) as verr:\n avg.median2D(self.testInst, ['1', 'a', '23', '10'], 'longitude',\n ['0', 'd', '24', 'c'], 'mlt',\n ['dummy1', 'dummy2', 'dummy3'], auto_bin=False)\n\n estr = \"Cannot cast array data from\"\n assert str(verr).find(estr) >= 0\n\n return", "def SetPRBinConstraint(self, model ) :\n tot = np.multiply(self.wish, self.dispo)\n for val in tot :\n if not val : continue\n if self.bound>0 : model += val <= self.valBound\n elif self.bound<0 : model += val >= self.valBound", "def correct_ann_outliers(obj_tmp, ann_width, sig, med_neig, std_neig, cy, cx, \n min_thr, max_thr, rand_arr, stddev, half_res_y=False): \n \n if True:#no_numba: \n def _correct_ann_outliers(obj_tmp, ann_width, sig, 
med_neig, std_neig, \n cy, cx, min_thr, max_thr, rand_arr, stddev, \n half_res_y=False): \n n_y, n_x = obj_tmp.shape\n rand_arr = 2*(np.random.rand(n_y, n_x)-0.5)\n obj_tmp_corr = obj_tmp.copy()\n bpix_map = np.zeros([n_y,n_x])\n for yy in range(n_y):\n for xx in range(n_x):\n if half_res_y:\n rad = np.sqrt((2*(cy-yy))**2+(cx-xx)**2)\n else:\n rad = np.sqrt((cy-yy)**2+(cx-xx)**2)\n rr = int(rad/ann_width)\n dev = max(stddev,min(std_neig[rr],med_neig[rr]))\n \n # check min_thr\n if obj_tmp[yy,xx] < min_thr:\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n \n # check max_thr\n elif obj_tmp[yy,xx] > max_thr:\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n \n elif (obj_tmp[yy,xx] < med_neig[rr]-sig*dev or \n obj_tmp[yy,xx] > med_neig[rr]+sig*dev):\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n return obj_tmp_corr, bpix_map\n else:\n @njit \n def _correct_ann_outliers(obj_tmp, ann_width, sig, med_neig, std_neig, \n cy, cx, min_thr, max_thr, rand_arr, stddev, \n half_res_y=False): \n n_y, n_x = obj_tmp.shape\n rand_arr = 2*(np.random.rand(n_y, n_x)-0.5)\n obj_tmp_corr = obj_tmp.copy()\n bpix_map = np.zeros([n_y,n_x])\n for yy in range(n_y):\n for xx in range(n_x):\n if half_res_y:\n rad = np.sqrt((2*(cy-yy))**2+(cx-xx)**2)\n else:\n rad = np.sqrt((cy-yy)**2+(cx-xx)**2)\n rr = int(rad/ann_width)\n dev = max(stddev,min(std_neig[rr],med_neig[rr]))\n \n # check min_thr\n if obj_tmp[yy,xx] < min_thr:\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n \n # check max_thr\n elif obj_tmp[yy,xx] > max_thr:\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n \n elif (obj_tmp[yy,xx] < med_neig[rr]-sig*dev or \n obj_tmp[yy,xx] > med_neig[rr]+sig*dev):\n bpix_map[yy,xx] = 1\n obj_tmp_corr[yy,xx] = med_neig[rr] + \\\n np.sqrt(np.abs(med_neig[rr]))*rand_arr[yy,xx]\n return obj_tmp_corr, bpix_map\n \n return _correct_ann_outliers(obj_tmp, ann_width, sig, med_neig, std_neig, \n cy, cx, min_thr, max_thr, rand_arr, stddev, \n half_res_y=False)", "def test_avalanche_warning_by_region_obs(self):\n pass", "def eqf_binning(t, n_bins):\n t_bins= []\n t= sorted(t)\n n_items= int(len(t)/n_bins)\n\n for i in range(1, n_bins):\n t_bins.append(t[int(i*n_items)])\n t_bins.append(np.max(t) + 0.01)\n t_binning= np.digitize(t, t_bins)\n return t_binning", "def othersn(ax):", "def get_individual_manipulated_feature_centered(record,sensor,bins=100):\r\n \r\n #accesses the record's motion sensor\r\n ana=Analysis()\r\n ana.processRecord(record) \r\n motion=MotionProfileV2.extract(record)\r\n m = motion.vpsInDistance.toArray(sensor)\r\n \r\n #initializes variables\r\n my_range = np.linspace(-1,25,bins)\r\n d = np.zeros((len(my_range),1))\r\n prev=0\r\n index=0\r\n \r\n #iterates through the linspace vector\r\n for i in range(0,len(my_range)): \r\n cp=np.zeros((len(m),2))\r\n count=0\r\n \r\n #makes a copy of the values that fall within the given bin\r\n for j in range(0,len(m)):\r\n if m[j][0]+ ((25-record.motion.vehicleLength)/2-m[0][0]) >= my_range[i] and m[j][0]+ ((25-record.motion.vehicleLength)/2-m[0][0]) <= my_range[i+1]:\r\n cp[count][0]=m[j][0] + ((25-record.motion.vehicleLength)/2-m[0][0])\r\n cp[count][1]=m[j][1]\r\n count+=1\r\n\r\n #if there ARE changes within the bin (sensor switches from 0 or 1)\r\n if 
cp[0][0] != 0:\r\n \r\n #if there is ONLY ONE switch within the bin\r\n if cp[1][0] == 0:\r\n \r\n #if the sensor switches from 1 to 0\r\n if prev == 1:\r\n #finds the area\r\n d[index] = 1 - ((my_range[i+1] - cp[0][0])/(my_range[i+1]-my_range[i]))\r\n #increments the index and updates 'prev' accordingly\r\n index+=1\r\n prev=cp[0][1]\r\n \r\n #if the sensor switches from 0 to 1 \r\n else:\r\n #finds the are\r\n d[index] = ((my_range[i+1] - cp[0][0])/(my_range[i+1]-my_range[i]))\r\n #increments the index and updates 'prev' accordingly\r\n index+=1\r\n prev=cp[0][1]\r\n \r\n #if there are MORE than one switch within the bin \r\n else:\r\n value=0 \r\n #if the sensor switches from 1 to 0 then back any number of times\r\n if cp[0][1] == 1:\r\n #iterates through the copied matrix\r\n for j in range(0,len(cp),2):\r\n \r\n #finds the cumulative area\r\n if j+1<len(cp):\r\n if cp[j+1][0] == 0 and cp[j][0] != 0:\r\n value += my_range[i+1]-cp[j][0]\r\n prev=cp[j][1]\r\n else:\r\n value += cp[j+1][0] - cp[j][0]\r\n \r\n #adds the total area within the bin to the vector \r\n d[index] = value/(my_range[i+1]-my_range[i])\r\n index+=1\r\n \r\n #if the sensor switches from 0 to 1 then back any number of times \r\n else: \r\n #iterates through the copied matrix\r\n for j in range(0,len(cp),2):\r\n \r\n #finds the cumulative area\r\n if j+1<len(cp):\r\n if j == 0:\r\n value += cp[j][0] - my_range[i]\r\n prev=cp[j][1]\r\n elif cp[j][0] == 0 and cp[j-1][0] != 0:\r\n value += my_range[i+1]-cp[j-1][0]\r\n prev=cp[j-1][1]\r\n else:\r\n value += cp[j][0] - cp[j-1][0]\r\n \r\n #adds the total area within the bin to the vector \r\n d[index] = value/(my_range[i+1]-my_range[i])\r\n index+=1\r\n \r\n #if there ARE NOT changes within the bin (sensor stays either 0 or 1)\r\n elif cp[0][0] == 0:\r\n \r\n #changes the 'prev' variable accordingly and increments the index \r\n if prev == 0:\r\n d[index] = 0\r\n index+=1\r\n elif prev == 1:\r\n d[index] = 1\r\n index+=1\r\n \r\n #returns the individual sensor feature vector\r\n return(d)", "def test_histogram_with_varying_number_of_bin(self):\n # this data use number of bins less than the max limit\n df1 = pd.Series([1, 2, 3, 4]).apply(str)\n profiler1 = FloatColumn(df1.name)\n profiler1.max_histogram_bin = 50\n profiler1.update(df1)\n num_bins = len(profiler1.profile['histogram']['bin_counts'])\n self.assertEqual(num_bins, 4)\n\n # this data uses large number of bins, which will be set to\n # the max limit\n df2 = pd.Series([3.195103249264023e+18, 9999995.0, 9999999.0,\n 0.0, -10 ** 10]).apply(str)\n profiler2 = FloatColumn(df2.name)\n profiler2.max_histogram_bin = 50\n profiler2.update(df2)\n num_bins = len(profiler2.profile['histogram']['bin_counts'])\n self.assertEqual(num_bins, 50)\n\n # max number of bin is increased to 10000\n profiler2 = FloatColumn(df2.name)\n profiler2.max_histogram_bin = 10000\n profiler2.update(df2)\n num_bins = len(profiler2.profile['histogram']['bin_counts'])\n self.assertEqual(num_bins, 10000)", "def check_binning(comparefn):\n @wraps(comparefn)\n def wrapper(*args, **kwargs):\n # args[0] is self, since comparefn is member of a class\n if args[2].GetNbinsX() == args[1].GetNbinsX():\n return comparefn(*args, **kwargs)\n # FIXME: this check is incomplete, bin boundaries can also\n # be different: see TH1::CheckConsistency() (cannot use\n # this though, protected member)\n else:\n warning('Histograms with different number of bins.')\n # raise ValueError('Histograms with unequal number of bins')\n return ComparisonFunction.create_error_dict()\n 
return wrapper", "def aggregate_behavior(Z):\n nsamp, nsen = Z.shape\n median_trace = np.median(Z, axis=1)\n dev = np.std(Z - np.repeat(np.matrix(median_trace).transpose(),\n nsen, axis=1), axis=1)\n cmpr_high_variability = [(Z[:, sen_i] > median_trace + 2 * dev\n ).sum()/nsamp > 0.5 for sen_i in range(nsen)]\n return nsamp, nsen, cmpr_high_variability, median_trace, dev", "def _allowed_bands():\n pass", "def get_mean_accuracy(all_means, nbins=10):\n ## Add a columns of bin assignments\n # bins = np.linspace(0, all_means['posterior'].max(), nbins)\n bins = np.linspace(0, 1, nbins)\n all_means['bin'] = np.digitize(all_means['posterior'], bins)\n\n ## Add upper bound to right-most bin\n all_means.replace(to_replace={'bin':{nbins: nbins-1}}, inplace=True)\n\n ## Bin ancestors by mean bootstrapped probability, adding columns for\n ## whether they were the true generating ancestor, and the number of\n ## ancestors in each bin\n bin_count = lambda x: len(x)\n binned = all_means[['generator', 'bin']].pivot_table(index='bin',\n aggfunc=[np.mean, bin_count], fill_value=0)\n binned.columns = [['observed_prob', 'bin_count']]\n binned['n_successes'] = binned['observed_prob'].values * \\\n binned['bin_count'].values\n\n ## Estimate means and confidence intervals as sampling from a binomial\n ## distribution, with a uniform prior on success rates - Done using\n ## a beta distribution\n binned['alpha'] = binned['n_successes'] + 1\n binned['beta'] = binned['bin_count'].values - binned['n_successes'].values + 1\n beta_mean = lambda row: beta.mean(float(row['alpha']), float(row['beta']))\n binned['posterior_mean'] = binned.apply(beta_mean, axis=1)\n\n ## Add confidence intercals\n beta_025CI = lambda row: beta.ppf(0.025, float(row['alpha']), float(row['beta']))\n beta_975CI = lambda row: beta.ppf(0.975, float(row['alpha']), float(row['beta']))\n binned['CI2.5'] = binned.apply(beta_025CI, axis=1)\n binned['CI97.5'] = binned.apply(beta_975CI, axis=1)\n\n ## Convert to values relative to mean, to fit plotting convention\n binned['CI2.5'] = binned['posterior_mean'].values - binned['CI2.5'].values\n binned['CI97.5'] = binned['CI97.5'].values - binned['posterior_mean'].values\n\n ## Add column with bin centre for plotting\n binned['bin_centre'] = all_means[['posterior', 'bin']].groupby('bin').mean()\n\n return binned", "def plot_completeness(cat_name,output_name,name_plot,mag_lims,binning_mag,plot,second_cat='no'):\n\n cat=ascii.read('%s.txt' % cat_name)\n mag_bins=np.arange(mag_lims[0],mag_lims[1],binning_mag)\n\n mask=cat['detected']==1\n mag_binned_tot=np.digitize(cat['MAG'],mag_bins,right=True)\n mag_binned_det=np.digitize(cat[mask]['MAG'],mag_bins,right=True)\n\n nb_mag=np.array([ len(np.where(mag_binned_tot==i)[0]) for i in range(1,len(mag_bins)) ])\n nb_mag_det = np.array([ len(np.where(mag_binned_det==i)[0]) for i in range(1,len(mag_bins)) ])\n #mag_tot= np.array([stuff_cat['MAG'][mag_binned_tot == i].mean() for i in range(1, len(mag_bins))])\n #mag_det= np.array([stuff_cat[mask]['MAG'][mag_binned_det == i].mean() for i in range(1, len(mag_bins))])\n print (nb_mag)\n print (nb_mag_det)\n\n #Write completeness result in text file\n np.savetxt('%s.txt' % output_name, list(zip(mag_bins,nb_mag,nb_mag_det)),fmt='%.2f %d %d')\n\n\n mag_bin_plot=(mag_bins[:-1]+mag_bins[1:])/2\n\n import matplotlib.pyplot as plt\n\n # the histogram of the input sources\n n, bins, patches = plt.hist(cat['MAG'], mag_bins, normed=0, facecolor='green', alpha=0.75)\n plt.xlabel('Magnitude')\n plt.ylabel('Nb of sources')\n 
plt.xlim([mag_bins[0],mag_bins[-1]])\n plt.savefig('results/plots/hist_sources.png')\n #plt.show()\n\n plt.clf()\n plt.plot(mag_bin_plot,nb_mag_det/nb_mag)\n plt.xlabel('Magnitude AB')\n plt.ylabel('Efficiency')\n plt.grid(True)\n plt.savefig('%s.png' % output_name)\n if plot: plt.show()\n\n\n if second_cat != 'no':\n cat2=ascii.read('%s.txt' % second_cat)\n mag_bins2=np.arange(mag_lims[0],mag_lims[1],binning_mag)\n\n mask2=cat2['detected']==1\n mag_binned_tot2=np.digitize(cat2['MAG'],mag_bins2,right=True)\n mag_binned_det2=np.digitize(cat2[mask2]['MAG'],mag_bins2,right=True)\n\n nb_mag2=np.array([ len(np.where(mag_binned_tot2==i)[0]) for i in range(1,len(mag_bins2)) ])\n nb_mag_det2 = np.array([ len(np.where(mag_binned_det2==i)[0]) for i in range(1,len(mag_bins2)) ])\n\n mag_bin_plot2=(mag_bins2[:-1]+mag_bins2[1:])/2\n #print (mag_bin_plot)\n #plt.plot(mag_bin_plot,nb_mag_det/nb_mag,label='seeing=0.7\"',color='red')\n #plt.plot(mag_bin_plot2,nb_mag_det2/nb_mag2,label='seeing=0.1\"',color='green')\n plt.plot(mag_bin_plot,nb_mag_det/nb_mag,label='5.9',color='red')\n plt.plot(mag_bin_plot2,nb_mag_det2/nb_mag2,label='5',color='green')\n plt.xlabel('Magnitude AB')\n plt.ylabel('Efficiency')\n #plt.yscale('log')\n #plt.xscale('log')\n plt.grid(True)\n plt.legend()\n plt.savefig('results/plots/completeness_comp.png')\n if plot: plt.show()", "def binning(S, bands):\n B = np.zeros((S.shape[0], len(bands)), dtype=S.dtype)\n for i, b in enumerate(bands):\n B[:, i] = np.mean(S[:, b[0] : b[1]], axis=1)\n\n return B", "def __init__(self,ptbins,etabins,data=None):\n self._ptbins = ptbins\n self._etabins = etabins\n if data is not None:\n self._data = data\n else:\n self._data = [ [ (0,0) for i in range(len(self._etabins)+1) ] for i in range(len(self._ptbins)+1) ]\n self.__check()", "def test_am_threshold(Simulator, plt, seed, rng):\n d = 64\n vocab = Vocabulary(d, pointer_gen=rng)\n vocab.populate('A; B; C; D')\n\n d2 = int(d / 2)\n vocab2 = Vocabulary(d2, pointer_gen=rng)\n vocab2.populate('A; B; C; D')\n\n def input_func(t):\n return '0.49 * A' if t < 0.1 else '0.8 * B'\n\n with spa.Network('model', seed=seed) as m:\n m.am = ThresholdingAssocMem(\n threshold=0.5, input_vocab=vocab, output_vocab=vocab2,\n function=filtered_step_fn, mapping='by-key')\n m.stimulus = spa.Transcode(input_func, output_vocab=vocab)\n m.stimulus >> m.am\n\n in_p = nengo.Probe(m.am.input)\n out_p = nengo.Probe(m.am.output, synapse=0.03)\n\n with Simulator(m) as sim:\n sim.run(0.3)\n t = sim.trange()\n below_th = t < 0.1\n above_th = t > 0.25\n\n plt.subplot(2, 1, 1)\n plt.plot(t, similarity(sim.data[in_p], vocab))\n plt.ylabel(\"Input\")\n plt.subplot(2, 1, 2)\n plt.plot(t, similarity(sim.data[out_p], vocab2))\n plt.plot(t[above_th], np.ones(t.shape)[above_th] * 0.9, c='g', lw=2)\n plt.ylabel(\"Output\")\n\n assert np.mean(sim.data[out_p][below_th]) < 0.01\n assert_sp_close(t, sim.data[out_p], vocab2['B'], skip=0.25, duration=0.05)", "def rebin (self, bins, tol=1e-4):\n\n bins = np.copy (np.sort (bins))\n for (i, b) in enumerate (bins):\n misses = np.abs (b - self.bins)\n j = np.argmin (misses)\n closest = np.min (misses)\n if closest > tol:\n raise ValueError (\n '{0} is not among current bin edges'.format (b))\n bins[i] = self.bins[j]\n if bins[0] != self.bins[0]:\n raise ValueError (\n 'binning startpoint should match ({0} vs {1})'.format (\n bins[0], self.bins[0]))\n if bins[-1] != self.bins[-1]:\n raise ValueError (\n 'binning endpoint should match ({0} vs {1})'.format (\n bins[-1], self.bins[-1]))\n\n n_newbins = len 
(bins) - 1\n newbin_indices = np.digitize (self.bins, bins)[:-1] - 1\n values = np.array ([\n np.sum (self.values[newbin_indices == i])\n for i in range (n_newbins)\n ])\n if self.errors is not None:\n errors = np.array ([\n np.sqrt (np.sum (self.errors[newbin_indices == i]**2))\n for i in range (n_newbins)\n ])\n else:\n errors = None\n return Hist (bins, values, errors)", "def anoise(this, *args, **kargs):\n\t\t\n\t\t# Arguments\n\t\tif not args: args = [50]\n\t\t\n\t\t# Kernel's retrieval\n\t\tanoisek = this._ANOISEK\n\t\tif anoisek is None: return None\n\t\t\n\t\t# More magic\n\t\tbin = this._BINARY\n\t\tfor thresh in args:\n\t\t\tbin[:,:] = (cv2.filter2D(bin, -1, anoisek) / 2.55 > thresh) * 255\n\t\treturn True", "def test_WIMP_cut_region_on_true_data(bolo_name, mass, analysis):\n\t\n\n\t#Load 2D PDF\n\tfWIMP2D, f = PyRPl.open_ROOT_object(\"./ROOT_files/WIMP_PDF2D_\" + analysis + \".root\", \"WIMP_\" + mass + \"_GeV\")\n\n\t#Load cut value on PDF for 95% WIMP box\n\tcut_val_90, cut_val_99 = 0,0\n\twith open (\"./Text_files/WIMP_PDF_90_and_99_cut_value_\" + analysis + \".txt\", \"r\") as fcut:\n\t\tstuff = [elem.rstrip().split(\",\") for elem in fcut.readlines()]\n\t\tfor elem in stuff:\n\t\t\tmass_val = elem[0]\n\t\t\tif int(mass)==int(mass_val):\n\t\t\t\tcut_val_90 = float(elem[1])\n\t\t\t\tcut_val_99 = float(elem[2])\n\t\n\n\tdata_path = \"/home/irfulx204/mnt/tmain/Desktop/Run308_Analyse_ERA/Fond_ERA_merged/\"\n\tfilou = TFile(data_path + bolo_name + \"_\" + analysis + \"_fond.root\", \"read\")\n\ttree = filou.Get(\"data\")\n\tnum_pass_cut =0\n\n\thpass = TH2F(\"hpass\", \"hpass\", 100, 0, 15, 100, 0, 15)\n\n\t# #T Check that the events are found where expected\n\t# arr1 = np.random.uniform(0,15,size=(200000,2))\n\t# for i in range(arr1.shape[0]):\n\t# \tPDF_val = fWIMP2D.Eval(arr1[i][0], arr1[i][1])\n\t# \tif (cut_val_99<PDF_val<cut_val_90):\n\t# \t# if (cut_val_99<PDF_val<cut_val_90):\n\t# \t\tnum_pass_cut+=1\n\t# \t\thpass.Fill(arr1[i][0], arr1[i][1])\t\t\n\n\t# hpass.Draw()\n\t# raw_input()\n\n\tfor k in range(tree.GetEntries()):\n\t\ttree.GetEntry(k)\n\t\tER=(1+8./3)*0.5*(tree.EC1+tree.EC2)-0.33*(1.5*tree.EIA+4*tree.EIB+1.5*tree.EIC+4*tree.EID)\n\t\tPDF_val = fWIMP2D.Eval(ER, 0.5*(tree.EIB+tree.EID))\n\t\tif (cut_val_99<PDF_val<cut_val_90 and 0.5*(tree.EIB+tree.EID)>0.7):\n\t\t# if (cut_val_99<PDF_val<cut_val_90):\n\t\t\tnum_pass_cut+=1\n\t\t\thpass.Fill(0.5*(tree.EC1+tree.EC2), 0.5*(tree.EIB+tree.EID))\n\n\tprint num_pass_cut\n\thpass.Draw()\n\traw_input()", "def check(self):\n self.lower_bound(5e-4)\n self.upper_bound(5e2)", "def permutation_test(overlap_bins, nonoverlap_bins, thresh, ntrials):\n X = num_top_snps(I(overlap_bins.values()), thresh)\n if X == 0:\n return thresh, 0, 0, 0, 1, 0, 0\n overlap_counts = {k: len(overlap_bins[k]) for k in overlap_bins}\n Y = [num_top_snps(match(overlap_counts, nonoverlap_bins), thresh) for _ in range(ntrials)]\n mean, variance = moments(Y)\n anderson, critical_values, _ = scipy.stats.anderson(Y)\n exact_p = (1 + len([y for y in Y if y >= X])) / (1 + ntrials)\n return thresh, X, mean, variance, exact_p, anderson, critical_values[2]", "def potential_energy_bins(xy, BL, bins, bo=1.0, kL=1.0):\n bL = bond_length_list(xy, BL)\n pe = 0.5 * kL * (bL - bo) ** 2\n # for each bin bins[kk], find incidices i of bL for which BL[i] is connected to at least one particle in bins[kk].\n pe_bins = np.zeros(len(bins), dtype=float)\n kk = 0\n for bin in bins:\n mask = np.logical_or(np.in1d(BL[:, 0], bin), np.in1d(BL[:, 1], bin))\n pe_bins[kk] = 
np.sum(pe[mask])\n kk += 1\n return pe_bins", "def binned_AUC(func_predict, X, y, X_kin, VARS_kin, pt_edges, eta_edges, label):\n\n AUC = np.zeros((len(pt_edges)-1, len(eta_edges)-1))\n\n\n # ** Compute predictions **\n if type(X) is list: # Evaluate one by one\n y_pred = np.zeros(len(X))\n for k in range(len(y_pred)):\n y_pred[k] = func_predict(X[k])\n else:\n y_pred = func_predict(X)\n\n\n # Loop over bins\n for i in range(len(pt_edges) - 1):\n for j in range(len(eta_edges) - 1):\n\n pt_range = [ pt_edges[i], pt_edges[i+1]]\n eta_range = [eta_edges[j], eta_edges[j+1]]\n\n # Indices\n ind = np.logical_and(aux.pick_ind(X_kin[:, VARS_kin.index('trk_pt')], pt_range),\n aux.pick_ind(X_kin[:, VARS_kin.index('trk_eta')], eta_range))\n\n print(f'\\nEvaluate classifier <{label}> ...')\n print(f'*** pT = [{pt_range[0]:.3f},{pt_range[1]:.3f}], eta = [{eta_range[0]:.3f},{eta_range[1]:.3f}] ***')\n \n if np.sum(ind) > 0: # Do we have any events in this cell\n\n # Evaluate metric\n met = aux.Metric(y_true = y[ind], y_soft = y_pred[ind])\n print('AUC = {:.5f}'.format(met.auc))\n AUC[i,j] = met.auc\n\n else:\n print('No events found in this (eta,pt) cell!')\n \n # Evaluate total performance\n met = aux.Metric(y_true = y, y_soft = y_pred)\n fig,ax = plot_auc_matrix(AUC, pt_edges, eta_edges)\n ax.set_title('{}: Integrated AUC = {:.3f}'.format(label, met.auc))\n\n return fig,ax,met", "def contact_probability(summary, results, contacts, bins, feature):\r\n\r\n # prepare sampling interval size\r\n bin_width = bins[2]\r\n n_bins = int(np.ceil(bins[1]/bin_width))\r\n # bin bounds\r\n end_bins = np.arange(bin_width,bin_width*(n_bins+1), bin_width)\r\n # prepare arrays\r\n # two counts will be made, for each assumption. r (contact radius), t (contact time)\r\n activated_counts_r = np.zeros(n_bins, dtype=int)\r\n activated_counts_t = np.zeros(n_bins, dtype=int)\r\n total_counts_r = np.zeros(n_bins, dtype=int)\r\n total_counts_t = np.zeros(n_bins, dtype=int)\r\n n_cells = len(summary.cell_ID)\r\n\r\n for i, cell in summary.iterrows():\r\n if cell.QC == 'good':\r\n trig_contacts = contacts.loc[(contacts.cell_ID == cell.cell_ID) & (contacts['time_to_Ca [s]'] == 0) & (contacts['contact'] == 'CCZ'), :]\r\n if len(trig_contacts) > 0:\r\n max_r_ = np.max(trig_contacts['radius [um]'].values)\r\n max_r = np.max(trig_contacts.loc[trig_contacts['radius [um]'] == max_r_, feature])\r\n max_t_ = np.max(trig_contacts['contact_time [s]'].values)\r\n max_t = np.max(trig_contacts.loc[trig_contacts['contact_time [s]'] == max_t_, feature])\r\n else:\r\n max_r = 0\r\n max_t = 0\r\n if feature == 'radius [um]':\r\n max_r = max_r*1000\r\n max_t = max_t*1000\r\n activated_counts_r += end_bins >= max_r\r\n activated_counts_t += end_bins >= max_t\r\n total_counts_r += 1\r\n total_counts_t += 1\r\n\r\n elif cell.QC == 'good_noCa':\r\n last_frame = np.amax(results.loc[results.cell_ID == cell.cell_ID, 'frame'])\r\n last_contacts = contacts.loc[(contacts.cell_ID == cell.cell_ID) & (contacts['frame'] == last_frame) & (contacts['contact'] == 'CCZ'), :]\r\n max_r_ = np.max(last_contacts['radius [um]'].values)\r\n max_r = np.max(last_contacts.loc[last_contacts['radius [um]'] == max_r_, feature])\r\n max_t_ = np.max(last_contacts['contact_time [s]'].values)\r\n max_t = np.max(last_contacts.loc[last_contacts['contact_time [s]'] == max_t_, feature])\r\n if feature == 'radius [um]':\r\n max_r = max_r*1000\r\n max_t = max_t*1000\r\n total_counts_r += end_bins-bin_width < max_r\r\n total_counts_t += end_bins-bin_width < max_t \r\n\r\n return end_bins, 
activated_counts_r, activated_counts_t, total_counts_r, total_counts_t", "def bin_binarise(self):\n pass", "def correlation_bins(shred):\n return 0", "def SetPRBinCatConstraint( self, model ) :\n tot = np.dot( self.wish.T, self.dispo )\n for val in tot :\n if not val : continue\n if self.bound>0 : model += val <= self.valBound\n elif self.bound<0 : model += val >= self.valBound", "def uniform_binning(ts, bins):\n symb = np.asarray(bins * (ts - ts.min()) / (ts.max() - ts.min() + 1e-12), dtype=int)\n return symb", "def eqw_binning(t, n_bins):\n \n t_diff= (np.max(t) - np.min(t))/n_bins\n t_bins= np.hstack([np.array([np.min(t) + t_diff*i for i in range(1, n_bins)]), [np.max(t) + 0.01]])\n t_binning= np.digitize(t, t_bins)\n return t_binning", "def get_selected_bin_low_median(self, hist, bin_edges, assigned_digits, original_values):\n PRINT_OUTPUT = False\n\n bin_max_index = np.where(hist == hist.max())\n refactored_index = bin_max_index[0].max()\n\n # special case: refactor maximum index, when there are multiple equal maximum values in histogram\n max_histo_value = hist[refactored_index]\n all_max_indices = []\n\n for hist_index, hist_value in enumerate(hist):\n if hist_value == max_histo_value:\n all_max_indices.append(hist_index)\n\n\n # adapt the index for special case, take the middle value in the distribution\n\n # my_custom_median_index = np.where(all_max_indices == np.median(all_max_indices))\n\n new_refactored_index = int(stats.median_low(all_max_indices))\n\n # additional condition for setting breakpoint\n if refactored_index != new_refactored_index:\n refactored_index = new_refactored_index\n\n values_in_this_bin = []\n for index_ad, ad in enumerate(assigned_digits):\n ad -= 1\n if ad == refactored_index:\n values_in_this_bin.append(original_values[index_ad])\n\n\n mean_val = np.mean(values_in_this_bin)\n final_mean = int(np.round(mean_val))\n return final_mean\n\n # detected_edges = bin_edges[bin_max_index]\n # detected_edge = detected_edges[0]\n\n # if PRINT_OUTPUT is True:\n # print('maxbin', detected_edge)\n\n # return detected_edge", "def _analyze_series(self, series):\n # bin series by analysis time\n # only analyze the last bin\n ts = array([si['timestamp'] for si in series])\n ds = diff(ts)\n\n # tolerance_seconds = 60 * 60 * self._bin_hours\n # ds = diff(ts) > tolerance_seconds\n # bounds = where(ds)[0]\n # itemidx = bounds[-1] if bounds else 0\n # series = series[itemidx:]\n\n for ci in self._conditionals:\n ret = self._execute_conditional(ci, series, ds)\n if ret:\n return ret", "def astats(npa, do_bits=True):\n mean = float(np.mean(npa))\n std = float(np.std(npa))\n amax = float(np.amax(npa))\n amin = float(np.amin(npa))\n quant1_3 = np.quantile(npa, [0.25, 0.75])\n iqr = quant1_3[1] - quant1_3[0]\n weak_min = (npa < quant1_3[0] - 1.5 * iqr)\n weak_max = (npa > quant1_3[1] + 1.5 * iqr)\n strong_min = (npa < quant1_3[0] - 3 * iqr)\n strong_max = (npa > quant1_3[1] + 3 * iqr)\n weak_count = int((weak_min | weak_max).sum())\n strong_count = int((strong_min|strong_max).sum())\n if weak_count:\n min_out = float(np.min(np.abs(npa[weak_min|weak_max])))\n if strong_count:\n max_out = float(np.max(np.abs(npa[strong_min|strong_max])))\n else:\n max_out = float(np.max(np.abs(npa[weak_min|weak_max])))\n else:\n min_out = max_out = 0\n\n ret = {\n 'mean': mean,\n 'std': std,\n 'min': amin,\n 'max': amax,\n 'size': npa.size,\n 'wols': weak_count,\n 'sols': strong_count,\n 'min_out' : min_out,\n 'max_out' : max_out,\n }\n if do_bits:\n ret['ibits'] = bits(amax, amin)\n return ret", 
"def ahistogram (inarray,numbins=10,defaultlimits=None,printextras=1):\r\n inarray = N.ravel(inarray) # flatten any >1D arrays\r\n if (defaultlimits <> None):\r\n lowerreallimit = defaultlimits[0]\r\n upperreallimit = defaultlimits[1]\r\n binsize = (upperreallimit-lowerreallimit) / float(numbins)\r\n else:\r\n Min = N.minimum.reduce(inarray)\r\n Max = N.maximum.reduce(inarray)\r\n estbinwidth = float(Max - Min)/float(numbins) + 1e-6\r\n binsize = (Max-Min+estbinwidth)/float(numbins)\r\n lowerreallimit = Min - binsize/2.0 #lower real limit,1st bin\r\n bins = N.zeros(numbins)\r\n extrapoints = 0\r\n for num in inarray:\r\n try:\r\n if (num-lowerreallimit) < 0:\r\n extrapoints = extrapoints + 1\r\n else:\r\n bintoincrement = int((num-lowerreallimit) / float(binsize))\r\n bins[bintoincrement] = bins[bintoincrement] + 1\r\n except: # point outside lower/upper limits\r\n extrapoints = extrapoints + 1\r\n if (extrapoints > 0 and printextras == 1):\r\n print '\\nPoints outside given histogram range =',extrapoints\r\n return (bins, lowerreallimit, binsize, extrapoints)", "def plot_bin_means(X,Y,bin_edges=None,mean='median',error='sem',color=None,\n style='errorbar',minimum_n=25,bin_style='equal'):\n \n assert(X.shape == Y.shape)\n \n X,Y = nonan_pairs(X,Y)\n \n # Flatten if not vectors\n if X.ndim > 1:\n X = X.flatten()\n Y = Y.flatten()\n \n if type(bin_edges) == int:\n if bin_style == 'equal':\n X_min = X.min()\n X_max = X.max() \n bin_edges = np.linspace(X_min,X_max,num=bin_edges)\n elif bin_style == 'percentile':\n bin_edges = np.percentile(nonans(X),np.linspace(0,100,num=bin_edges))\n print(bin_edges)\n else:\n raise ValueError\n \n which_bin = np.digitize(X,bin_edges)\n Nbins = len(bin_edges)-1\n means = np.zeros(Nbins)\n stds = np.zeros(Nbins)\n \n \n # bin_centers = np.zeros(Nbins)\n \n bin_centers = (bin_edges[:-1] + bin_edges[1:])/2\n \n for b in range(Nbins):\n y = Y[which_bin == b+1]\n # bin_centers[b] = (bin_edges[b] + bin_edges[b+1]) / 2\n # Suppress noisy bins\n if len(y) < minimum_n:\n means[b] = np.nan\n stds[b] = np.nan\n else:\n # Mean or median\n if mean == 'mean':\n means[b] = np.nanmean(y)\n elif mean == 'median':\n print(f'{y.shape}')\n means[b] = np.nanmedian(y)\n \n if error == 'sem':\n stds[b] = np.nanstd(y) / np.sqrt(len(y))\n elif error == 'std':\n stds[b] = y.std()\n\n # Plot\n if style == 'errorbar':\n plt.errorbar(bin_centers,means,stds,color=color)\n elif style == 'fill':\n plt.plot(bin_centers, means, color=color)\n plt.fill_between(bin_centers, means-stds, means+stds,\n color=color,alpha=0.5)\n \n return means" ]
[ "0.67321396", "0.6631882", "0.612074", "0.60527307", "0.5962843", "0.5934379", "0.5913258", "0.5909307", "0.57914376", "0.5731966", "0.5723782", "0.5706618", "0.56629616", "0.5644072", "0.5630398", "0.562626", "0.562407", "0.561191", "0.5604084", "0.55954075", "0.5590886", "0.5554225", "0.55534935", "0.5552162", "0.55136806", "0.5511174", "0.5506232", "0.545142", "0.5450618", "0.54437184", "0.5439406", "0.5431434", "0.54301965", "0.54184574", "0.5406317", "0.54031926", "0.5395762", "0.5393106", "0.538843", "0.53681767", "0.5346323", "0.5346323", "0.5346243", "0.53274703", "0.53243524", "0.53147036", "0.5313107", "0.53098446", "0.5309488", "0.5308276", "0.53024334", "0.5300855", "0.52987635", "0.52970725", "0.5296807", "0.52911365", "0.52909", "0.5283605", "0.5277589", "0.52750313", "0.52549684", "0.52505416", "0.5249979", "0.52467877", "0.52461255", "0.52322", "0.523018", "0.5223921", "0.5220508", "0.52138805", "0.5212118", "0.5211606", "0.5207827", "0.52061635", "0.52025187", "0.5200061", "0.51975733", "0.519718", "0.51887125", "0.51882815", "0.5187752", "0.5185588", "0.51841825", "0.5180243", "0.51763", "0.51717883", "0.51697206", "0.5164345", "0.5159885", "0.5157851", "0.51545936", "0.51545775", "0.51444185", "0.51341486", "0.5132037", "0.5125338", "0.5122793", "0.511758", "0.51148385", "0.51115733", "0.5106939" ]
0.0
-1
This is the base Exception class for all step failures. It can be manually raised from recipe code to cause the build to turn red.
def StepFailure(self): return recipe_api.StepFailure
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def raise_step_error(self, error: Exception, step: str):\n error_message = \"{}\\nFailed: Error={}\".format(step, str(error))\n logging.error(error_message)\n self.slacker.send_thread_reply(error_message)\n raise Exception(error_message)", "def raise_on_error(self):\n if not self._status.success:\n cls = UrlApi.InfraHTTPError if self._infra_step else UrlApi.HTTPError\n raise cls('HTTP status (%d)' % (self.status_code,), self)", "def test_fails(self):\n raise FoolishError(\"I am a broken test\")", "def fail(self, msg=None):\r\n raise self.failureException(msg)", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def raise_for_failure(self) -> None:\n if not self.is_success():\n raise exc.ExecutionError(self)", "def exception(self, *args, **kwargs):\n return super(Blueprint, self).exception(*args, **kwargs)", "def abort(self, message: str) -> None:\n message = f\"{Invocation.current.log} - {message}\"\n self.exception = StepException(message)\n global failure_aborts_build # pylint: disable=invalid-name\n global no_actions # pylint: disable=invalid-name\n if failure_aborts_build.value and not no_actions.value:\n no_additional_complaints()\n raise self.exception", "def setup_class(cls):\n try:\n super(BuildFailureTests, cls).setup_class()\n except CommandFailure:\n pass\n else:\n raise AssertionError('A failed build returned an exit code of 0.')", "def fail(self, msg=None):\n raise Exception, msg", "def failed(self):\n\t\tpass", "def raise_fail(*args, **kwargs):\n raise Exception(\"oops\")", "def test_second_step_strict(self):\n with self.assertRaises(Exception):\n self.run_step('S02-errors.py', allow_failure=False)", "def testRunException(self):\n class TestError(Exception):\n \"\"\"Unique test exception\"\"\"\n\n perform_mock = self.PatchObject(generic_stages.BuilderStage, 'PerformStage')\n perform_mock.side_effect = TestError('fail!')\n\n stage = self.ConstructStage()\n results_lib.Results.Clear()\n self.assertRaises(failures_lib.StepFailure, self._RunCapture, stage)\n\n results = results_lib.Results.Get()[0]\n self.assertTrue(isinstance(results.result, TestError))\n self.assertEqual(str(results.result), 'fail!')\n self.mock_cidb.StartBuildStage.assert_called_once_with(\n DEFAULT_BUILD_STAGE_ID)\n self.mock_cidb.FinishBuildStage.assert_called_once_with(\n DEFAULT_BUILD_STAGE_ID,\n constants.BUILDER_STATUS_FAILED)", "def failure_callback(self):\n error_filename = self.run_dir / \"eplusout.err\"\n if error_filename.exists():\n with open(error_filename, \"r\") as stderr:\n stderr_r = stderr.read()\n self.exception = EnergyPlusProcessError(\n cmd=self.cmd, stderr=stderr_r, idf=self.idf\n )\n self.cleanup_callback()", "def add_failure(self, task: Task, exception: Any) -> None: # noqa: DAR101\n super().add_failure(task, exception)\n self._add_summary(task, _TaskExitCode.FAIL)", "def test_badstageerror_raise(self, mock_isdir):\n # Set the mocked functions returned values\n mock_isdir.side_effect = [True]\n\n # Test execution\n wrong_kwargs = copy.copy(self.kwargs)\n wrong_kwargs[\"reconstruction_stage\"] = \"WRONG\"\n self.assertRaises(ValueError, recon_all, **wrong_kwargs)", "def fail(msg):\n\n # Not sure if simply raising the exception is clearer.\n raise CommandFailed(msg)", "def error(self):\n raise NotImplementedError(\"subclasses need to override this method\")", "def InfraFailure(self):\n return recipe_api.InfraFailure", "def __init__(self, message=\"\"):\n super(AutomationError, self).__init__(message)", "def test_class_errored(self, 
cls, exception):", "def report_unexpected_exception(self, *args, **kwargs):\n pass", "def test_runFailed(self):\n builder = BookBuilder()\n exc = self.assertRaises(\n CommandFailed, builder.run,\n [sys.executable, '-c', 'print \"hi\"; raise SystemExit(1)'])\n self.assertEquals(exc.exitStatus, 1)\n self.assertEquals(exc.exitSignal, None)\n self.assertEquals(exc.output, \"hi\\n\")", "def failure_exception(cls, state, exception):\r\n return PlatformMessage(method=\"__reply__\", kwargs={\"__result__\": \"fail\", \"state\": state, \"errcode\": -2,\r\n \"e\": exception})", "def test_fail(make_runner: Callable[..., TargetFunctionRunner]) -> None:\n runner = make_runner(target_failed, use_instances=True)\n run_info = TrialInfo(config=2, instance=\"test\", seed=0, budget=0.0)\n\n runner.submit_trial(run_info)\n run_info, run_value = next(runner.iter_results())\n\n # Make sure the traceback message is included\n assert \"traceback\" in run_value.additional_info\n assert \"RuntimeError\" in run_value.additional_info[\"traceback\"]", "def test_config_step_raises(self):\n\n run_step = self.ConfigStep.create({\n 'name': 'run_step',\n 'job_type': 'run_odoo',\n })\n\n create_step = self.ConfigStep.create({\n 'name': 'test_step',\n 'job_type': 'create_build',\n })\n\n config = self.Config.create({'name': 'test_config'})\n\n # test that the run_odoo step has to be the last one\n with self.assertRaises(UserError):\n config.write({\n 'step_order_ids': [\n (0, 0, {'sequence': 10, 'step_id': run_step.id}),\n (0, 0, {'sequence': 15, 'step_id': create_step.id}),\n ]\n })\n\n # test that the run_odoo step should be preceded by an install step\n with self.assertRaises(UserError):\n config.write({\n 'step_order_ids': [\n (0, 0, {'sequence': 15, 'step_id': run_step.id}),\n (0, 0, {'sequence': 10, 'step_id': create_step.id}),\n ]\n })", "def failure(self, input: str) -> enumFail:\n pass", "def test_raise_exception(self):\n with self.assertRaises(Exception):\n SshpassBaseCommandBuilder(COMMAND).to_build()", "def indicate_failure(self):\n pass", "def failure(self, message=''):\n print(colored(message, 'red'))", "def throw(self):\n pass", "def buildException(status, why):\n print >>sys.stderr, 'build error encountered:', why\n print >>sys.stderr, 'aborting build'\n status.code = 1\n reactor.callFromThread(reactor.stop)", "def unexpected_error(self, exception):", "def job_step_error(self, job_request_payload, message):\n payload = JobStepErrorPayload(job_request_payload, message)\n self.send(job_request_payload.error_command, payload)", "def error(self, msg, transfers):\n self.validation_exceptions.extend(self._create_exceptions(msg, transfers, ValidationType.ERROR))", "def print_failed(self):\n # Construct the message dynamically based on the instance_type\n msg = colored(\"FAIL\", \"red\") + f\" | [ERROR] {self.message}\"\n if self.instance_type == \"FILE\":\n msg += f\" [{self.instance_type}] {self.instance_location}/{self.instance_name}\"\n\n elif self.instance_type == \"HOST\":\n msg += f\" [{self.instance_type}] {self.instance_hostname}\"\n\n msg += f\" [PROPERTY] {':'.join(str(item) for item in self.absolute_path)}\"\n\n # print the msg\n print(msg)", "def raise_(err):\n raise err", "def fail(self):\n self.cleanup()\n self.runner.report_job_fail(self.id)", "def test_raise_exception2(self):\n with self.assertRaises(Exception):\n SshpassBaseCommandBuilder(COMMAND)\\\n .set_password(SERVER_PASSWORD)\\\n .set_file(PASSWORD_FILE)\\\n .to_build()", "def failure(self, cb: CircuitBreaker, exc: BaseException) -> None:", 
"def _message_failed_job(self):\n self.ensure_one()\n return _(\"Something bad happened during the execution of the job. \"\n \"More details in the 'Exception Information' section.\")", "def error():\r\n raise RuntimeError('admin ticket generator at your service')", "def error(self):\n pass", "def test_no_such_step(self):\n with self.assertRaises(Exception):\n self.run_step('FAKE-STEP.no-exists')", "def unexpectedException(self):", "def checkStep(rc, steps, run_status, prog_args):\n\n if (rc == FAILURE) or (rc == EXCEPTION):\n buildException(run_status, 'previous command failed')\n else:\n defer.maybeDeferred(lambda x: startNextStep(x,\n run_status, prog_args), steps)", "def task_failed(self, worker_name, error):\n self.status = 'failed'\n self.modification_time = current_millis()\n self.message = '{} (worker): {}'.format(worker_name, error)\n return self", "def raise_error(cls, *args):\n raise cls(cls.message)", "def failure(self, result):\r\n raise NotImplementedError", "def _failed(self, msg):\n self.log(msg)\n self.result.passed = False\n self.result.add_error(msg)\n self.log(u\"Failed\")", "def test_valid_python_raise_exception(self):\n \n data_file = testutils.DataFile(\"integration_module_valid_raise\")\n\n rtn = self.run_cmd(\"pm install --force --single module --install_name test_raise --name %s --auto\" % str(data_file))\n assert(rtn.return_code == 0)\n\n rtn = self.run_cmd(\"test_raise test\")\n\n assert(rtn.return_code == 246)", "def failed(self, message=None):\n doc = {self.STATE: self.STATE_FAILED}\n\n if message:\n doc.update({self.ERROR_MESSAGE: message})\n\n self.update(doc)", "def test_create_status_throws_exception():\n with pytest.raises(ValueError):\n BuildResults.BuildStatus.create(\"unknown_status\")", "def failure(self, target):\n print \"FAILED:\"\n self.show_target(target)\n self.failed += 1", "def fail(self, msg, *args):\n self.log.error(msg, *args)\n sys.exit(1)", "def exception(self, *args, **kwargs):", "def __init__(self, reason, stage=\"\"):\n super(ProjectConstructionException, self).__init__(\n datatype=\"Project\", stage=stage, context=reason)", "def on_saga_failure(self, failed_step: BaseStep, initial_failure_payload: dict):\n logger.info(f'Saga {self.saga_id} failed on \"{failed_step.name}\" step. 
\\n'\n f'Failure details: {initial_failure_payload}')", "def test_failure_result(self):\n dr = EventualResult(fail(RuntimeError()), None)\n self.assertRaises(RuntimeError, dr.wait, 0.1)", "def __repr__(self: \"Failed\") -> str:\n return \"Failed()\"", "def build_step(self):\n pass", "def build_step(self):\n pass", "def run(self, failure_info):\n signals = {}\n if not failure_info['failed'] or not failure_info['chromium_revision']:\n # Bail out if no failed step or no chromium revision.\n return signals\n\n # Bail out on infra failure\n if failure_info.get('failure_type') == failure_type.INFRA:\n return signals\n\n master_name = failure_info['master_name']\n builder_name = failure_info['builder_name']\n build_number = failure_info['build_number']\n\n for step_name in failure_info.get('failed_steps', []):\n if not waterfall_config.StepIsSupportedForMaster(step_name, master_name):\n # Bail out if the step is not supported.\n continue\n\n step = WfStep.Get(master_name, builder_name, build_number, step_name)\n if step and step.log_data:\n failure_log = step.log_data\n else:\n # TODO: do test-level analysis instead of step-level.\n # TODO: Use swarming test result instead of archived gtest results\n gtest_result = buildbot.GetGtestResultLog(\n master_name, builder_name, build_number, step_name)\n if gtest_result:\n failure_log = _GetReliableTestFailureLog(gtest_result)\n\n if gtest_result is None or failure_log == 'invalid':\n if not lock_util.WaitUntilDownloadAllowed(\n master_name): # pragma: no cover\n raise pipeline.Retry('Failed to pull log of step %s of master %s'\n % (step_name, master_name))\n try:\n failure_log = buildbot.GetStepLog(\n master_name, builder_name, build_number, step_name,\n self.HTTP_CLIENT)\n except ResponseTooLargeError: # pragma: no cover.\n logging.exception(\n 'Log of step \"%s\" is too large for urlfetch.', step_name)\n # If the stdio log of a step is too large, we don't want to pull it\n # again in next run, because that might lead to DDoS to the master.\n # TODO: Use archived stdio logs in Google Storage instead.\n failure_log = 'Stdio log is too large for urlfetch.'\n\n if not failure_log: # pragma: no cover\n raise pipeline.Retry('Failed to pull stdio of step %s of master %s'\n % (step_name, master_name))\n\n # Save step log in datastore and avoid downloading again during retry.\n if not step: # pragma: no cover\n step = WfStep.Create(\n master_name, builder_name, build_number, step_name)\n\n step.log_data = _ExtractStorablePortionOfLog(failure_log)\n\n try:\n step.put()\n except Exception as e: # pragma: no cover\n # Sometimes, the step log is too large to save in datastore.\n logging.exception(e)\n\n # TODO: save result in datastore?\n if step.isolated:\n try:\n json_failure_log = (\n json.loads(failure_log) if failure_log != 'flaky' else {})\n except ValueError: # pragma: no cover\n json_failure_log = {}\n logging.warning('failure_log %s is not valid JSON.' 
% failure_log)\n\n signals[step_name] = {\n 'tests': {}\n }\n step_signal = FailureSignal()\n\n for test_name, test_failure_log in json_failure_log.iteritems():\n signals[step_name]['tests'][test_name] = extractors.ExtractSignal(\n master_name, builder_name, step_name, test_name,\n base64.b64decode(test_failure_log)).ToDict()\n\n # Save signals in test failure log to step level.\n step_signal.MergeFrom(signals[step_name]['tests'][test_name])\n\n signals[step_name]['files'] = step_signal.files\n signals[step_name]['keywords'] = step_signal.keywords\n else:\n signals[step_name] = extractors.ExtractSignal(\n master_name, builder_name, step_name, None, failure_log).ToDict()\n\n return signals", "def exception(self, e):\n pass", "def error_handler(self, step_name: str = None, state_name=None):\n if state_name:\n warnings.warn(\n \"The state_name parameter is deprecated. Use step_name instead\",\n # TODO: In 0.7.0 do changes in examples & demos In 0.9.0 remove\n PendingDeprecationWarning,\n )\n step_name = step_name or state_name\n if not step_name:\n raise MLRunInvalidArgumentError(\"Must specify step_name\")\n self.on_error = step_name\n return self", "def error(self):\n ...", "def failure(self, error):\n print \"comm failed Reason:\", error\n return error", "def on_failure(self, exc: BaseException) -> None:", "def td_error(self, states, actions, rewards, next_states):\n raise NotImplemented", "def context_errored(self, cls, example, exception):", "def error(self, cause = None, annotations = {}):\n self.set_outcome(Result.ERROR, cause, annotations)", "def handle_build_error(error):\n sys.stderr.write('Error running command `%s`. Returned %s.\\n' % (\n ' '.join(error.argv), str(error.error_code)))", "def _create_failure_entry(self):\r\n # view task entry for task failure\r\n progress = {'message': TEST_FAILURE_MESSAGE,\r\n 'exception': TEST_FAILURE_EXCEPTION,\r\n }\r\n return self._create_entry(task_state=FAILURE, task_output=progress)", "def exception(self):\n raise Exception(\"Exception test\")", "def RaiseToolException(problems, error_message=None):\n RaiseException(problems, calliope_exceptions.ToolException, error_message)", "def fail(self, key, **kwargs):\n return fail(self, key, **kwargs)", "def raise_error(Err):\n raise Err()", "def throw(self, type, value=None, traceback=None):\n pass", "def op_exception(self) -> Optional[BaseException]:\n exc = self._step_execution_context.step_exception\n\n if isinstance(exc, RetryRequestedFromPolicy):\n return exc.__cause__\n\n return exc", "def test_handles_job_exception(self):\n from furious.async import Async\n from furious.context._execution import _ExecutionContext\n from furious.processors import run_job\n from furious.processors import AsyncException\n\n work = Async(target=dir, args=[1, 2])\n\n with _ExecutionContext(work):\n self.assertRaises(TypeError, run_job)\n\n self.assertIsInstance(work.result.payload, AsyncException)", "def test_failed():\n build()\n sh(\"%s %s --last-failed\" % (PYTHON, RUNNER_PY))", "def test_raise_exception(self):\n with self.assertRaises(Exception):\n SshCommandBuilder(SERVER_USER, COMMAND).to_build()", "def rollback(self, exc):\n USER.info('%s: Rolling Back Failed Build', self.recipe.name)\n cascade = False\n if isinstance(exc, AssertionError):\n logging.error('Error during verify() of %s', self.recipe.name)\n cascade = True\n if cascade or isinstance(exc, PakitLinkError):\n if not cascade:\n logging.error('Error during linking of %s', self.recipe.name)\n walk_and_unlink(self.recipe.install_dir, 
self.recipe.link_dir)\n cascade = True\n if cascade or (not isinstance(exc, PakitLinkError) and\n not isinstance(exc, AssertionError)):\n if not cascade:\n logging.error('Error during build() of %s', self.recipe.name)\n try:\n Command('rm -rf ' + self.recipe.install_dir).wait()\n except PakitCmdError: # pragma: no cover\n pass", "def error(reason, order):\n return ResultProxy(TaskResult(TaskError(reason), order))", "def indicate_error(self):\n pass", "def error(self, *args, **kwargs):", "def test_exception_class_hierarchy(self) -> None:\n\n try:\n raise CustomDerivedError(state=\"test\")\n except CustomDerivedError as cex:\n assert type(cex) is CustomDerivedError\n assert \"test\" == cex.state\n except CustomError as cex:\n assert False, \"CustomDerivedError should have caught the exception.\"\n except:\n assert False, f\"Unhandled exception: {sys.exc_info()[0]}\"", "def test_error(self) -> None:\n context: Dict[str, ArtifactDescriptor] = dict()\n cmd = ModuleCommand(\n package_id='error', \n command_id='error',\n arguments=[],\n packages=None\n )\n controller = FakeWorkflowController()\n self.backend.execute_async(\n task=TaskHandle(\n task_id='000',\n project_id=self.PROJECT_ID,\n controller=controller\n ),\n command=cmd,\n artifacts=context\n )\n time.sleep(2)\n self.assertEqual(controller.task_id, '000')\n self.assertEqual(controller.state, 'ERROR')\n self.assertEqual(len(controller.outputs.stdout), 0)\n self.assertNotEqual(len(controller.outputs.stderr), 0)", "def build_step(self):\n\n pass", "def render_revalidation_failure(self,failed_step): \n return self.redirect(self.get_step_url(failed_step))", "def test_task_failed(self):\n\n task1 = FailedTask(mock.Mock(), total_retries=0)\n task2 = mock.Mock(execute_after=0)\n\n g = TaskDependencyGraph(MockWorkflowContext())\n seq = g.sequence()\n seq.add(task1, task2)\n\n with limited_sleep_mock():\n self.assertRaisesRegex(WorkflowFailed, 'failtask', g.execute)\n self.assertTrue(task1.is_terminated)\n self.assertFalse(task2.apply_async.called)", "def error_handler(self, failure):\n log.error(failure)", "def fail(self, cause = None, annotations = {}):\n self.set_outcome(Result.FAIL, cause, annotations)", "def set_test_failed(self):\n self.set_result(Status.FAILED)", "def _handle_error(self, failure, item, spider):\n # do nothing, just log\n log.err(failure)", "def test_failure(self):\n\n @sync_performer\n def fail(dispatcher, intent):\n raise intent\n\n dispatcher = lambda _: fail\n self.assertThat(\n sync_perform(\n dispatcher, Effect(ValueError(\"oh dear\")).on(error=lambda e: e)\n ),\n MatchesException(ValueError(\"oh dear\")),\n )", "def creation_error(src_dict: Dict[str, List[str]], e: str):\n return \"LED Group error in %s: %s\\n)\" % (json.dumps(src_dict), e)", "def error(self, messages=None):\n return StateError(\n created_at=self.created_at,\n started_at=self.started_at,\n messages=messages\n )", "def test_broken_error_descriptor(self):\r\n with self.assertRaises(TestException):\r\n module = self.descriptor._xmodule" ]
[ "0.65778327", "0.6160697", "0.6118424", "0.6045226", "0.6028757", "0.59925586", "0.59890693", "0.5984237", "0.5979543", "0.5880542", "0.58220655", "0.58042306", "0.58023226", "0.57811123", "0.57735544", "0.56904095", "0.56897426", "0.5686951", "0.56869185", "0.5652853", "0.5650791", "0.5649762", "0.56389666", "0.5612642", "0.5602301", "0.55968726", "0.5593547", "0.5591498", "0.55746824", "0.5562834", "0.55435306", "0.5543076", "0.55419517", "0.5537418", "0.5518677", "0.5516668", "0.55114883", "0.5475968", "0.5459171", "0.54355115", "0.54351425", "0.54309916", "0.5422185", "0.5413536", "0.54118836", "0.54106736", "0.5407893", "0.5407813", "0.54001236", "0.5362275", "0.535933", "0.5357869", "0.53560364", "0.5346884", "0.53428936", "0.5342327", "0.5340739", "0.5338552", "0.5334603", "0.5333543", "0.5329987", "0.53285986", "0.53285986", "0.53211355", "0.53193825", "0.53033006", "0.5302605", "0.5301806", "0.5301303", "0.5289473", "0.52825433", "0.5278118", "0.52748775", "0.52738404", "0.5272844", "0.5271023", "0.5263156", "0.52626497", "0.5256101", "0.5249445", "0.5242914", "0.52420443", "0.5241046", "0.5232203", "0.5224821", "0.5222814", "0.5221684", "0.5216956", "0.5192028", "0.5178878", "0.51782656", "0.517177", "0.51460487", "0.5144369", "0.5142773", "0.5142699", "0.5142338", "0.5141423", "0.51404685", "0.5136248" ]
0.6969654
0
StepWarning is a subclass of StepFailure, and will translate to a yellow build.
def StepWarning(self): return recipe_api.StepWarning
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _warn(self, warning=None):\r\n debug.err('Warning: %s' % warning)\r\n\r\n if core.FW_conf['settings'].TestRun.ExecutionMode == 'Leader' and warning != None:\r\n executeInFollower(\"self.warn('%s')\" % (warning,))\r\n\r\n if type(warning) != types.ListType:\r\n warning = [warning]\r\n\r\n self.result.addStepWarning(warning)", "def warning(self, warning):\n pass", "def StepFailure(self):\n return recipe_api.StepFailure", "def warning(self, msg, *args, **kwargs):\n pass", "def warning(self, *args, **kwargs):", "def test_warning(self):\n self.p.compute_termination_criteria = True\n self.set_parameter_and_step(\"max_iter\", True, 5, \"ignore\")", "def warning ( self , message , *args , **kwargs ) :\n return self.logger.warning ( message , *args , **kwargs )", "def warning(msg):\n click.secho(msg, fg='yellow')", "def warning(self, *args, **kwargs):\n self.msg(logging.WARNING, *args, **kwargs)", "def success_failure_color(self, evaluation):\n return \"#60f979\" if evaluation.passes else \"#f96c60\"", "def warning(self, msg, transfers):\n self.validation_exceptions.extend(self._create_exceptions(msg, transfers, ValidationType.WARNING))", "def notice(self, warning):\n pass", "def warning(self, message):\n return self.log(\"WARNING\", message)", "def fail(self, message):\n logger.warning(message)\n g.failed = True", "def warning(self, *lines):\n if self.__debug_level >= DEBUG_LEVELS['warning']:\n self.print_lines(self.colored(('magenta', 'bold'), lines))", "def warning(self) -> str:\n return pulumi.get(self, \"warning\")", "def failure(self, message=''):\n print(colored(message, 'red'))", "def set_warning_message(msg):\n set_message(msg, TYPE_WARNING)", "def warning(self) -> Optional[pulumi.Input['AnyArgs']]:\n return pulumi.get(self, \"warning\")", "def warning(self, message, *, preprocessor=None):\n console.warning(message)", "async def warning(self, check, *, note=None):\n return await self.mark(check, \"warning\", note=note)", "def warning(self, msg):\r\n self.logger.warning(msg)", "def warning(self, msg, *args):\n if self.lvl<=logging.WARNING: return self._log(msg, *args)", "def WARNING(self, _strMessage=\"\"):\n self.edLogging.WARNING(_strMessage)", "def warning(self, _strMessage=\"\"):\n self.edLogging.warning(_strMessage)", "def warning(self, msg):\n\n self.logger.warning(msg)", "def report_step_progress(self, step):\n dot_status = self.dot_status[step.status.name]\n if step.status == Status.failed:\n if (step.exception and\n not isinstance(step.exception, AssertionError)):\n # -- ISA-ERROR: Some Exception\n dot_status = self.dot_status[\"error\"]\n step.feature = self.current_feature\n step.scenario = self.current_scenario\n self.failures.append(step)\n self.stream.write(dot_status)\n self.stream.flush()", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def log_check_warnings(self):\n pass", "def warning(self, msg, *args, **kwargs):\n self._log(\"WARNING\", msg, args, kwargs)", "def warning(warning_message: str):\n logger.warning(warning_message)", "def warnings(self) -> List[Error]:", "def warning(self, msg: str):\n self._logger.warning(msg)", "def warn():\n pass", "def warning(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"warning\")", "def log(failure):\n return self._env.logger.warning('[ping] 
{}'.format(failure.getErrorMessage()))", "def render_revalidation_failure(self,failed_step): \n return self.redirect(self.get_step_url(failed_step))", "def get_warning(self) -> List[str]:\n return []", "def get_warning(self) -> List[str]:\n return []", "def set_warning(warning):\n impl.set_warning(**locals())", "def warning(self, msg):\n self.__logger.warning(msg)", "def print_failure_msg(msg):\n click.secho(msg, fg='red', file=sys.stderr)", "def warning(self, msg, *args, **kwargs):\n self._logger.warning(msg, *args, **kwargs)", "def warning(self) -> Optional[str]:\n return pulumi.get(self, \"warning\")", "def warning(self, msg, *args, **kwargs):\n logger = self.__get_logger()\n logger.warning(msg, *args, **kwargs)", "def warningglobal(self, *args, **kwargs):\n return self.logger.log(logging.WARNING+1, *args, **kwargs)", "def warn(self, msg):\n warning_msg = self._warning_color\n warning_msg += \"[SHOULDER_WARNING] \" + msg\n warning_msg += self._reset_color\n self.logger.warning(warning_msg)", "def warning(msg):\n log('WARNING', msg)", "def get_warning_text(self):\n \n to_print = []\n if self['skipped_subchannel'] > 0:\n to_print.append(\"Some event with large weight have been discarded.\"+\\\n \" This happens %s times.\" % self['skipped_subchannel'])\n if self['n_madloop_calls'] > 0:\n fraction = self['exceptional_points']/float(self['n_madloop_calls'])\n if fraction > 1.0e-4:\n to_print.append(\"Some PS with numerical instability have been set \"+\\\n \"to a zero matrix-element (%.3g%%)\" % (100.0*fraction))\n \n return ('\\n'.join(to_print)).replace(\"'\",\" \")", "def addFailure(self, test, err):\n\n super(ForceBalanceTestResult, self).addFailure(test,err)\n self.logger.warning(\"\\r\\x1b[31;1m\" + \"FAIL\" + \"\\x1b[0m \" + test.shortDescription() + \"\\n\")\n\n errorMessage = self.buildErrorMessage(test, err)\n\n for line in errorMessage.splitlines():\n self.logger.warning(\"\\t >\\t\" + line + \"\\n\")", "def worker_warning_captured(self, warning_message, when, item):\n # This hook as been removed in pytest 7.1, and we can remove support once we only\n # support pytest >=7.1.\n kwargs = dict(warning_message=warning_message, when=when, item=item)\n self.config.hook.pytest_warning_captured.call_historic(kwargs=kwargs)", "def warn(self, msg, line=None):\n sys.stderr.write('warning: ' + self.gen_error(msg, line) + '\\n')", "def print_warning(string: str, begin: str = '') -> str:\n return begin + Fore.RED + \"[WARNING] \" + string + Fore.RESET", "def show_warning(title, message, print_message=False):\n\n pass", "def worker_warning_recorded(self, warning_message, when, nodeid, location):\n kwargs = dict(\n warning_message=warning_message, when=when, nodeid=nodeid, location=location\n )\n self.config.hook.pytest_warning_recorded.call_historic(kwargs=kwargs)", "def test_normal_priority_warnings(self):\n self.__jenkins.contents = self.html\n self.assertEqual(4, self.__jenkins.nr_warnings(('job',), 'normal'))", "def warning(self, message, *args, **kwargs):\n\n self.logger.warning(message, *args, **kwargs)", "def warning(*args, noContext: bool=True, showLineNumber: bool=True, **kwargs)->None:\n pass", "def __nextWarning(self):\n self.activeWindow().nextWarning()", "def warning(self) -> 'outputs.AnyResponse':\n return pulumi.get(self, \"warning\")", "def warning(message):\n env = Environment()\n env.loader = FileSystemLoader(osp.join(CONFDIR_PATH, 'templates'))\n warning = env.get_template(\"warning.html\")\n return warning.render(css_path=CSS_PATH, text=message)", "def warn(msg):\n 
print(colored.yellow(\"[WARN]: {0}\".format(msg)))", "def add_warning(self, msg):\n self._add_message(msg, self._warnings)", "def warning(msg):\n log_msg(WARNING, msg)", "def warn(text):\n print(colored(\"WARNING:\", \"yellow\", attrs=(\"bold\",)), colored(text))", "def after_step(context, step):\n if context.config.userdata.getbool(\"debug\") and step.status == \"failed\":\n spost_mortem(step.exc_traceback)", "def print_warning(*args):\n print_message_with_title('WARNING', *args, c1='y', c2='k', style='b')", "def message_warning(msg, *a, **kwargs):\n return str(msg) + '\\n'", "def _get_warning_message(self, target_class):\n\n message = \"%s failed to create a %s\" % (\n self.__class__.__name__,\n target_class.__name__\n )\n\n return message", "def set_warning(warningTxt):\r\n if not core.does_item_exist(\"Warning##Warning\"):\r\n with simple.collapsing_header(\"Warning##Warning\", parent=\"##GroupStats\",\r\n default_open=True,\r\n closable=False,\r\n bullet=True):\r\n core.add_text(\"Warning\", default_value=warningTxt, color=(255, 255, 0, 255))", "def print_warning(text: str):\n templateName = \"{:s}\"\n print(bcolors.WARNING + templateName.format(text) + bcolors.ENDC,\n flush=True)", "def warning(message):\n global LAST_LOG\n LAST_LOG = message\n cprint('\\r[WRN] {0}'.format(message), 'yellow', file=sys.stderr)", "def handle_warning(self, api, command):\n return self.handle_log(api, command, level=logging.WARNING)", "def warning(self, *args, **kwargs): # real signature unknown\n pass", "def _failed(self, msg):\n self.log(msg)\n self.result.passed = False\n self.result.add_error(msg)\n self.log(u\"Failed\")", "def fail():\n sys.stdout.write('%s[ fail ]%s\\n' % (colors.RED, colors.RESET))", "def on_warning(self, warning):\n log.warning(\"Received stall warning: %s\", warning)", "def failure_cmd(self) -> str:\n return \"{} --enable=all -f -q {}\".format(\n self.conf.get_executable(), constants.ROOT_PATH + \"/data/cppcheck-152/trial-fail.cpp\"\n )", "def report_step_progress(self, step):\n pass", "def warning(self, tag, message, exc_info=False):\n \n self.log(logging.warning,tag, message, exc_info)", "def print_warning(msg):\n print('WARNING - %s' % (msg))\n sys.exit(1)", "def error(cls, message, *args, **kwargs):\n warnings.warn(\n cls.marker_theme.error() + cls.time() + cls.parse(message), *args, **kwargs\n )", "def logwarning(self, msg):\n self.logger.warning(msg)", "def warning(self, message, code=None):\n\n if code is None:\n code = ''\n self._add_message( message, self.WARNING, code=code )\n self.n_warnings += 1", "def warning(self, value='', line_before=False):\n self.warnings += 1\n if line_before:\n print('\\n')\n print(Fore.YELLOW + '--- ' + value)", "def warning(self, message: str) -> None:\n\n self.__add_log(self.WARNING, message)", "def get_name(self) -> str:\n return \"write_jenkins_warnings\"", "def warning(self) -> pulumi.Output[Optional['outputs.InfraAlertConditionWarning']]:\n return pulumi.get(self, \"warning\")", "def ReportWarning(text):\n print('warning: %s' % text)", "def print_failure(text):\n\n print(colorize(text, Colors.FAIL))", "def print_warning_msgs():\n for err in TypeWarning.warnings:\n print err", "def warn(self, message):\n return self.log(\"WARNING\", message)", "def warn(self, msg):\n\n self(msg, WARN)", "def warning(\n self,\n msg,\n color=None,\n light=None\n ) -> None:\n self.write(msg, level=logging.WARNING, color=color, light=light)" ]
[ "0.680333", "0.6467354", "0.6363371", "0.6182617", "0.6118767", "0.60774654", "0.6036392", "0.6019372", "0.6014528", "0.6010554", "0.5999503", "0.5906176", "0.5892281", "0.5887475", "0.5859568", "0.5821751", "0.5817227", "0.5793164", "0.5664179", "0.5653461", "0.5625123", "0.56036913", "0.55827785", "0.55620575", "0.55563116", "0.5553998", "0.5553989", "0.55525285", "0.55525285", "0.55525285", "0.55525285", "0.55525285", "0.55525285", "0.55525285", "0.55525285", "0.55445296", "0.5538642", "0.5519302", "0.5509324", "0.5508511", "0.54968333", "0.54736036", "0.54733646", "0.5466505", "0.5466505", "0.54664433", "0.5437949", "0.5428915", "0.5420023", "0.5418672", "0.5406106", "0.5404654", "0.5401399", "0.5391984", "0.5385157", "0.5382221", "0.5377263", "0.5369732", "0.53641987", "0.53622323", "0.5342601", "0.5337679", "0.5337286", "0.53355074", "0.5331821", "0.5325915", "0.5325645", "0.5321884", "0.53109646", "0.53076696", "0.53044534", "0.5296953", "0.52952737", "0.52904564", "0.5289009", "0.52872247", "0.5276126", "0.5271471", "0.52711165", "0.52624005", "0.52619725", "0.52572846", "0.52521104", "0.52434146", "0.52425337", "0.5233112", "0.52301854", "0.52271414", "0.5224837", "0.5218483", "0.521276", "0.52087796", "0.52010745", "0.51992345", "0.5195959", "0.51934665", "0.51890093", "0.5181308", "0.5180704", "0.5171982" ]
0.792026
0
InfraFailure is a subclass of StepFailure, and will translate to a purple build. This exception is raised from steps which are marked as `infra_step`s when they fail.
def InfraFailure(self): return recipe_api.InfraFailure
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _identify_fail(failure):\n logger.warning(failure.getErrorMessage())\n logger.warning(\"Failed to setup & obtain identity\")\n return", "def failure_exception(cls, state, exception):\r\n return PlatformMessage(method=\"__reply__\", kwargs={\"__result__\": \"fail\", \"state\": state, \"errcode\": -2,\r\n \"e\": exception})", "def infrastructure_failure(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"infrastructure_failure\")", "def infrastructure_failure(self) -> bool:\n return pulumi.get(self, \"infrastructure_failure\")", "def raise_on_error(self):\n if not self._status.success:\n cls = UrlApi.InfraHTTPError if self._infra_step else UrlApi.HTTPError\n raise cls('HTTP status (%d)' % (self.status_code,), self)", "def failure(self, cb: CircuitBreaker, exc: BaseException) -> None:", "def StepFailure(self):\n return recipe_api.StepFailure", "def handle_deploy_failure(self):\n step = \"Recovering From Deployment Error\"\n self.slacker.send_thread_reply(step)\n\n if self.has_down_time is True and self.migration_completed:\n return \"Skipped Automated Recovery: Requires Manual Intervention\"\n\n try:\n self.rollback_images()\n self.scale_up_deployments()\n error_handler_message = \"Successfully Rolled Back Deployment\"\n\n except Exception as e:\n error_handler_message = str(e)\n logging.error(error_handler_message)\n\n return error_handler_message", "def report_recoverable(self, payload, exception, callback_url):\n capture_exception(exception)\n\n if not callback_url:\n return\n\n payload[\"failure\"] = {\"type\": \"RECOVERABLE_FAILURE\", \"message\": str(exception)}\n\n data = GraphExportCallbackError().load(payload)\n with retry() as session:\n session.post(callback_url, data=data)", "def handle_unrecoverable_failure(self, node):\n if node.is_failed and node.exit_status < 400:\n self.report_error_handled(node, 'unrecoverable error, aborting...')\n return ProcessHandlerReport(True, self.exit_codes.ERROR_UNRECOVERABLE_FAILURE)", "def on_saga_failure(self, failed_step: BaseStep, initial_failure_payload: dict):\n logger.info(f'Saga {self.saga_id} failed on \"{failed_step.name}\" step. 
\\n'\n f'Failure details: {initial_failure_payload}')", "def failure_callback(self):\n error_filename = self.run_dir / \"eplusout.err\"\n if error_filename.exists():\n with open(error_filename, \"r\") as stderr:\n stderr_r = stderr.read()\n self.exception = EnergyPlusProcessError(\n cmd=self.cmd, stderr=stderr_r, idf=self.idf\n )\n self.cleanup_callback()", "def failure(self) -> 'outputs.EndConditionResponse':\n return pulumi.get(self, \"failure\")", "def _fail(self, exception):\n self.monitor_loop.stop()\n self._maintained.errback(exception)", "def failure(self, input: str) -> enumFail:\n pass", "def failure(cls, state, errcode=-1):\r\n return PlatformMessage(method=\"__reply__\", kwargs={\"__result__\": \"fail\", \"state\": state, \"errcode\": errcode})", "def logFailure(failure, msg='Unhandled exception in deferred:'):\n logging.error('%s\\n%s', msg, failure.getTraceback())", "def test_import_infra(self):\n project = Project.create()\n # Read an engine and check\n infra = import_infra(\"A320.xml\", \"engine\")\n self.assertEqual(len(infra.engines), 1)\n engine = infra.engines[0]\n self.assertEqual(engine.name, \"Machine 0\")\n self.assertEqual(engine.hauteur, 0.0)\n # Local frame:\n self.assertEqual(engine.position.x, 0.0)\n self.assertEqual(engine.position.y, 0.0)\n self.assertEqual(engine.position.z, 0.0)\n\n # Read a building and check\n infra = import_infra(\"Building.xml\", \"building\")\n self.assertEqual(len(infra.buildings), 1)\n building = infra.buildings[0]\n self.assertEqual(building.name, \"MyBuilding\")\n self.assertEqual(building.hauteur, 0.0)\n # Local frame:\n self.assertEqual(building.position.x, 0.0)\n self.assertEqual(building.position.y, 0.0)\n self.assertEqual(building.position.z, 0.0)\n\n # Check a no radiant building is refused:\n try:\n infra = import_infra(\"Building_no_radiant.xml\", \"building\")\n except:\n print(\"Ok, non radiant building is refused as expected.\")\n else:\n print(\"Non radiant building should be refused.\")\n sys.exit(-1)", "def add_failure(self, task: Task, exception: Any) -> None: # noqa: DAR101\n super().add_failure(task, exception)\n self._add_summary(task, _TaskExitCode.FAIL)", "def indicate_failure(self):\n pass", "def inject_failure(self):\n # Inject a failure only if there's a process running\n self.BqLog(\"Starting failure injection\")\n while len(self.circQ) > 0 or (self.currentProc and self.currentProc.workLeft > 0):\n t = time_to_failure()\n self.BqLog(\"Inject the next failure after %d seconds\" % (t))\n if t == 0:\n continue\n yield self.env.timeout(t)\n if len(self.circQ) >= 0 and \\\n self.currentProc.workLeft > 0:\n # Only break the machine if it is currently computing,\n # and if current proc is not restarting\n self.BqLog(\"Injecting a failure in %s\" % (self.currentProc.name))\n self.numFailures += 1\n self.process.interrupt(cause=\"failure\")", "def _logError(self, failure):\r\n try:\r\n failure.printTraceback()\r\n except:\r\n print('Could not print traceback of failure, print error '\r\n 'message instead:')\r\n print(failure.getErrorMessage())", "def failure(self):\n self.logger.debug(\"Logging failure for %s\", self.key)\n self.failures = self.driver.failure(self.key)", "def run(self, failure_info):\n signals = {}\n if not failure_info['failed'] or not failure_info['chromium_revision']:\n # Bail out if no failed step or no chromium revision.\n return signals\n\n # Bail out on infra failure\n if failure_info.get('failure_type') == failure_type.INFRA:\n return signals\n\n master_name = failure_info['master_name']\n 
builder_name = failure_info['builder_name']\n build_number = failure_info['build_number']\n\n for step_name in failure_info.get('failed_steps', []):\n if not waterfall_config.StepIsSupportedForMaster(step_name, master_name):\n # Bail out if the step is not supported.\n continue\n\n step = WfStep.Get(master_name, builder_name, build_number, step_name)\n if step and step.log_data:\n failure_log = step.log_data\n else:\n # TODO: do test-level analysis instead of step-level.\n # TODO: Use swarming test result instead of archived gtest results\n gtest_result = buildbot.GetGtestResultLog(\n master_name, builder_name, build_number, step_name)\n if gtest_result:\n failure_log = _GetReliableTestFailureLog(gtest_result)\n\n if gtest_result is None or failure_log == 'invalid':\n if not lock_util.WaitUntilDownloadAllowed(\n master_name): # pragma: no cover\n raise pipeline.Retry('Failed to pull log of step %s of master %s'\n % (step_name, master_name))\n try:\n failure_log = buildbot.GetStepLog(\n master_name, builder_name, build_number, step_name,\n self.HTTP_CLIENT)\n except ResponseTooLargeError: # pragma: no cover.\n logging.exception(\n 'Log of step \"%s\" is too large for urlfetch.', step_name)\n # If the stdio log of a step is too large, we don't want to pull it\n # again in next run, because that might lead to DDoS to the master.\n # TODO: Use archived stdio logs in Google Storage instead.\n failure_log = 'Stdio log is too large for urlfetch.'\n\n if not failure_log: # pragma: no cover\n raise pipeline.Retry('Failed to pull stdio of step %s of master %s'\n % (step_name, master_name))\n\n # Save step log in datastore and avoid downloading again during retry.\n if not step: # pragma: no cover\n step = WfStep.Create(\n master_name, builder_name, build_number, step_name)\n\n step.log_data = _ExtractStorablePortionOfLog(failure_log)\n\n try:\n step.put()\n except Exception as e: # pragma: no cover\n # Sometimes, the step log is too large to save in datastore.\n logging.exception(e)\n\n # TODO: save result in datastore?\n if step.isolated:\n try:\n json_failure_log = (\n json.loads(failure_log) if failure_log != 'flaky' else {})\n except ValueError: # pragma: no cover\n json_failure_log = {}\n logging.warning('failure_log %s is not valid JSON.' 
% failure_log)\n\n signals[step_name] = {\n 'tests': {}\n }\n step_signal = FailureSignal()\n\n for test_name, test_failure_log in json_failure_log.iteritems():\n signals[step_name]['tests'][test_name] = extractors.ExtractSignal(\n master_name, builder_name, step_name, test_name,\n base64.b64decode(test_failure_log)).ToDict()\n\n # Save signals in test failure log to step level.\n step_signal.MergeFrom(signals[step_name]['tests'][test_name])\n\n signals[step_name]['files'] = step_signal.files\n signals[step_name]['keywords'] = step_signal.keywords\n else:\n signals[step_name] = extractors.ExtractSignal(\n master_name, builder_name, step_name, None, failure_log).ToDict()\n\n return signals", "def error_handler(self, failure):\n log.error(failure)", "def on_failure(self, exc: BaseException) -> NoReturn:\n throw_new_error = self._breaker.open()\n\n if throw_new_error:\n error_msg = \"Trial call failed, circuit breaker opened\"\n raise CircuitBreakerError(error_msg).with_traceback(sys.exc_info()[2])\n else:\n raise exc", "def fail(self, msg=None):\r\n raise self.failureException(msg)", "def on_compensation_failure(self, initially_failed_step: BaseStep,\n initial_failure_payload: dict,\n compensation_failed_step: BaseStep,\n compensation_exception: BaseException):\n logger.info(f'Saga {self.saga_id} failed while compensating \"{compensation_failed_step.name}\" step.\\n'\n f'Error details: {format_exception_as_python_does(compensation_exception)} \\n \\n'\n f'Initial failure details: {initial_failure_payload}')", "def failure_detail(self) -> 'outputs.FailureDetailResponse':\n return pulumi.get(self, \"failure_detail\")", "def _authenticate_failed(self, e):\r\n if e.check(InvalidRequest):\r\n code = httpstatus.HTTP_STATUS_CODE_BAD_REQUEST[0]\r\n msg = e.getErrorMessage()\r\n elif e.check(UnauthorizedLogin):\r\n code = httpstatus.HTTP_STATUS_CODE_UNAUTHORIZED[0]\r\n msg = httpstatus.HTTP_STATUS_CODE_UNAUTHORIZED[1]\r\n else:\r\n e.printTraceback()\r\n code = httpstatus.HTTP_STATUS_CODE_INTERNAL_SERVER_ERROR[0]\r\n msg = httpstatus.HTTP_STATUS_CODE_INTERNAL_SERVER_ERROR[1]\r\n\r\n return Failure(HttpException(code, msg))", "def detail_errorback(self, failure):\n\t\tglobal exitCode\n\t\tif hasattr(failure.value,'reasons') and any(reason.type is OpenSSL.SSL.Error for reason in failure.value.reasons):\n\t\t\tmessage = 'SSL error on ' + failure.request.url\n\t\t\tif sys.platform == 'win32' and not os.environ.get('SSL_CERT_FILE'):\n\t\t\t\tmessage += '''\\nOn Windows, you may have to set environment variable \"SSL_CERT_FILE\" to the location of root certificates bundle.\nYou may find the location by running\n> import certifi\n> certifi.where()'''\n\t\t\t# https://github.com/pyca/pyopenssl/issues/823#issuecomment-468675241 explains On Windows pyOpenSSL doesn't ship with any trust roots\n\t\t\t# https://twistedmatrix.com/documents/current/api/twisted.internet.ssl.html#platformTrust read SSL_CERT_FILE environment variable.\n\n\t\t\tself.logger.error(message)\n\t\t\texitCode = GooglePlayAdvancedSearch.Errors.sslErrorCode\n\t\t\traise CloseSpider('SSL error on ' + failure.request.url)", "def report_unexpected_exception(self, *args, **kwargs):\n pass", "def report_unrecoverable(self, payload, exception, callback_url):\n capture_exception(exception)\n\n if not callback_url:\n return\n\n payload[\"failure\"] = {\"type\": \"UNRECOVERABLE_FAILURE\", \"message\": str(exception)}\n\n data = GraphExportCallbackError().load(payload)\n with retry() as session:\n session.post(callback_url, data=data)", "def 
test_add_failure(self):\n self.protocol.addFailure(\n self.test, pysubunit.RemoteError(compat._u(\"boo qux\")))\n self.assertEqual(\n self.io.getvalue(),\n compat._b(\n ('failure: %s [\\n' +\n _remote_exception_str + ': boo qux\\n]\\n')\n % self.test.id()))", "def testFailureReturnsInternalErrorCode(self):\n body = dumps({'id': 100, 'jsonrpc': '2.0', 'method': 'fail',\n 'params': {}})\n headers = Headers({'Content-Length': [str(len(body))],\n 'Content-Type': ['application/json']})\n request = FakeRequest(headers=headers, body=body)\n resource = TestResource(None, None)\n result = yield resource.deferred_render_POST(request)\n response = loads(result)\n self.assertEqual({'code': JSONRPC_INTERNAL_ERROR,\n 'message': 'Internal error.'},\n response['error'])\n self.assertIn('exceptions.RuntimeError', self.log.getvalue())", "def _failure_to_exception_tuple(failure):\n return (\n failure.value.__class__,\n failure.value,\n failure.getBriefTraceback(),\n )", "def emergency_recover_states_from_failure():\n _emergency_state_check()\n _emergency_iobuf_extract()", "def import_failure_reason(self) -> str:\n return pulumi.get(self, \"import_failure_reason\")", "def failure_detail(self) -> Optional[pulumi.Input['FailureDetailArgs']]:\n return pulumi.get(self, \"failure_detail\")", "def test_add_expected_failure(self):\n self.protocol.addExpectedFailure(\n self.test, pysubunit.RemoteError(compat._u(\"phwoar crikey\")))\n self.assertEqual(\n self.io.getvalue(),\n compat._b(('xfail: %s [\\n' +\n _remote_exception_str + \": phwoar crikey\\n\"\n \"]\\n\") % self.test.id()))", "def failure(self, error):\n print \"comm failed Reason:\", error\n return error", "def raise_for_failure(self) -> None:\n if not self.is_success():\n raise exc.ExecutionError(self)", "def test_original_failure(self):\n try:\n 1 / 0\n except ZeroDivisionError:\n f = Failure()\n dr = EventualResult(fail(f), None)\n self.assertIdentical(dr.original_failure(), f)", "def abort(self, message: str) -> None:\n message = f\"{Invocation.current.log} - {message}\"\n self.exception = StepException(message)\n global failure_aborts_build # pylint: disable=invalid-name\n global no_actions # pylint: disable=invalid-name\n if failure_aborts_build.value and not no_actions.value:\n no_additional_complaints()\n raise self.exception", "def testFailure():\n run(\"chariot-me\") #Start management-engine without initial deplflag\n egress()", "def inject_failure(self):\n while len(self.circQ):\n yield self.env.timeout(time_to_failure())\n if len(self.circQ) > 0 and \\\n not self.currentProc.broken and \\\n self.currentProc.workLeft > 0:\n # Only break the machine if it is currently computing,\n # and if current proc is not restarting\n # TODO: Allow errors to be thrown while restarting\n self.BqLog(\"Injecting a failure in %s\" % (self.currentProc.name))\n self.numFailures += 1\n self.process.interrupt(cause=\"failure\")", "def on_failure(self, exc: BaseException) -> None:", "def end_failure(self, error: Exception):\n if self.__tracer is not None:\n elapsed = (datetime.datetime.now().timestamp() * 1000) - self.__start\n self.__tracer.failure(self.__correlation_id, self.__component, self.__operation, error, round(elapsed))", "def failure(self, error, rc, msg):\n self.module.fail_json(msg=msg, rc=rc, err=error)", "def test_fails(self):\n raise FoolishError(\"I am a broken test\")", "def test_failure(self):\n\n @sync_performer\n def fail(dispatcher, intent):\n raise intent\n\n dispatcher = lambda _: fail\n self.assertThat(\n sync_perform(\n dispatcher, 
Effect(ValueError(\"oh dear\")).on(error=lambda e: e)\n ),\n MatchesException(ValueError(\"oh dear\")),\n )", "def _create_failure_entry(self):\r\n # view task entry for task failure\r\n progress = {'message': TEST_FAILURE_MESSAGE,\r\n 'exception': TEST_FAILURE_EXCEPTION,\r\n }\r\n return self._create_entry(task_state=FAILURE, task_output=progress)", "def _get_failure_from_exception(\n e: BaseException) -> TransactionResult.Failure:\n\n try:\n if isinstance(e, IconServiceBaseException):\n if e.code == ExceptionCode.SCORE_ERROR or isinstance(e, ScoreErrorException):\n Logger.warning(e.message, ICON_SERVICE_LOG_TAG)\n else:\n Logger.exception(e.message, ICON_SERVICE_LOG_TAG)\n\n code = int(e.code)\n message = str(e.message)\n else:\n Logger.exception(e, ICON_SERVICE_LOG_TAG)\n Logger.error(e, ICON_SERVICE_LOG_TAG)\n\n code: int = ExceptionCode.SERVER_ERROR.value\n message = str(e)\n except:\n code: int = ExceptionCode.SERVER_ERROR.value\n message = 'Invalid exception: code or message is invalid'\n\n return TransactionResult.Failure(code, message)", "def failure(self, message=''):\n print(colored(message, 'red'))", "def tc_fail(self, msg):\n self.recover()\n tc_fail(msg)", "def add_failure(self, test, exception_string):\n failure = E.failure(exception_string)\n self._add_test_report(test, result_description=failure,\n error=False, failure=True)", "def resolve_failure(self):\n\t\tpass", "def addFailure(self, test, err):\n\n super(ForceBalanceTestResult, self).addFailure(test,err)\n self.logger.warning(\"\\r\\x1b[31;1m\" + \"FAIL\" + \"\\x1b[0m \" + test.shortDescription() + \"\\n\")\n\n errorMessage = self.buildErrorMessage(test, err)\n\n for line in errorMessage.splitlines():\n self.logger.warning(\"\\t >\\t\" + line + \"\\n\")", "def test_launch_failures_hw(self):\n self.test_launch_failures()", "def extraction_failure(self, extraction_failure):\n self._extraction_failure = extraction_failure", "def _adapt_to_exception_tuple(failure):\n if isinstance(failure, Failure):\n return _failure_to_exception_tuple(failure)\n return failure", "def diagnostic_self_test_fail(reason='no errors found', additional_text='no errors found', severity_level='error'):\n\n SysTools.notify.error(RPD_EVENT_CONNECTIVITY_DIAGNOSTIC_SELF_TEST_FAIL[0],\n reason, additional_text, severity_level)", "def testRunException(self):\n class TestError(Exception):\n \"\"\"Unique test exception\"\"\"\n\n perform_mock = self.PatchObject(generic_stages.BuilderStage, 'PerformStage')\n perform_mock.side_effect = TestError('fail!')\n\n stage = self.ConstructStage()\n results_lib.Results.Clear()\n self.assertRaises(failures_lib.StepFailure, self._RunCapture, stage)\n\n results = results_lib.Results.Get()[0]\n self.assertTrue(isinstance(results.result, TestError))\n self.assertEqual(str(results.result), 'fail!')\n self.mock_cidb.StartBuildStage.assert_called_once_with(\n DEFAULT_BUILD_STAGE_ID)\n self.mock_cidb.FinishBuildStage.assert_called_once_with(\n DEFAULT_BUILD_STAGE_ID,\n constants.BUILDER_STATUS_FAILED)", "def make_expected_failure_message(todo, failure):\n exc_type, exc_value, exc_traceback = _adapt_to_exception_tuple(failure)\n return EXPECTED_FAILURE(\n todo=todo,\n exception=exc_type,\n reason=exc_value,\n traceback=exc_traceback,\n )", "def fail(self):\n self.cleanup()\n self.runner.report_job_fail(self.id)", "def test_failure_result(self):\n dr = EventualResult(fail(RuntimeError()), None)\n self.assertRaises(RuntimeError, dr.wait, 0.1)", "def failure(self, result):\r\n raise NotImplementedError", "def _reportError(self, 
failure):\r\n self._connection.reportError(failure.getErrorMessage())", "def log(failure):\n return self._env.logger.warning('[ping] {}'.format(failure.getErrorMessage()))", "def fail(msg):\n\n # Not sure if simply raising the exception is clearer.\n raise CommandFailed(msg)", "def on_failure(self, exc: BaseException) -> None:\n if self._breaker._state_storage.counter >= self._breaker.fail_max:\n throw_new_error = self._breaker.open()\n\n if throw_new_error:\n error_msg = \"Failures threshold reached, circuit breaker opened\"\n raise CircuitBreakerError(error_msg).with_traceback(sys.exc_info()[2])\n else:\n raise exc", "def on_trial_error(self, trial_runner, trial):\n\n raise NotImplementedError", "def test_fail(make_runner: Callable[..., TargetFunctionRunner]) -> None:\n runner = make_runner(target_failed, use_instances=True)\n run_info = TrialInfo(config=2, instance=\"test\", seed=0, budget=0.0)\n\n runner.submit_trial(run_info)\n run_info, run_value = next(runner.iter_results())\n\n # Make sure the traceback message is included\n assert \"traceback\" in run_value.additional_info\n assert \"RuntimeError\" in run_value.additional_info[\"traceback\"]", "def repr_failure(self, excinfo):\n if isinstance(excinfo.value, NbCellError):\n msg_items = [bcolors.FAIL + \"Notebook cell execution failed\" + bcolors.ENDC]\n formatstring = bcolors.OKBLUE + \"Cell %d: %s\\n\\n\" + \\\n \"Input:\\n\" + bcolors.ENDC + \"%s\\n\\n\" + \\\n bcolors.OKBLUE + \"Traceback:%s\" + bcolors.ENDC\n msg_items.append(formatstring % excinfo.value.args)\n return \"\\n\".join(msg_items)\n else:\n return \"pytest plugin exception: %s\" % str(excinfo.value)", "def _handle_failure(self, proc, test_case):\n if proc.returncode != 0:\n print('ERROR: Test execution failed: {}'.format(test_case.get_name()))\n stdout, stderr = proc.communicate()\n raise TestCaseFailure('Test case {} failed. 
stdout: {}, stderr: {}, '\n 'return code: {}.'.format(test_case.get_name(),\n stdout, stderr,\n proc.returncode))", "def unexpected_error(self, exception):", "def fail(self, failure):\n if self._state == self.State.transfering_no_waiters:\n self._get_deferreds = None\n self._free_deferreds = None\n self._state = self.State.freed\n \n elif self._state == self.State.transfering_waiters:\n for d in self._get_deferreds:\n d.errback(failure)\n self._get_deferreds = None\n self._free_deferreds = None\n self._state = self.State.freed\n \n elif self._state == self.State.transfering_waiters_free:\n for d in self._get_deferreds:\n d.errback(failure)\n self._get_deferreds = None\n self._state = self.State.freed\n for d in self._free_deferreds:\n d.callback(None)\n self._free_deferreds = None\n \n elif self._state == self.State.stored:\n raise ValueError(\"Attempt to set already set value\")\n \n elif self._state == self.State.freed:\n raise ValueError(\"This value instance should not be used anymore\")\n \n else:\n raise ValueError(\"Invalid state\")", "def addFailure(self, result):\n result.addFailure(self, (Exception, Exception(), None))\n # Since TAP will not provide assertion data, clean up the assertion\n # section so it is not so spaced out.\n test, err = result.failures[-1]\n result.failures[-1] = (test, \"\")", "def _response_failure(self, failure, msgID):\r\n if not self._status:\r\n # Can not help it if the response takes some time and in the mean\r\n # time the interface is disabled; therefore, don't raise an error\r\n # instead just skip sending the response\r\n return\r\n\r\n # TODO: Return something useful to the cloud here!\r\n print('Service call failed.')", "def _handle_error(self, failure, item, spider):\n # do nothing, just log\n log.err(failure)", "def TestCaseDidFail(self, request, context):\n LOGGER.info('Received request for TestCaseDidFail %s', request)\n for plugin in self.plugins:\n plugin.test_case_did_fail(request)\n return test_plugin_service_pb2.TestCaseDidFailResponse()", "def test_internal_server_error_beomces_remote_initiated_server_error(self):\n msg = \"The server has encountered an error\"\n error = ErrorParser().process_all(msg)\n assert isinstance(error, RemoteInitiatedServerError)\n eq_(BibliothecaAPI.SERVICE_NAME, error.service_name)\n eq_(502, error.status_code)\n eq_(msg, error.message)\n doc = error.as_problem_detail_document()\n eq_(502, doc.status_code)\n eq_(\"Integration error communicating with 3M\", doc.detail)", "def fail(self, exception):\n with self._lock:\n self._termination_manager.abort(self._local_failure)\n self._transmission_manager.abort(self._local_failure)\n self._ingestion_manager.abort()\n self._expiration_manager.abort()", "def renderHTTP_exception(request, failure):", "def device_stats_collection_failure_reason(self, device_stats_collection_failure_reason):\n\n self._device_stats_collection_failure_reason = device_stats_collection_failure_reason", "def test_ComputerPartition_error_HostingResource_DeliveredState(self):\n sequence_list = SequenceList()\n sequence_string = self.prepare_stopped_computer_partition_sequence_string + '\\\n SlapLoginCurrentComputer \\\n CheckSuccessComputerPartitionErrorCall \\\n Tic \\\n SlapLogout \\\n \\\n LoginDefaultUser \\\n CheckSalePackingListErrorText \\\n Logout \\\n LoginERP5TypeTestCase \\\n CheckSiteConsistency \\\n Logout \\\n '\n sequence_list.addSequenceString(sequence_string)\n sequence_list.play(self)", "def answerFailure( self, reason ):\n\t\tlog.warn( \n\t\t\t\"\"\"Unable to answer channel 
%r: %s\"\"\", \n\t\t\tself.agi.variables['agi_channel'], reason.getTraceback(),\n\t\t)\n\t\tself.agi.finish()", "def fail(self, cause = None, annotations = {}):\n self.set_outcome(Result.FAIL, cause, annotations)", "def Raise( self, Message ):\r\n UsbBootLoaderDriverError( Message )", "def _message_failed_job(self):\n self.ensure_one()\n return _(\"Something bad happened during the execution of the job. \"\n \"More details in the 'Exception Information' section.\")", "def _handle_tracker_error(self, failure):\n\n # Log the error\n logging.error(\"An error occured while running the '\"+self._service_id+\"' service, the service has been stopped: '\"+\n failure.getErrorMessage()+\"'\")\n # TODO: Log the error to the driver's state dictionary.\n\n # Stop the event loop just incase it's still running\n if self._tracking_update_loop.running:\n self._tracking_update_loop.stop()\n\n return False", "def test_remote_authentication_failed_becomes_remote_initiated_server_error(self):\n msg=self.sample_data(\"error_authentication_failed.xml\")\n error = ErrorParser().process_all(msg)\n assert isinstance(error, RemoteInitiatedServerError)\n eq_(BibliothecaAPI.SERVICE_NAME, error.service_name)\n eq_(\"Authentication failed\", error.message)", "def _get_error_type(self):\n\n error_type = AladdinUserFaultType.Unknown\n if not self.error_msg:\n return error_type.value\n\n error_msg = self.error_msg.lower()\n if 'unrecognized' in error_msg:\n error_type = AladdinUserFaultType.UnrecognizedArguments\n elif 'expected one argument' in error_msg or 'expected at least one argument' in error_msg \\\n or 'value required' in error_msg:\n error_type = AladdinUserFaultType.ExpectedArgument\n elif 'misspelled' in error_msg:\n error_type = AladdinUserFaultType.UnknownSubcommand\n elif 'arguments are required' in error_msg or 'argument required' in error_msg:\n error_type = AladdinUserFaultType.MissingRequiredParameters\n if '_subcommand' in error_msg:\n error_type = AladdinUserFaultType.MissingRequiredSubcommand\n elif '_command_package' in error_msg:\n error_type = AladdinUserFaultType.UnableToParseCommandInput\n elif 'not found' in error_msg or 'could not be found' in error_msg \\\n or 'resource not found' in error_msg:\n error_type = AladdinUserFaultType.AzureResourceNotFound\n if 'storage_account' in error_msg or 'storage account' in error_msg:\n error_type = AladdinUserFaultType.StorageAccountNotFound\n elif 'resource_group' in error_msg or 'resource group' in error_msg:\n error_type = AladdinUserFaultType.ResourceGroupNotFound\n elif 'pattern' in error_msg or 'is not a valid value' in error_msg or 'invalid' in error_msg:\n error_type = AladdinUserFaultType.InvalidParameterValue\n if 'jmespath_type' in error_msg:\n error_type = AladdinUserFaultType.InvalidJMESPathQuery\n elif 'datetime_type' in error_msg:\n error_type = AladdinUserFaultType.InvalidDateTimeArgumentValue\n elif '--output' in error_msg:\n error_type = AladdinUserFaultType.InvalidOutputType\n elif 'resource_group' in error_msg:\n error_type = AladdinUserFaultType.InvalidResourceGroupName\n elif 'storage_account' in error_msg:\n error_type = AladdinUserFaultType.InvalidAccountName\n elif \"validation error\" in error_msg:\n error_type = AladdinUserFaultType.ValidationError\n\n return error_type.value", "def raise_fail(*args, **kwargs):\n raise Exception(\"oops\")", "def test_ComputerPartition_error_HostingResource_StoppedState(self):\n sequence_list = SequenceList()\n sequence_string = self.prepare_stop_requested_computer_partition_sequence_string + 
'\\\n SlapLoginCurrentComputer \\\n CheckSuccessComputerPartitionErrorCall \\\n Tic \\\n SlapLogout \\\n \\\n LoginDefaultUser \\\n CheckSalePackingListErrorText \\\n Logout \\\n LoginERP5TypeTestCase \\\n CheckSiteConsistency \\\n Logout \\\n '\n sequence_list.addSequenceString(sequence_string)\n sequence_list.play(self)", "def failure(self, validation_failure):\n \n self.request.response.status_int = 400\n return validation_failure.error.asdict()", "def test_failed_processing(self):\n # setup\n ledger_api_dialogue, fipa_dialogue = self._setup_fipa_ledger_api_dialogues(self)\n\n self.transaction_behaviour.timedout.add(ledger_api_dialogue.dialogue_label)\n\n # operation\n with patch.object(self.logger, \"log\") as mock_logger:\n self.transaction_behaviour.failed_processing(ledger_api_dialogue)\n\n # after\n self.assert_quantity_in_outbox(0)\n\n # finish_processing\n assert self.transaction_behaviour.timedout == set()\n\n mock_logger.assert_any_call(\n logging.DEBUG,\n f\"Timeout dialogue in transaction processing: {ledger_api_dialogue}\",\n )\n\n # failed_processing\n assert fipa_dialogue in self.transaction_behaviour.waiting", "def failure(self, error):\n \n self.request.response.status_int = 400\n return None", "def raise_step_error(self, error: Exception, step: str):\n error_message = \"{}\\nFailed: Error={}\".format(step, str(error))\n logging.error(error_message)\n self.slacker.send_thread_reply(error_message)\n raise Exception(error_message)", "def _test_run_with_failure(self, task_class, expected_message):\r\n task_entry = self._create_input_entry()\r\n self.define_option_problem(PROBLEM_URL_NAME)\r\n with self.assertRaises(TestTaskFailure):\r\n self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)\r\n # compare with entry in table:\r\n entry = InstructorTask.objects.get(id=task_entry.id)\r\n self.assertEquals(entry.task_state, FAILURE)\r\n output = json.loads(entry.task_output)\r\n self.assertEquals(output['exception'], 'TestTaskFailure')\r\n self.assertEquals(output['message'], expected_message)" ]
[ "0.5803383", "0.5730745", "0.565321", "0.5649556", "0.56122845", "0.5440349", "0.5320383", "0.52736396", "0.5240046", "0.5213798", "0.5201333", "0.5083693", "0.50791305", "0.50574607", "0.5027117", "0.50205135", "0.5006612", "0.5000682", "0.49848235", "0.49722403", "0.49625576", "0.49599424", "0.4942724", "0.49408588", "0.49185506", "0.488899", "0.4879341", "0.48707384", "0.48572305", "0.4842845", "0.48270127", "0.4819731", "0.48160434", "0.48101956", "0.4799666", "0.47852236", "0.47834268", "0.47699845", "0.47579664", "0.47562757", "0.47484428", "0.4743476", "0.47429636", "0.47365955", "0.47143108", "0.46903548", "0.4688951", "0.46770164", "0.4674069", "0.46680197", "0.46428528", "0.46341965", "0.4621173", "0.46185598", "0.46027678", "0.45785254", "0.45689493", "0.4566813", "0.45639262", "0.45586154", "0.4551573", "0.4541139", "0.45411196", "0.45381817", "0.45267048", "0.45247993", "0.4517097", "0.45154822", "0.45076606", "0.44826734", "0.4478757", "0.4476612", "0.44711167", "0.44664472", "0.4465498", "0.4462783", "0.44556847", "0.44530702", "0.44476348", "0.44436517", "0.4437343", "0.4436699", "0.44356203", "0.4422762", "0.4419166", "0.44183642", "0.44152057", "0.44143176", "0.44119385", "0.44071454", "0.43809232", "0.43798807", "0.43776825", "0.43711096", "0.437048", "0.43676996", "0.43657333", "0.43651947", "0.4347545", "0.43311942" ]
0.6850455
0
StepTimeout is a subclass of StepFailure and is raised when a step times out.
def StepTimeout(self): return recipe_api.StepTimeout
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def raise_timeout(self, *args, **kwargs):\n\n self.log.error(\"Task timeout encountered.\")\n raise TimeoutError", "def handler(*args, **kwargs):\n raise TimeoutException(\"Test aborted due to timeout. Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))", "def StepFailure(self):\n return recipe_api.StepFailure", "async def timeout(self, failed: bool = False) -> None:\n raise NotImplementedError()", "def test_timeout_elapsed_exception(self):\n deadline = Deadline(-MS)\n with self.assertRaises(TimeoutError):\n deadline.timeout()", "def assert_timeout(self) -> None:", "def timeout(order):\n return ResultProxy(TaskResult(TaskTimedout('A task has timedout'), order))", "def _timeout(signum, frame):\n # Raise TimeoutException with system default timeout message\n raise TimeoutException()", "def test_timeoutRaises(self):\n\n @self.eventloop.wait_for(timeout=0.5)\n def times_out():\n return Deferred().addErrback(lambda f: f.trap(CancelledError))\n\n start = time.time()\n self.assertRaises(TimeoutError, times_out)\n self.assertTrue(abs(time.time() - start - 0.5) < 0.1)", "def _check_timeouts(self, chunk_timeout, total_timeout):\n cur_time = time()\n\n if chunk_timeout is not None and cur_time > self._chunk_time + chunk_timeout:\n raise ChunkTimeout('Item timeout expired.')\n elif total_timeout is not None and cur_time > self._total_time + total_timeout:\n raise TotalTimeout('Total timeout expired.')", "def test_timeout(self, mocker, mock_timedelta):\n\n tid = 289466\n site = \"mysite\"\n\n exception_response = self.generate_task_dictionary(\n tid, state=\"started\", completed=None\n )\n\n responses = [{\"json\": exception_response}]\n url = (\n \"https://cloudapi.acquia.com/v1/\"\n \"sites/prod:{site}/tasks/{tid}.json\".format(tid=tid, site=site)\n )\n\n mocker.register_uri(\"GET\", url, responses)\n\n with self.assertRaises(exceptions.AcquiaCloudTimeoutError):\n self.client.site(site).task(tid).wait(0)", "def _handle_timeout(self, frame=None, **_):\n\n raise TimeOut.TimeOutError(self, frame)", "def test_failed_processing(self):\n # setup\n ledger_api_dialogue, fipa_dialogue = self._setup_fipa_ledger_api_dialogues(self)\n\n self.transaction_behaviour.timedout.add(ledger_api_dialogue.dialogue_label)\n\n # operation\n with patch.object(self.logger, \"log\") as mock_logger:\n self.transaction_behaviour.failed_processing(ledger_api_dialogue)\n\n # after\n self.assert_quantity_in_outbox(0)\n\n # finish_processing\n assert self.transaction_behaviour.timedout == set()\n\n mock_logger.assert_any_call(\n logging.DEBUG,\n f\"Timeout dialogue in transaction processing: {ledger_api_dialogue}\",\n )\n\n # failed_processing\n assert fipa_dialogue in self.transaction_behaviour.waiting", "def assert_timeout(self) -> None:\n if self._cancelled:\n raise asyncio.TimeoutError from None", "def onTimeStepEnd(self, timeStep):\n pass", "def test_step_stop_aborted(self, _step: PropertyMock):\n _step.return_value = None\n es = exposed.ExposedStep()\n es.stop()", "def testTimeout(self):\n\n class TimeoutTestCase(cros_test_lib.TestCase):\n \"\"\"Test case that raises a TimeoutError because it takes too long.\"\"\"\n\n TEST_CASE_TIMEOUT = 1\n\n def testSleeping(self):\n \"\"\"Sleep for 2 minutes. 
This should raise a TimeoutError.\"\"\"\n time.sleep(2 * 60)\n raise AssertionError('Test case should have timed out.')\n\n # Run the test case, verifying it raises a TimeoutError.\n test = TimeoutTestCase(methodName='testSleeping')\n self.assertRaises(timeout_util.TimeoutError, test.testSleeping)", "def test_set_timeout_value_error(self, timeout):\n self.assertRaises(ValueError, self.root.set_timeout, timeout)", "def check_timeout(self, msg):\n if msg.clock.secs > self.timeout and not self.is_cancelled:\n rospy.loginfo(\"Test timed out, cancelling job\")\n self.utils.set_tag(name=self.test_name + \"_Status\", value=\"Failed\")\n self.utils.set_tag(name=self.test_name + \"_Timed_Out\", value=str(self.timeout))\n self.utils.cancel_job()", "def test_timeout(self):\n start = time.time()\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.03)\n # be a little lenient for slow computers:\n self.assertTrue(abs(time.time() - start) < 0.05)", "def pytest_timeout_cancel_timer(item):", "def test_wait_for_predicate_timeout(self):\n predicate_mock = mock.MagicMock(side_effect=[True, True, True])\n with self.assertRaises(TimeoutError):\n train_utils.wait_for_predicate(predicate_mock, num_retries=3)", "def _test_run_with_long_error_msg(self, task_class):\r\n task_entry = self._create_input_entry()\r\n self.define_option_problem(PROBLEM_URL_NAME)\r\n expected_message = \"x\" * 1500\r\n with self.assertRaises(TestTaskFailure):\r\n self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)\r\n # compare with entry in table:\r\n entry = InstructorTask.objects.get(id=task_entry.id)\r\n self.assertEquals(entry.task_state, FAILURE)\r\n self.assertGreater(1023, len(entry.task_output))\r\n output = json.loads(entry.task_output)\r\n self.assertEquals(output['exception'], 'TestTaskFailure')\r\n self.assertEquals(output['message'], expected_message[:len(output['message']) - 3] + \"...\")\r\n self.assertTrue('traceback' not in output)", "def __step_waiter(self, step_id):\n\n # don't forget to tip the waiter :)\n step_waiter = self.emr_client.get_waiter('step_complete')\n try:\n step_waiter.wait(ClusterId=self.clusID,\n StepId=step_id[0],\n WaiterConfig={\n 'Delay': 15,\n 'MaxAttempts': 480\n })\n\n except WaiterError as e:\n if 'Max attempts exceeded' in e.message:\n print('EMR Step did not complete in two hours')\n else:\n print(e.message)", "def test_task_failed(self):\n\n task1 = FailedTask(mock.Mock(), total_retries=0)\n task2 = mock.Mock(execute_after=0)\n\n g = TaskDependencyGraph(MockWorkflowContext())\n seq = g.sequence()\n seq.add(task1, task2)\n\n with limited_sleep_mock():\n self.assertRaisesRegex(WorkflowFailed, 'failtask', g.execute)\n self.assertTrue(task1.is_terminated)\n self.assertFalse(task2.apply_async.called)", "def step(self):\n raise TaskError(\"Task %s: subclass should override step() method!\" %\n self)", "def pytest_exception_interact(node):\n hooks = node.config.pluginmanager.hook\n hooks.pytest_timeout_cancel_timer(item=node)", "def test_timeout_retries(self):\n\n batch = Batch(Mock())\n self.check_instance(batch=batch)\n\n self.assertEqual(batch.timeout_retries, 0)\n self.check_instance(batch, timeout_retries=0)\n\n batch.timeout_retries = 10\n self.assertEqual(batch.timeout_retries, 10)\n self.check_instance(batch, timeout_retries=10)\n\n batch.timeout_retries = 0\n self.assertEqual(batch.timeout_retries, 0)\n self.check_instance(batch, timeout_retries=0)\n\n batch.timeout_retries = 1\n 
self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n\n # exceptions\n ## error messages\n value_error = \"'timeout_retries' must be positive, i.e. greater or equal that zero (>=0).\"\n type_error = f\"'timeout_retries' must be of type {int}.\"\n\n #######################################################################\n # test wrong value\n with self.assertRaises(ValueError) as error:\n batch.timeout_retries = -1\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, value_error)\n\n #######################################################################\n # test wrong type\n with self.assertRaises(TypeError) as error:\n batch.timeout_retries = True\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, type_error)\n\n with self.assertRaises(TypeError) as error:\n batch.timeout_retries = '2'\n self.assertEqual(batch.timeout_retries, 1)\n self.check_instance(batch, timeout_retries=1)\n check_error_message(self, error, type_error)", "def fail(self):\n self.cleanup()\n self.runner.report_job_fail(self.id)", "def test_timeout_elapsed_no_exception(self):\n deadline = Deadline(-MS)\n timeout = deadline.timeout(raise_if_elapsed=False)\n self.assertGreater(timeout, -2 * MS)\n self.assertLess(timeout, -MS)", "def test_pipeline_timeout(mockpipe_timeout, testdir):\n test = testdir.makepyfile(TEST_TIMEOUT)\n result = testdir.inline_run(\n \"-v\",\n f\"--base-pipeline-dir={test.dirname}\",\n test\n )\n passed, skipped, failed = result.listoutcomes()\n\n assert len(passed) == 0\n assert len(skipped) == 0\n assert len(failed) == 1", "def timeout(self, timeout):\n assert timeout is None or timeout > 0\n self._timeout = timeout", "def test_timeout_twice(self):\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)", "def test_wait_timeout_raises():\n with pytest.raises(WaitTimeOut) as err_info:\n raise WaitTimeOut(\"this is error text right here\")\n\n assert str(err_info.value) == \"this is error text right here\"", "def timeout(time_limit):\n\n def wrapUnitTest(testcase):\n\n @wraps(testcase)\n def testWrapper(self):\n\n queue = Queue()\n\n try:\n p = Thread(target=handler, args=(self, testcase, queue))\n p.daemon = True\n p.start()\n err, res = queue.get(timeout=time_limit)\n p.join()\n if err:\n raise err[0](err[1]).with_traceback(err[2])\n return res\n except QueueEmptyError:\n raise TimeoutError(\"Test aborted due to timeout. 
Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))\n\n return testWrapper\n\n return wrapUnitTest", "def test_timeoutCancels(self):\n result = Deferred()\n error = []\n result.addErrback(error.append)\n\n @self.eventloop.wait_for(timeout=0.0)\n def times_out():\n return result\n\n self.assertRaises(TimeoutError, times_out)\n self.assertIsInstance(error[0].value, CancelledError)", "def test_timeout_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time() - 11\n try:\n connection._timeout_exceeded(start)\n except NetmikoTimeoutException as exc:\n assert isinstance(exc, NetmikoTimeoutException)\n return\n\n assert False", "def failure(self, target):\n print \"FAILED:\"\n self.show_target(target)\n self.failed += 1", "def test_timeout_processing(self):\n # setup\n self.transaction_behaviour.processing_time = None\n\n # operation\n self.transaction_behaviour._timeout_processing()\n\n # after\n self.assert_quantity_in_outbox(0)", "def testTrialErrored(self):\n stats = self.default_statistics()\n trial_count = stats[str(0)][\"n\"] + 3\n sched, mock_runner = self.schedulerSetup(trial_count)\n t1, t2, t3 = sched._state[\"bracket\"].current_trials()\n for t in [t1, t2, t3]:\n mock_runner._launch_trial(t)\n\n sched.on_trial_error(mock_runner, t3)\n self.assertEqual(\n TrialScheduler.PAUSE,\n sched.on_trial_result(\n mock_runner, t1, result(stats[str(1)][\"r\"], 10)))\n self.assertEqual(\n TrialScheduler.CONTINUE,\n sched.on_trial_result(\n mock_runner, t2, result(stats[str(1)][\"r\"], 10)))", "def fail_job( self, job_state ):\n self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )\n job_state.job_wrapper.fail( getattr( job_state, \"fail_message\", GENERIC_REMOTE_ERROR ) )", "def on_trial_error(self, trial: Trial):\n pass", "def raise_step_error(self, error: Exception, step: str):\n error_message = \"{}\\nFailed: Error={}\".format(step, str(error))\n logging.error(error_message)\n self.slacker.send_thread_reply(error_message)\n raise Exception(error_message)", "def timeout(time_limit):\n\n class TimeoutException(Exception):\n \"\"\" Subclass Exception to catch timer expiration during search \"\"\"\n pass\n\n def handler(*args, **kwargs):\n \"\"\" Generic handler to raise an exception when a timer expires \"\"\"\n raise TimeoutException(\"Test aborted due to timeout. 
Test was \" +\n \"expected to finish in less than {} second(s).\".format(time_limit))\n\n def wrapUnitTest(testcase):\n\n @wraps(testcase)\n def testWrapper(self, *args, **kwargs):\n\n signal.signal(signal.SIGALRM, handler)\n signal.alarm(time_limit)\n\n try:\n return testcase(self, *args, **kwargs)\n finally:\n signal.alarm(0)\n\n return testWrapper\n\n return wrapUnitTest", "def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout", "def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout", "def set_timeout(self, timeout: int) -> None:\n raise WatchdogError(\"Setting timeout is not supported on {0}\".format(self.describe()))", "def timeout(seconds, error_message=\"Time out.\"):\n import signal\n def decorator(func):\n\n def __timiout_handler(signum, frame):\n raise TimeoutError(error_message)\n\n @functools.wraps(func)\n def wrapper(*arg, **kw):\n signal.signal(signal.SIGALRM, __timiout_handler)\n signal.alarm(seconds)\n ret = \"\"\n try:\n ret = func(*arg, **kw)\n except TimeoutError,e:\n print \"TimeoutError: \", e\n print \"{name} ran more than {seconds}s.\".format(name=func.__name__, seconds=seconds)\n except Exception,e:\n print \"Error: \",e\n finally:\n signal.alarm(0)\n return ret\n return wrapper\n return decorator", "def raise_timeout_error_upload(api_url, headers, data, timeout, proxies):\n raise requests.exceptions.Timeout", "def test_failure_result(self):\n dr = EventualResult(fail(RuntimeError()), None)\n self.assertRaises(RuntimeError, dr.wait, 0.1)", "def error_detection_timeout(self, error_detection_timeout):\n\n self._error_detection_timeout = error_detection_timeout", "def test_fail(make_runner: Callable[..., TargetFunctionRunner]) -> None:\n runner = make_runner(target_failed, use_instances=True)\n run_info = TrialInfo(config=2, instance=\"test\", seed=0, budget=0.0)\n\n runner.submit_trial(run_info)\n run_info, run_value = next(runner.iter_results())\n\n # Make sure the traceback message is included\n assert \"traceback\" in run_value.additional_info\n assert \"RuntimeError\" in run_value.additional_info[\"traceback\"]", "def timeout(self, timeout):\n\n self._timeout = timeout", "def test_wait_timeout_inheritance():\n # confirm subclassed from pypyr root error\n err = WaitTimeOut()\n assert isinstance(err, PypyrAwsError)\n assert isinstance(err, PlugInError)\n assert isinstance(err, PypyrError)", "def cancelled_to_request_timed_out_error(value, timeout):\n if isinstance(value, failure.Failure):\n value.trap(CancelledError)\n raise RequestTimedOutError()\n return value", "def stopped_check(self, timeout=None):", "def inject_failure(self):\n while len(self.circQ):\n yield self.env.timeout(time_to_failure())\n if len(self.circQ) > 0 and \\\n not self.currentProc.broken and \\\n self.currentProc.workLeft > 0:\n # Only break the machine if it is currently computing,\n # and if current proc is not restarting\n # TODO: Allow errors to be thrown while restarting\n self.BqLog(\"Injecting a failure in %s\" % (self.currentProc.name))\n self.numFailures += 1\n self.process.interrupt(cause=\"failure\")", "def grr_set_flow_timeout(line: Text) -> None:\n args = grr_set_flow_timeout.parser.parse_args(shlex.split(line))\n magics_impl.grr_set_flow_timeout_impl(args.timeout)", "def test_timeout(self) -> Optional[pulumi.Input['DurationArgs']]:\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> Optional[pulumi.Input['DurationArgs']]:\n return pulumi.get(self, \"test_timeout\")", "def 
test_timeout_pending(self):\n deadline = Deadline(MS)\n timeout = deadline.timeout()\n self.assertGreater(timeout, 0)\n self.assertLess(timeout, MS)", "def on_trial_error(self, trial_runner, trial):\n\n raise NotImplementedError", "def end_failure(self, error: Exception):\n if self.__tracer is not None:\n elapsed = (datetime.datetime.now().timestamp() * 1000) - self.__start\n self.__tracer.failure(self.__correlation_id, self.__component, self.__operation, error, round(elapsed))", "def test_polling_custom_interval():\n with pytest.raises(polling2.MaxCallException):\n polling2.poll(\n target=lambda: requests.get(\"http://google.com\").status_code == 400,\n step_function=_custom_step, # adds 0.5 seconds to each iteration\n # step_function=polling.step_constant, # returns step\n # step_function=polling.step_linear_double, # returns step * 2\n step=0.5,\n max_tries=3,\n )", "def on_saga_failure(self, failed_step: BaseStep, initial_failure_payload: dict):\n logger.info(f'Saga {self.saga_id} failed on \"{failed_step.name}\" step. \\n'\n f'Failure details: {initial_failure_payload}')", "def onTimeStep(self, timeStep):\n pass", "def transition_on_timeout(timeout, *, to):\n\n # Figure out how many cycles need to pass before we consider ourselves timed out.\n timeout_in_cycles = int(math.ceil(timeout * self._clock_frequency))\n\n # If we've reached that many cycles, transition to the target state.\n with m.If(cycles_in_state == timeout_in_cycles):\n transition_to_state(to)", "def pytest_timeout_set_timer(item, settings):", "async def test_timeout_error(hass: HomeAssistant, mock_daikin) -> None:\n config_entry = MockConfigEntry(\n domain=DOMAIN,\n unique_id=MAC,\n data={CONF_HOST: HOST, KEY_MAC: MAC},\n )\n config_entry.add_to_hass(hass)\n\n mock_daikin.factory.side_effect = asyncio.TimeoutError\n await hass.config_entries.async_setup(config_entry.entry_id)\n await hass.async_block_till_done()\n\n assert config_entry.state == ConfigEntryState.SETUP_RETRY", "def raise_timeout_exception(self, _result=None, _timeout=None):\n raise RosTimeoutError(\"No service response received\")", "def _test_run_with_failure(self, task_class, expected_message):\r\n task_entry = self._create_input_entry()\r\n self.define_option_problem(PROBLEM_URL_NAME)\r\n with self.assertRaises(TestTaskFailure):\r\n self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)\r\n # compare with entry in table:\r\n entry = InstructorTask.objects.get(id=task_entry.id)\r\n self.assertEquals(entry.task_state, FAILURE)\r\n output = json.loads(entry.task_output)\r\n self.assertEquals(output['exception'], 'TestTaskFailure')\r\n self.assertEquals(output['message'], expected_message)", "def set_timeout(self, timeout):\n pass", "def _do_fail_retry(self, event):\n if self._retries > 0:\n self._retries -= 1\n self._state_machine.retry()\n else:\n self._state_machine.abort(result=event.result)", "def _do_fail_retry(self, event):\n if self._retries > 0:\n self._retries -= 1\n self._state_machine.retry()\n else:\n self._state_machine.abort(result=event.result)", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def get_test_timeout(self):\n return None", "def test_failed_job(self):\n\n failed_job = json.loads(TREEHERDER_JOB % (\"testfailed\", \"completed\"))\n self.assertEquals(self.query_api.get_job_status(failed_job), FAILURE)", "def set_timeout(self, timeout):\n 
self.timeout = timeout", "def testTrialErrored2(self):\n stats = self.default_statistics()\n trial_count = stats[str(0)][\"n\"] + stats[str(1)][\"n\"]\n sched, mock_runner = self.schedulerSetup(trial_count)\n trials = sched._state[\"bracket\"].current_trials()\n for t in trials[:-1]:\n mock_runner._launch_trial(t)\n sched.on_trial_result(\n mock_runner, t, result(stats[str(1)][\"r\"], 10))\n\n mock_runner._launch_trial(trials[-1])\n sched.on_trial_error(mock_runner, trials[-1])\n self.assertEqual(len(sched._state[\"bracket\"].current_trials()),\n self.downscale(stats[str(1)][\"n\"], sched))", "def test_timeout(self, timeout, tmpdir, monkeypatch):\n\n file_name = \"test_workflow.yaml\"\n dev = qml.device(\"orquestra.forest\", wires=3, timeout=timeout)\n mock_res_dict = {\"First\": {\"expval\": {\"list\": [123456789]}}}\n\n test_uuid = \"1234\"\n assert dev._timeout == timeout\n assert not os.path.exists(tmpdir.join(f\"expval-{test_uuid}.yaml\"))\n with monkeypatch.context() as m:\n m.setattr(pennylane_orquestra.cli_actions, \"user_data_dir\", lambda *args: tmpdir)\n m.setattr(pennylane_orquestra.cli_actions, \"workflow_results\", lambda *args: \"Test res\")\n\n # Disable submitting to the Orquestra platform by mocking Popen\n m.setattr(subprocess, \"Popen\", lambda *args, **kwargs: MockPopen())\n\n # Disable random uuid generation\n m.setattr(uuid, \"uuid4\", lambda *args: test_uuid)\n\n @qml.qnode(dev)\n def circuit():\n qml.PauliX(0)\n return qml.expval(qml.PauliZ(0))\n\n start = time.time()\n with pytest.raises(TimeoutError, match=\"The workflow results for workflow\"):\n circuit()\n end = time.time()\n assert end - start >= timeout", "def timeout_change(self, timedelta):\n pass # pylint: disable=unnecessary-pass\n # For backward compatibility only.", "def inject_failure(self):\n # Inject a failure only if there's a process running\n self.BqLog(\"Starting failure injection\")\n while len(self.circQ) > 0 or (self.currentProc and self.currentProc.workLeft > 0):\n t = time_to_failure()\n self.BqLog(\"Inject the next failure after %d seconds\" % (t))\n if t == 0:\n continue\n yield self.env.timeout(t)\n if len(self.circQ) >= 0 and \\\n self.currentProc.workLeft > 0:\n # Only break the machine if it is currently computing,\n # and if current proc is not restarting\n self.BqLog(\"Injecting a failure in %s\" % (self.currentProc.name))\n self.numFailures += 1\n self.process.interrupt(cause=\"failure\")", "def timeout(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"timeout\")", "def timeout(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"timeout\")", "def test_pass_times_error_server(self):\n with HTTMock(self.http_wrong):\n self.assertRaises(Exception, self.iss.pass_times, 15,20)", "def trial_end(self, parameter_id, success, **kwargs):", "def test_state_after_failure(self):\n pass", "def _timeout(self, timeout, f, *args, **kwargs):\r\n\r\n t = spawn_thread(target=f, args=args, kwargs=kwargs)\r\n t.daemon = True\r\n t.start()\r\n t.join(timeout)\r\n\r\n if not t.is_alive():\r\n if t.exc_info:\r\n return t.exc_info\r\n return t.result\r\n else:\r\n try:\r\n msg = '[%s] Execution was forcefully terminated'\r\n raise RuntimeError(msg % t.name)\r\n except:\r\n return sys.exc_info()", "def job_step_error(self, job_request_payload, message):\n payload = JobStepErrorPayload(job_request_payload, message)\n self.send(job_request_payload.error_command, payload)", "def timed_out(self):\n return self.__timed_out", "def test_progress__fail(self):\n # progress all quests\n 
self.character.quests.progress()\n # progress one quest\n self.character.quests.progress(_TestQuest.key)\n\n # still on step A\n self.assertEqual(self._get_quest().current_step, \"A\")", "def _timestep_after_hook(self, *args, **kwargs):\n pass", "def setTimeout(self, timeout):\n self.timeout = timeout", "def failed_workflow_cleanup_duration(self,\n failed_workflow_cleanup_duration):\n\n self._failed_workflow_cleanup_duration = failed_workflow_cleanup_duration", "def timeout(self, value):\n if isinstance(value, timedelta):\n value = value.days * 3600 * 24 + value.seconds\n self._timeout = value # noqa", "def test_timeout_not_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time()\n assert not connection._timeout_exceeded(start)", "def _timeout_observer(self, connection_observer, timeout, passed_time, runner_logger, kind=\"background_run\"):\n if not connection_observer.life_status.was_on_timeout_called:\n connection_observer.life_status.was_on_timeout_called = True\n if not connection_observer.done():\n if connection_observer.is_command():\n exception = CommandTimeout(connection_observer=connection_observer,\n timeout=timeout, kind=kind, passed_time=passed_time)\n else:\n exception = ConnectionObserverTimeout(connection_observer=connection_observer,\n timeout=timeout, kind=kind, passed_time=passed_time)\n connection_observer.set_exception(exception)\n connection_observer.on_timeout()\n\n observer_info = \"{}.{}\".format(connection_observer.__class__.__module__, connection_observer)\n timeout_msg = \"has timed out after {:.2f} seconds.\".format(passed_time)\n msg = \"{} {}\".format(observer_info, timeout_msg)\n\n # levels_to_go_up: extract caller info to log where .time_out_observer has been called from\n connection_observer._log(logging.INFO, msg, levels_to_go_up=2)\n log_into_logger(runner_logger, level=logging.DEBUG,\n msg=\"{} {}\".format(connection_observer, timeout_msg),\n levels_to_go_up=1)", "def timeout(self) -> str:\n return pulumi.get(self, \"timeout\")", "def timeout(self, timeout):\n if (self.local_vars_configuration.client_side_validation and\n timeout is not None and not isinstance(timeout, int)):\n raise ValueError(\"Parameter `timeout` must be an integer\") # noqa: E501\n\n self._timeout = timeout" ]
[ "0.6783338", "0.6670357", "0.6458349", "0.6233549", "0.620531", "0.6151229", "0.61305606", "0.6103031", "0.59383696", "0.5915559", "0.5886344", "0.5850895", "0.58435684", "0.58397263", "0.5815907", "0.5812256", "0.576869", "0.5734896", "0.5714007", "0.56782407", "0.56605077", "0.5657417", "0.5595983", "0.5576143", "0.55475986", "0.5510916", "0.5483291", "0.5479062", "0.5474232", "0.5470697", "0.5464927", "0.5464671", "0.54575634", "0.54533696", "0.5453333", "0.54485744", "0.54314154", "0.5428915", "0.54010683", "0.539764", "0.53960466", "0.539481", "0.5393146", "0.5383996", "0.53727674", "0.53727674", "0.53589004", "0.5356304", "0.5353029", "0.53518677", "0.5341568", "0.5336356", "0.5331105", "0.5327217", "0.5318342", "0.53102636", "0.5305946", "0.5298865", "0.52947414", "0.52947414", "0.52862656", "0.5280357", "0.5275419", "0.52709675", "0.5260154", "0.5256531", "0.5254572", "0.5250279", "0.52444607", "0.5243784", "0.5236402", "0.5236256", "0.5229225", "0.5229225", "0.5224458", "0.5224458", "0.52235574", "0.5222473", "0.5215483", "0.52084464", "0.5204799", "0.5202364", "0.5193975", "0.5187106", "0.5187106", "0.51860386", "0.516845", "0.51291466", "0.5123117", "0.5117812", "0.5117453", "0.51157373", "0.5111363", "0.51050705", "0.50997037", "0.50975853", "0.50962245", "0.5090135", "0.50892955", "0.50870377" ]
0.7499932
0
The currently active (open) result from the last step that was run. This is a `types.StepData` object.
def active_result(self):
    return self.step_client.previous_step_result()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def result(self):\n # most pythonic way to get last in last is -1\n return self.history[-1]", "def current_progress_data(self):\n return self._current_progress_data", "def previous_step_result(self):\n return self._previous_step_result", "def cur_step(self):\n return self._cur_step", "def previous_step_result(self):\n if not self._engine._step_stack:\n raise ValueError(\n 'No steps have been run yet, and you are asking for a previous step '\n 'result.')\n return self._engine._step_stack[-1].step_result", "def get_last_result(self):\n return self.last_result", "def getCurrentStep():", "def last_triggered_step(self):\n return self._last_triggered_step", "def result(self):\n assert(self.__complete)\n return self.__result", "def get_current_observation(self):\n return self.observation_history[-1]", "def result( self):\n return self._result", "def result(self):\n return self['result']", "def get_last_solution(self):\n return self.last_result", "def get_current_value(self):\n assert(self.is_started())\n return self.currValue", "def currentValue(self):\n return self.__currentValue", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n with self._condition:\n self.fetch()\n return self.__get_result()", "def last_result(self):\n # TODO : when evaluating multiline expressions this returns the first result\n lr = self.jiloop.lastRequest()\n res = lr.lineRep().call(\"$result\", spark_jvm_helpers.to_scala_list([]))\n return res", "def _get_result(self):\r\n \r\n return self._result", "def current(self):\n return self._wizard.current_step or self.first", "def get_data(self):\n return self._result", "def state(self):\n result = self.getResult()\n return result.state", "def last_value(self):\n return self._stop", "def step(self):\n return self._step", "def extract_goal_state(self):\n time = rospy.get_time()\n ref_time = time - self.last_time\n future_time = ref_time + self.update_rate\n\n # get state of future time in global trajectory\n return df.compute_output3D(self.global_solution, self.order, self.time[self.future_index], future_time)", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def step(self):\n return self._step", "def getCurrent(self):\n return self.__current", "def result(self) -> T:\n if not self.done():\n raise InvalidStateError(\"result is not yet available\")\n elif self.cancelled():\n raise self._cancelled\n else:\n return self._outcome.get()", "def GetCurrentItem(self):\r\n\r\n return self._current", "def get_goal(self):\n return self.get_observation(self.env._get_goal())", "def result(self) -> Item:\n return self._result", "def get_current_task(self):\n return self.get_current_step().get_last_task()", "def finish_checkpoint(self):\n return self.this_evaluation.checkpoint", "def state(self):\n return self._current_value", "def get_current_state(self, data):\r\n return self.get_context()", "def current(self):\n return self._current", "def current(self):\n return self._current", "def current(self):\n return self._current", "def get_step(self):\n return self.step", "def get_step(self):\n return self.step", "def result(self):\n with self.__lock:\n assert(self.__complete)\n return self.__result", "def getObservation(self):\n return self._cur_state", "def get_last_step(self):\n return self.get_step_by_index(-1)", "def get_goal(self):\n self._pid_lock.acquire() # Acquire Lock\n rtn = self._goal\n self._pid_lock.release() # 
Release Lock\n\n return rtn", "def getStep():\n # TODO: can there be non-Step logs?", "def last_run(self):\n return self._last_run", "def current_step(self) -> FlowNode:\n return self._current_step", "def get_outcome(self):\n return self.__outcome", "def current_time_step(self) -> ts.TimeStep:\n return self._current_time_step", "def result(self):\n if self._child:\n return self._child.result()\n return self._result", "def get_current(self) -> typing.Any:\n\n return self.current_obj", "def get_state(self):\r\n return self.currentObservation", "def get_value(self):\n return self.last_value", "def result(self):\n if not self._last_command:\n return []\n return self._last_command.result()", "def process_finish(self):\n return self.x", "def process_finish(self):\n return self.x", "def get_results(self):\n return self.result", "def CurrentState(self):\n return self.currentState", "def get(self):\n if not self.finished():\n self.wait()\n return self._result", "def step ( self ) :\n return self.__step", "def last_value(self):\n return self.samples[-1]", "def active_step(self):\n if self._step_stack:\n return self._step_stack[-1]\n return None", "def current_state(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"current_state\")", "def get_result_of_first_calculation_added_to_history():\n return Calculator.history[0].getResult()", "def last_state(self):\n return self._simstate", "def get_result(self):\n try:\n return self.results.get_nowait()\n except Empty:\n return None", "def getFinish(self):\n return self._finish", "def current_value(self):\n return self.current_counter.value", "def get_current_state(self):\n return self.nextYs[-1]", "def get_current(self):\n return self.current", "def last_value(self):\n return self._last_value", "def current_temp(self):\n return self._current_temp", "def result(self):\n if self._result is not None:\n return self._result\n if self._exc_info is not None:\n raise self._exc_info[0], self._exc_info[1], self._exc_info[2]\n self._check_done()\n return self._result", "def current_operation(self):\n return self.state", "def result(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"result\")", "def get_current_turn(self):\n return self.turns.latest('number')", "def last_value(self):\n return self._waveforms[-1].last_value", "def best_step(self):\r\n return self._best_value_step", "def last_processed(self):\n return self._last_processed", "def state(self):\n return self.probe.get_data(self.variable)", "def state(self):\n return self.probe.get_data(self.variable)", "def get_output(self):\r\n return self._api.get_output()", "def output(self) -> pulumi.Output[Optional['outputs.JobStepOutputResponse']]:\n return pulumi.get(self, \"output\")", "def current_martingale(self):\n t, *_ = self._integrator.get_state(copy=False)\n return self._martingale.value(t)", "def last_attempt_result(self) -> Optional[pulumi.Input['InstanceAttemptResultArgs']]:\n return pulumi.get(self, \"last_attempt_result\")", "def get_current(self) -> int:\n return self._current", "def get_current_step(self):\n try:\n return self.get_step_by_id(self.current_step.id)\n except (AttributeError, ValueError):\n message = \"The current step for this ticket is not set.\"\n logger.debug(message)\n raise KeyError(message)", "def get_current(self):\n return self.x", "def last_value(self):\n return self._value", "def value(self):\n\n return self._progress.value()", "def outcome(self):\r\n return self._outcome", "def latest_state_data(self):\n if not self.state_list:\n return None\n if not 
self.state_list[-1]:\n return None\n return self.state_list[-1]", "def get_output(self):\n return self._output", "def get_current_token(self):\n with self._lock:\n if self._unfinished_ids:\n return self._unfinished_ids[0] - self._step\n\n return self._current", "def getCurrentObservation(self):\n\n if (len(self.observationHistory) == 0):\n return None\n\n return self.observationHistory[-1]" ]
[ "0.7012832", "0.6931432", "0.6929647", "0.67603475", "0.66973376", "0.6685482", "0.66834754", "0.66508675", "0.66325194", "0.65645987", "0.6562965", "0.65574545", "0.65385914", "0.6496028", "0.6484237", "0.6467374", "0.6467374", "0.6467374", "0.64124614", "0.6401742", "0.63910645", "0.63884705", "0.6376051", "0.63594383", "0.63495255", "0.6305278", "0.6294111", "0.6282416", "0.6282416", "0.6282416", "0.6282416", "0.62147653", "0.6212894", "0.62097275", "0.6203121", "0.61913353", "0.6190178", "0.6187749", "0.6154861", "0.61456746", "0.6142923", "0.6142923", "0.6142923", "0.61391056", "0.61391056", "0.61371815", "0.61318827", "0.6122624", "0.6102315", "0.6101212", "0.60999984", "0.6097255", "0.6093705", "0.6087391", "0.60836005", "0.6081218", "0.60774505", "0.6071859", "0.6037898", "0.6036325", "0.6036325", "0.60246277", "0.60171604", "0.60038084", "0.6000166", "0.5991118", "0.59891015", "0.5985999", "0.59822875", "0.5967685", "0.5959902", "0.5959734", "0.59573627", "0.5954868", "0.5934783", "0.5913976", "0.5910642", "0.59095323", "0.5903416", "0.589284", "0.5880374", "0.5860184", "0.58465505", "0.5838734", "0.58289903", "0.58289903", "0.5822166", "0.58127725", "0.58116287", "0.580642", "0.5806218", "0.5795891", "0.5791988", "0.57916343", "0.57900494", "0.57893115", "0.57844126", "0.57789654", "0.57780576", "0.5777528" ]
0.78945726
0
Nest allows you to nest steps hierarchically on the build UI. Calling ```python
def nest(self, name):
    step_result = self(name, [])
    with self.m.context(name_prefix=name, increment_nest_level=True):
        yield step_result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_step(self):\n pass", "def build_step(self):\n pass", "def test_build_page_nested(build_resources, cli):\n books, _ = build_resources\n src = books.joinpath(\"nested\")\n page = src.joinpath(\"contents\", \"markdown.md\")\n html = src.joinpath(\"_build\", \"_page\", \"contents-markdown\", \"html\")\n index = html.joinpath(\"index.html\")\n result = cli.invoke(commands.build, [page.as_posix(), \"-n\", \"-W\", \"--keep-going\"])\n assert result.exit_code == 0, result.output\n assert html.joinpath(\"markdown.html\").exists()\n assert not html.joinpath(\"extra_page.html\").exists()\n assert 'url=markdown.html\" />' in index.read_text(encoding=\"utf8\")", "def build_step(self):\n\n pass", "def test_run_tempest(self, tempest_actions, show_step, _):\n show_step(1)\n tempest_actions.prepare_and_run_tempest()", "def stepStarted(build, step):", "def build_nested_blocks(self):\n pass", "def less_nested_example_vanilla():\n return", "def build(ctx: typer.Context):\n from .tasks import build, main\n\n sys.argv = sys.argv[:1] + (ctx.args or [\"list\"])\n main(vars(build))", "def build(parameters):\n\n\n print(\"In Build module\")", "def actionBuild():\n\n #Init builder logger\n Builder.init()\n\n for target in Settings.targets:\n targetsToBuild, combineLibs, copyToOutput = Builder.getTargetGnPath(target)\n for platform in Settings.targetPlatforms:\n for cpu in Settings.targetCPUs:\n if System.checkIfCPUIsSupportedForPlatform(cpu,platform):\n for configuration in Settings.targetConfigurations:\n if not Summary.checkIfActionFailed(ACTION_PREPARE, target, platform, cpu, configuration):\n Logger.printStartActionMessage('Build ' + target + ' ' + platform + ' ' + cpu + ' ' + configuration,ColoredFormatter.YELLOW)\n result = Builder.run(target, targetsToBuild, platform, cpu, configuration, combineLibs, copyToOutput)\n Summary.addSummary(ACTION_BUILD, target, platform, cpu, configuration, result, Builder.executionTime)\n if result != NO_ERROR:\n Logger.printEndActionMessage('Failed building ' + target + ' ' + platform + ' ' + cpu + ' ' + configuration,ColoredFormatter.RED)\n #Terminate script execution if stopExecutionOnError is set to True in userdef\n shouldEndOnError(result)\n else:\n Logger.printEndActionMessage('Build ' + target + ' ' + platform + ' ' + cpu + ' ' + configuration)\n else:\n Logger.printColorMessage('Build cannot run because preparation has failed for ' + target + ' ' + platform + ' ' + cpu + ' ' + configuration,ColoredFormatter.YELLOW)\n Logger.printEndActionMessage('Build not run for ' + target + ' ' + platform + ' ' + cpu + ' ' + configuration,ColoredFormatter.YELLOW)", "def build_root(event):\n mode = event.parameters['mode']\n if mode == 'from config':\n config = event.parameters['config']\n\n elif mode == 'from template':\n manager = event.workbench.get_plugin('exopy.tasks')\n view = TemplateSelector(event.parameters.get('widget'),\n manager=manager)\n result = view.exec_()\n if result:\n path = view.path\n config, _ = load_template(path)\n\n else:\n msg = 'Invalid mode (%s) for build_root. 
Valid ones are : %s'\n raise ValueError(msg % (mode, ('from config', 'from template')))\n\n if config:\n build_dep = event.parameters.get('build_dep', event.workbench)\n return build_task_from_config(config, build_dep, True)\n\n else:\n raise RuntimeError('No config for building')", "def show_build_order(c, ignore=False, update=False):\n\n print(\" # Add this to invoke.yaml\")\n print(\" build_order:\")\n for p in _build_order(c, ignore=ignore, update=update):\n print(f\" - {p}\")\n\n print(\"\")", "def main():\n logging.basicConfig(\n level=logging.DEBUG, format=\"%(levelname)s: %(message)s\")\n root.add_command(all_)\n root.add_command(build)\n root.add_command(cleanup)\n root.add_command(clitest)\n root.add_command(run)\n root.add_command(push)\n root.add_command(unittest)\n root.add_command(list_images)\n root.add_command(list_stages)\n root()", "def less_nested_example_rst():\n\n return", "def build_step(self, signals):\n raise BuildError(\"OpBuilders must implement a `build_step` function\")", "def build(root):", "def tree(ctx):\n hokusai.print_command_tree(ctx.find_root().command)", "def test_get_scenarios_expanded(self):\n pass", "def test_python(width=10):\n\n stage_1 = [diamond(sleep=60, inputs=[0])]\n\n stage_2 = []\n for i in range(0, width):\n stage_2.extend([diamond(sleep=20, inputs=stage_1)])\n\n stage_3 = [diamond(sleep=30, inputs=stage_2)]\n\n if not stage_3[0].done():\n time.sleep(30)\n for sitename in dfk.executors:\n print(dfk.executors[sitename].status())", "def ui_root1():\n return send_build()", "def run_steps(properties, stream_engine, step_runner, universe_view,\n engine_flags=None, emit_initial_properties=False):\n with stream_engine.make_step_stream('setup_build') as s:\n if emit_initial_properties:\n for key in sorted(properties.iterkeys()):\n s.set_build_property(key, json.dumps(properties[key], sort_keys=True))\n\n engine = RecipeEngine(\n step_runner, properties, os.environ, universe_view, engine_flags)\n\n # Create all API modules and top level RunSteps function. 
It doesn't launch\n # any recipe code yet; RunSteps needs to be called.\n api = None\n\n assert 'recipe' in properties\n recipe = properties['recipe']\n\n root_package = universe_view.universe.package_deps.root_package\n run_recipe_help_lines = [\n 'To repro this locally, run the following line from the root of a %r'\n ' checkout:' % (root_package.name),\n '',\n '%s run --properties-file - %s <<EOF' % (\n os.path.join( '.', root_package.relative_recipes_dir, 'recipes.py'),\n recipe),\n '%s' % json.dumps(properties),\n 'EOF',\n '',\n 'To run on Windows, you can put the JSON in a file and redirect the',\n 'contents of the file into run_recipe.py, with the < operator.',\n ]\n\n with s.new_log_stream('run_recipe') as l:\n for line in run_recipe_help_lines:\n l.write_line(line)\n\n # Find and load the recipe to run.\n try:\n recipe_script = universe_view.load_recipe(recipe, engine=engine)\n s.write_line('Running recipe with %s' % (properties,))\n\n api = loader.create_recipe_api(\n universe_view.universe.package_deps.root_package,\n recipe_script.LOADED_DEPS,\n recipe_script.path,\n engine,\n recipe_test_api.DisabledTestData())\n\n s.add_step_text('running recipe: \"%s\"' % recipe)\n except (loader.LoaderError, ImportError, AssertionError) as e:\n for line in str(e).splitlines():\n s.add_step_text(line)\n s.set_step_status('EXCEPTION')\n if engine_flags and engine_flags.use_result_proto:\n return result_pb2.Result(\n failure=result_pb2.Failure(\n human_reason=str(e),\n exception=result_pb2.Exception(\n traceback=traceback.format_exc().splitlines()\n )))\n return RecipeResult({\n 'status_code': 2,\n 'reason': str(e),\n })\n\n # The engine will use step_runner to run the steps, and the step_runner in\n # turn uses stream_engine internally to build steam steps IO.\n return engine.run(recipe_script, api)", "def build(ctx):\n ctx.run(\"vsce package\", replace_env=False)", "def test_sections_json_spider_three_levels_with_summary_and_call(self):\n title = (\"Taking Action for the Social and Emotional Health of \"\n\t \"Young Children: A Report to the Community from the Denver \"\n\t\t \"Early Childhood Council\")\n\tsummary = (\"Now, Denver has a plan of action to make it easier for \"\n\t \"families to access early childhood mental health \"\n\t\t \"information, intervention and services.\")\n\tcall_to_action = (\"Test call to action.\")\n\tbyline = \"Denver Early Childhood Council\"\n story = create_story(title=title, summary=summary, byline=byline,\n\t\t\t call_to_action=call_to_action)\n section1 = create_section(\"We're ready to take action. 
Are you?\",\n\t\t\t story=story, weight=7)\n\tsection2 = create_section(\"Ricardo's Story\",\n\t\t\t story=story, weight=2)\n\tsection3 = create_section(\"Meeting the need for better child mental health services\",\n\t\t\t story=story, root=True, weight=1)\n\tsection4 = create_section(\"Healthy Minds Support Strong Futures\",\n\t\t\t story=story, weight=5) \n\tsection5 = create_section(\"Community Voices\",\n\t\t\t story=story, weight=3)\n\tsection6 = create_section(\"Our Vision: That All Children in Denver are Valued, Healthy and Thriving\",\n\t\t\t story=story, weight=4)\n\tsection7 = create_section(\"Defining a \\\"Framework for Change\\\" with Actionable Goals and Strategies\",\n\t\t\t story=story, weight=5) \n section8 = create_section(\"How Can the Plan Make a Difference?\",\n\t\t\t story=story, weight=5)\n\tsection9 = create_section(\"Impact\", story=story, weight=6)\n SectionRelation.objects.create(parent=section6, child=section8,\n weight=0)\n SectionRelation.objects.create(parent=section7, child=section9,\n weight=0)\n SectionRelation.objects.create(parent=section6, child=section7,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section1,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section6,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section4,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section5,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section2,\n weight=0)\n\tjson_sections = simplejson.loads(story.structure.sections_json(\n\t\tinclude_summary=True, include_call_to_action=True))\n\tself.assertIn(\n\t section8.section_id,\n\t self._get_section(json_sections, section6.section_id)['children'])\n\tself.assertIn(\n\t section9.section_id,\n\t self._get_section(json_sections, section7.section_id)['children'])\n\tself.assertIn(\n\t section7.section_id,\n\t self._get_section(json_sections, section6.section_id)['children'])\n\tself.assertIn(\n\t section1.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section6.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section4.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section5.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section2.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertEqual(json_sections[0]['section_id'], 'summary')\n\tself.assertEqual(json_sections[0]['next_section_id'], \n\t\t\t json_sections[1]['section_id'])\n\tself.assertEqual(json_sections[1]['previous_section_id'], 'summary')\n\tself.assertEqual(json_sections[-1]['section_id'], 'call-to-action')\n\tself.assertEqual(json_sections[-1]['previous_section_id'], \n\t\t\t json_sections[-2]['section_id'])\n\tself.assertEqual(json_sections[-2]['next_section_id'], 'call-to-action')", "def start(context, project_name):\n\n gcc_version = '10-2020-q4-major-'\n os_extension = ''\n\n if platform.system() == 'Linux':\n if platform.machine() == 'x86_64':\n os_extension = 'x86_64-linux'\n else:\n os_extension = 'aarch64-linux'\n elif platform.system() == 'Darwin':\n os_extension = 'mac'\n elif platform.system() == 'Windows':\n os_extension = 'win32'\n\n final_branch_name = f'{gcc_version}{os_extension}'\n\n if not os_extension:\n click.secho(f'This system {platform.system()}:{platform.machine()} ' +\n 
'is not supported for SJSU-Dev2 ', fg='red', bold=True)\n return -1\n\n click.secho(f'Creating project: {project_name}', fg='white', bold=True)\n Path(project_name).mkdir()\n\n click.echo(f' Creating \"{project_name}/.sj2\" directory')\n Path(f'{project_name}/.sj2').mkdir(exist_ok=True)\n Path(f'{project_name}/.sj2/reserved').touch(exist_ok=True)\n\n click.echo(f' Creating \"{project_name}/library\" directory')\n Path(f'{project_name}/library').mkdir(exist_ok=True)\n\n click.echo(f' Creating \"{project_name}/packages\" directory')\n Path(f'{project_name}/packages').mkdir(exist_ok=True)\n\n click.echo(f' Creating \"{project_name}/main.cpp\" source file')\n Path(f'{project_name}/main.cpp').write_text(BASIC_MAIN_CPP)\n\n click.echo('')\n\n context.invoke(install, library='libcore', tag='main',\n project_directory=project_name)\n context.invoke(install, library='libarmcortex',\n tag='main', project_directory=project_name)\n context.invoke(install, library='liblpc40xx', tag='main',\n project_directory=project_name)\n context.invoke(install, library='libstm32f10x',\n tag='main', project_directory=project_name)\n context.invoke(install, library='gcc-arm-none-eabi-picolibc',\n tag=final_branch_name, project_directory=project_name)", "def explore(self, board, args):\n self.tree.explore(board, *args)", "def run_steps(stream, build_properties, factory_properties,\n test_data=recipe_test_api.DisabledTestData()):\n stream.honor_zero_return_code()\n\n # TODO(iannucci): Stop this when blamelist becomes sane data.\n if ('blamelist_real' in build_properties and\n 'blamelist' in build_properties):\n build_properties['blamelist'] = build_properties['blamelist_real']\n del build_properties['blamelist_real']\n\n properties = factory_properties.copy()\n properties.update(build_properties)\n\n # TODO(iannucci): A much better way to do this would be to dynamically\n # detect if the mirrors are actually available during the execution of the\n # recipe.\n if ('use_mirror' not in properties and (\n 'TESTING_MASTERNAME' in os.environ or\n 'TESTING_SLAVENAME' in os.environ)):\n properties['use_mirror'] = False\n\n # It's an integration point with a new recipe engine that can run steps\n # in parallel (that is not implemented yet). 
Use new engine only if explicitly\n # asked by setting 'engine' property to 'ParallelRecipeEngine'.\n engine = RecipeEngine.create(stream, properties, test_data)\n\n # Create all API modules and an instance of top level GenSteps generator.\n # It doesn't launch any recipe code yet (generator needs to be iterated upon\n # to start executing code).\n api = None\n with stream.step('setup_build') as s:\n assert 'recipe' in factory_properties\n recipe = factory_properties['recipe']\n\n properties_to_print = properties.copy()\n if 'use_mirror' in properties:\n del properties_to_print['use_mirror']\n\n run_recipe_help_lines = [\n 'To repro this locally, run the following line from a build checkout:',\n '',\n './scripts/tools/run_recipe.py %s --properties-file - <<EOF' % recipe,\n repr(properties_to_print),\n 'EOF',\n '',\n 'To run on Windows, you can put the JSON in a file and redirect the',\n 'contents of the file into run_recipe.py, with the < operator.',\n ]\n\n for line in run_recipe_help_lines:\n s.step_log_line('run_recipe', line)\n s.step_log_end('run_recipe')\n\n try:\n recipe_module = recipe_loader.load_recipe(recipe)\n stream.emit('Running recipe with %s' % (properties,))\n api = recipe_loader.create_recipe_api(recipe_module.DEPS,\n engine,\n test_data)\n steps = recipe_module.GenSteps\n s.step_text('<br/>running recipe: \"%s\"' % recipe)\n except recipe_loader.NoSuchRecipe as e:\n s.step_text('<br/>recipe not found: %s' % e)\n s.step_failure()\n return RecipeExecutionResult(2, None)\n\n # Run the steps emitted by a recipe via the engine, emitting annotations\n # into |stream| along the way.\n return engine.run(steps, api)", "def stage(self, stage: osbuild.Stage):", "def test_quick_build1(self):\n pass", "def expand_tasks_with_samples( # pylint: disable=R0913,R0914\n self,\n dag,\n chain_,\n samples,\n labels,\n task_type,\n adapter_config,\n level_max_dirs,\n):\n LOG.debug(f\"expand_tasks_with_samples called with chain,{chain_}\\n\")\n # Figure out how many directories there are, make a glob string\n directory_sizes = uniform_directories(len(samples), bundle_size=1, level_max_dirs=level_max_dirs)\n\n glob_path = \"*/\" * len(directory_sizes)\n\n LOG.debug(\"creating sample_index\")\n # Write a hierarchy to get the all paths string\n sample_index = create_hierarchy(\n len(samples),\n bundle_size=1,\n directory_sizes=directory_sizes,\n root=\"\",\n n_digits=len(str(level_max_dirs)),\n )\n\n LOG.debug(\"creating sample_paths\")\n sample_paths = sample_index.make_directory_string()\n\n LOG.debug(\"assembling steps\")\n # the steps in the chain\n steps = [dag.step(name) for name in chain_]\n\n # sub in globs prior to expansion\n # sub the glob command\n steps = [\n step.clone_changing_workspace_and_cmd(cmd_replacement_pairs=parameter_substitutions_for_cmd(glob_path, sample_paths))\n for step in steps\n ]\n\n # workspaces = [step.get_workspace() for step in steps]\n # LOG.debug(f\"workspaces : {workspaces}\")\n\n needs_expansion = is_chain_expandable(steps, labels)\n\n LOG.debug(f\"needs_expansion {needs_expansion}\")\n\n if needs_expansion:\n # prepare_chain_workspace(sample_index, steps)\n sample_index.name = \"\"\n LOG.debug(\"queuing merlin expansion tasks\")\n found_tasks = False\n conditions = [\n lambda c: c.is_great_grandparent_of_leaf,\n lambda c: c.is_grandparent_of_leaf,\n lambda c: c.is_parent_of_leaf,\n lambda c: c.is_leaf,\n ]\n for condition in conditions:\n if not found_tasks:\n for next_index_path, next_index in sample_index.traverse(conditional=condition):\n LOG.info(\n 
f\"generating next step for range {next_index.min}:{next_index.max} {next_index.max-next_index.min}\"\n )\n next_index.name = next_index_path\n\n sig = add_merlin_expanded_chain_to_chord.s(\n task_type,\n steps,\n samples[next_index.min : next_index.max],\n labels,\n next_index,\n adapter_config,\n next_index.min,\n )\n sig.set(queue=steps[0].get_task_queue())\n\n if self.request.is_eager:\n sig.delay()\n else:\n LOG.info(f\"queuing expansion task {next_index.min}:{next_index.max}\")\n self.add_to_chord(sig, lazy=False)\n LOG.info(f\"merlin expansion task {next_index.min}:{next_index.max} queued\")\n found_tasks = True\n else:\n LOG.debug(\"queuing simple chain task\")\n add_simple_chain_to_chord(self, task_type, steps, adapter_config)\n LOG.debug(\"simple chain task queued\")", "def run():\n from cgl.plugins.blender.tasks.rig import parent_mdl_to_rig\n parent_mdl_to_rig()", "def test_quick_build(self):\n pass", "def main():\n parser = argparse.ArgumentParser(\n epilog=main.__doc__, formatter_class=argparse.RawDescriptionHelpFormatter\n )\n parser.add_argument(\n \"-d\", \"--dry-run\", action=\"store_true\", default=0, help=\"Dry run mode.\"\n )\n parser.add_argument(\n \"-v\",\n \"--verbose\",\n action=\"count\",\n default=0,\n help=\"Verbosity. Default is WARNING level.\",\n )\n\n subparsers = parser.add_subparsers(help=\"Sub commands\", dest=\"subparser\")\n subparsers.required = True\n\n build_parser = subparsers.add_parser(\n \"build\",\n description=\"Build an image from Dockerfile, caching image hierarchy\",\n help=\"Build an image from a Dockerfile\",\n )\n build_parser.add_argument(\n \"path\", metavar=\"PATH\", help=\"The build context directory\"\n )\n build_parser.add_argument(\n \"-f\",\n \"--file\",\n help=\"Name of the Dockerfile. If not provided, \"\n \"will use config.DOCKERFILE_PATH_PATTERN to compute. \",\n )\n build_parser.add_argument(\n \"-v\",\n \"--git-sha\",\n required=True,\n help=\"The version of code to build against, \" \"will pass as GIT_SHA variable\",\n )\n build_parser.add_argument(\n \"-n\", \"--name\", required=True, help=\"The name of the image to build\"\n )\n build_parser.add_argument(\n \"--build-arg\",\n metavar=\"ARG=VALUE\",\n nargs=\"*\",\n default=[],\n help=\"Set extra build-time variables. GIT_SHA, TIMESTAMP will be passed by default.\",\n )\n build_parser.add_argument(\n \"-r\",\n \"--raw\",\n action=\"store_true\",\n help=\"Whether to use raw docker build command to build, skipping caching logic\",\n )\n build_parser.add_argument(\n \"--registry\",\n default=config.DOCKER_REGISTRY,\n help=\"Docker registry use to determine the image identity, \"\n \"can be set via IMAGE_BUILDER_DOCKER_REGISTRY environment variable, \"\n 'or set DOCKER_REGISTRY in config.py. Default is \"%(default)s\"',\n )\n build_parser.add_argument(\n \"-t\",\n \"--tag-pattern\",\n default=config.GIT_SHA_TAG_PATTERN,\n help=\"Tag pattern, can only include one `{git_sha}` placeholder, \"\n 'such as \"{git_sha}-new\". If the tag exists, we won\\'t rebuild it. 
'\n 'Default is \"%(default)s\"',\n )\n build_parser.add_argument(\n \"-e\",\n \"--extra-tag\",\n nargs=\"*\",\n default=[],\n help=\"Extra tags to tag to the final images\",\n )\n build_parser.add_argument(\n \"--extra-name\",\n nargs=\"*\",\n default=[],\n help=\"Extra name and optionally with a tag in the 'name:tag' format\",\n )\n build_parser.add_argument(\n \"-o\", \"--output-hash\", help=\"The output filename of the files hash log.\"\n )\n build_parser.set_defaults(func=build)\n\n args = parser.parse_args()\n if args.dry_run:\n # DRY_RUN env will be read in image_builder.libs.process\n os.environ[\"DRY_RUN\"] = \"1\"\n\n if args.func == build:\n args.path = expand_path(args.path)\n if args.output_hash:\n args.output_hash = expand_path(args.output_hash)\n\n args.file = args.file or locate_dockerfile(args.name)\n args.file = expand_path(args.file)\n # set environ for main dockerfile for possibly retrieving later\n os.environ[\n config.DOCKERFILE_ENV_PATTERN.format(image_name=args.name)\n ] = args.file\n\n # change CWD to PATH\n os.chdir(args.path)\n\n if not args.registry:\n parser.error(\n \"--registry should be provied \"\n \"or specified by IMAGE_BUILDER_DOCKER_REGISTRY environment variable or set DOCKER_REGISTRY in config.py\"\n )\n if not all(\"=\" in kv for kv in args.build_arg):\n parser.error(\"--build_arg must be in ARG=VALUE format\")\n\n # set git_sha_tag\n try:\n args.git_sha_tag = args.tag_pattern.format(git_sha=args.git_sha)\n except KeyError:\n parser.error(\n 'Wrong --tag-pattern provided. Can only include one `{git_sha}` placeholder, such as \"{git_sha}-new\"'\n )\n\n # setup logging\n level = logging.WARNING - args.verbose * 10\n logging.basicConfig(\n level=level, format=\"%(asctime)s %(name)s %(levelname)s %(message)s\"\n )\n\n if args.output_hash:\n h = logging.FileHandler(args.output_hash)\n h.setLevel(logging.DEBUG)\n h.setFormatter(logging.Formatter(\"%(message)s\"))\n hash_logger.addHandler(h)\n\n # Suppress warning when we don't verify ssl\n import urllib3\n\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n\n return args.func(args)", "def build(c, force=None):\n for sp_ns in ns_foreach_task_subdir(c):\n print(\"-- running build in \", os.getcwd())\n\n # sp_ns.tasks.build(c, force)\n c.run('invoke build')", "def test_documentation_popxl_nested_session_contexts(self):\n filename = \"nested_session_contexts.py\"\n self.run_python(filename, file_dir=working_dir, working_dir=working_dir)", "def tfrun(args, build_modules, build_workspace, build_env):\n\n # loop through each selected module(s) and apply the action as specified by user\n for m in build_modules:\n print(\"\\n\\n****************************************************************************\")\n print(\"Permforming action \\\"{0}\\\" for module {1}\".format(args.action, m))\n print(\"****************************************************************************\\n\\n\")\n run_module(args, m, build_workspace, build_env)", "def test_documentation_popxl_nested_code_loading(self):\n filename = \"code_loading_nested.py\"\n self.run_python(filename, file_dir=working_dir, working_dir=working_dir)", "def ninja_simulate_block():\n return '''\n```sh\n# In build directory\nninja && ./simulate\n```'''", "def run():\n build_no_documentation()\n build_sphinx_build()\n #build_sphinx_pdf()\n build_graphviz_files()", "def buildStarted(self, name, build):\n if self.isInterestingBuilder(name):\n return self", "def test_build(self):\n self.app.build()", "def step_impl(context):\n pass", "def 
step_impl(context):\n pass", "def process_steps(steplist, build, buildslave, build_status, basedir):\n for step in steplist:\n step.setBuild(build)\n step.setBuildSlave(buildslave)\n step.setStepStatus(build_status.addStepWithName(step.name))\n step.setDefaultWorkdir(os.path.join(basedir, 'build'))\n step.workdir = os.path.join(basedir, 'build')", "def build(_):", "def nested(*contexts):\n with ExitStack() as stack:\n for ctx in contexts:\n stack.enter_context(ctx())\n yield contexts", "def do_build(self):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tcfg = self.cfg\n\t\tself.log('PHASE: build, repository work', level=logging.DEBUG)\n\t\tmodule_id_list = self.module_ids()\n\t\tif self.build['deps_only']:\n\t\t\tmodule_id_list_build_only = filter(lambda x: cfg[x]['shutit.core.module.build'], module_id_list)\n\t\tfor module_id in module_id_list:\n\t\t\tmodule = self.shutit_map[module_id]\n\t\t\tself.log('Considering whether to build: ' + module.module_id, level=logging.INFO)\n\t\t\tif cfg[module.module_id]['shutit.core.module.build']:\n\t\t\t\tif self.build['delivery'] not in module.ok_delivery_methods:\n\t\t\t\t\tself.fail('Module: ' + module.module_id + ' can only be built with one of these --delivery methods: ' + str(module.ok_delivery_methods) + '\\nSee shutit build -h for more info, or try adding: --delivery <method> to your shutit invocation') # pragma: no cover\n\t\t\t\tif self.is_installed(module):\n\t\t\t\t\tself.build['report'] = (self.build['report'] + '\\nBuilt already: ' + module.module_id + ' with run order: ' + str(module.run_order))\n\t\t\t\telse:\n\t\t\t\t\t# We move to the module directory to perform the build, returning immediately afterwards.\n\t\t\t\t\tif self.build['deps_only'] and module_id == module_id_list_build_only[-1]:\n\t\t\t\t\t\t# If this is the last module, and we are only building deps, stop here.\n\t\t\t\t\t\tself.build['report'] = (self.build['report'] + '\\nSkipping: ' + module.module_id + ' with run order: ' + str(module.run_order) + '\\n\\tas this is the final module and we are building dependencies only')\n\t\t\t\t\telse:\n\t\t\t\t\t\trevert_dir = os.getcwd()\n\t\t\t\t\t\tself.get_current_shutit_pexpect_session_environment().module_root_dir = os.path.dirname(self.shutit_file_map[module_id])\n\t\t\t\t\t\tself.chdir(self.get_current_shutit_pexpect_session_environment().module_root_dir)\n\t\t\t\t\t\tself.login(prompt_prefix=module_id,command=shutit_global.shutit_global_object.bash_startup_command,echo=False)\n\t\t\t\t\t\tself.build_module(module)\n\t\t\t\t\t\tself.logout(echo=False)\n\t\t\t\t\t\tself.chdir(revert_dir)\n\t\t\tif self.is_installed(module):\n\t\t\t\tself.log('Starting module',level=logging.DEBUG)\n\t\t\t\tif not module.start(self):\n\t\t\t\t\tself.fail(module.module_id + ' failed on start', shutit_pexpect_child=self.get_shutit_pexpect_session_from_id('target_child').pexpect_child) # pragma: no cover", "def test_sections_json_spider_three_levels_with_summary_and_call(self):\n title = (\"Taking Action for the Social and Emotional Health of \"\n \"Young Children: A Report to the Community from the \" \n\t\t \"Denver Early Childhood Council\")\n summary = (\"Now, Denver has a plan of action to make it easier \"\n \"for families to access early childhood mental health \"\n \"information, intervention and services.\")\n call_to_action = (\"Test call to action.\")\n byline = \"Denver Early Childhood Council\"\n story = create_story(title=title, summary=summary, byline=byline,\n\t\t\t call_to_action=call_to_action)\n layout = 
SectionLayout.objects.get(sectionlayouttranslation__name=\"Side by Side\")\n section1 = create_section(\"We're ready to take action. Are you?\",\n\t\t\t story=story, layout=layout, weight=7)\n section2 = create_section(\"Ricardo's Story\",\n\t\t\t story=story, layout=layout, weight=2)\n section3 = create_section(\"Meeting the need for better child mental health services\",\n\t\t\t story=story, layout=layout, root=True, weight=1)\n section4 = create_section(\"Healthy Minds Support Strong Futures\",\n\t\t\t story=story, layout=layout, weight=5) \n section5 = create_section(\"Community Voices\",\n\t\t\t story=story, layout=layout, weight=3)\n section6 = create_section(\"Our Vision: That All Children in Denver are Valued, Healthy and Thriving\",\n\t\t\t story=story, layout=layout, weight=4)\n section7 = create_section(\"Defining a \\\"Framework for Change\\\" with Actionable Goals and Strategies\",\n\t\t\t story=story, layout=layout, weight=5) \n section8 = create_section(\"How Can the Plan Make a Difference?\",\n\t\t\t story=story, layout=layout, weight=5)\n section9 = create_section(\"Impact\", \n story=story, layout=layout, weight=6)\n SectionRelation.objects.create(parent=section6, child=section8,\n weight=0)\n SectionRelation.objects.create(parent=section7, child=section9,\n weight=0)\n SectionRelation.objects.create(parent=section6, child=section7,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section1,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section6,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section4,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section5,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section2,\n weight=0)\n json_sections = simplejson.loads(story.structure.sections_json(\n include_summary=True, include_call_to_action=True))\n self.assertIn(\n section8.section_id,\n self._get_section(json_sections, section6.section_id)['children'])\n self.assertIn(\n section9.section_id,\n self._get_section(json_sections, section7.section_id)['children'])\n self.assertIn(\n section7.section_id,\n self._get_section(json_sections, section6.section_id)['children'])\n self.assertIn(\n section1.section_id,\n self._get_section(json_sections, section3.section_id)['children'])\n self.assertIn(\n section6.section_id,\n self._get_section(json_sections, section3.section_id)['children'])\n self.assertIn(\n section4.section_id,\n self._get_section(json_sections, section3.section_id)['children'])\n self.assertIn(\n section5.section_id,\n self._get_section(json_sections, section3.section_id)['children'])\n self.assertIn(\n section2.section_id,\n self._get_section(json_sections, section3.section_id)['children'])\n self.assertEqual(json_sections[0]['section_id'], 'summary')\n self.assertEqual(json_sections[0]['next_section_id'], \n json_sections[1]['section_id'])\n self.assertEqual(json_sections[1]['previous_section_id'], 'summary')\n self.assertEqual(json_sections[-1]['section_id'], 'call-to-action')\n self.assertEqual(json_sections[-1]['previous_section_id'], \n json_sections[-2]['section_id'])\n self.assertEqual(json_sections[-2]['next_section_id'], 'call-to-action')", "def deploy_steps(self):\n return [ShellCommand(command=[\"node\", \"medic/build_\" + self.platform + \".js\"], workdir='build', timeout=CONFIG.build_timeout, description='Run tests', name='Run tests')]", "def build():", "def do_step(self) -> None:", "def build():\n local('wintersmith build')", "def import_ebuilds ( 
self, catview, **kwargs ):\n stats = self.STATS\n for eview in catview:\n self._get_package_dir ( eview.name ).import_ebuilds (\n eview, stats=stats, **kwargs\n )", "def stage_run(args):\n\n print \"stage_run args:\", args\n if args.phase == \"split\":\n adapters.split(args.stage, args)\n if args.phase == \"main\":\n adapters.main(args.stage, args)\n if args.phase == \"join\":\n\n #for key in args.__dict__:\n # if isinstance(getattr(args, key), list) and len(getattr(args, key)) == 1:\n # setattr(args, key, getattr(args, key)[0])\n\n adapters.join(args.stage, args)", "def test_explant(install_test_files, data_dir):\n with make_workdir() as workdir:\n cl = [\"bcbio_nextgen.py\",\n get_post_process_yaml(data_dir, workdir),\n os.path.join(data_dir, os.pardir, \"1_explant\"),\n os.path.join(data_dir, \"run_info-explant.yaml\")]\n subprocess.check_call(cl)", "def ninja_block():\n return '''\n```sh\n# In build directory\nninja\n```'''", "def run(self):\n self.render_templates()\n self.create_docker_context()\n self.build()\n self.push()", "def explore(self, *args):", "def step_by_step_instructions(element, sectionlevel=\"##\", to_markdown=safe_html_to_markdown):\n output = \"\"\n for step in element:\n assert step.tag.lower() == \"step\", \"instructions sections can only contain steps\"\n output += sectionlevel + \" Step\\n\"\n output += to_markdown(step.find(\"description\"))\n output += \"\\n\"\n output += media_section(step)\n output += \"\\n\\n\"\n return output", "def test_BuildModel3(self):\n print(\"\\nTest 7: Building a more complicated Model\")\n builder = StaticBuilder(\"BreakIt\")\n in1 = builder.addInput(10)\n in2 = builder.addInput(20)\n enc1 = builder.addInner(3)\n enc2 = builder.addInner(5, num_islots=2)\n out1 = builder.addOutput()\n out2 = builder.addOutput()\n \n builder.addDirectedLink(in1, enc1)\n builder.addDirectedLink(in2, enc2, islot=0)\n builder.addDirectedLink(enc1, enc2, islot=1)\n builder.addDirectedLink(enc1, out1)\n builder.addDirectedLink(enc2, out2)\n \n builder.build()", "def rf_stepMode(self, selTree, treeDict):\n for step in treeDict['steps']:\n newStep = TreeNode(nodeType='step', nodeLabel=step, nodeName=step)\n self.addTopLevelItem(newStep)\n for node in treeDict['tree']['_order']:\n newItem = TreeNode(**treeDict['tree'][node])\n if len(node.split('/')) == 1:\n newStep.addChild(newItem)\n else:\n rootPath = \"%s/%s\" % (step, '/'.join(node.split('/')[:-1]))\n parent = self._getItemFromTreePath(rootPath)\n parent.addChild(newItem)\n if getattr(newItem, 'nodeType') == 'shotNode':\n newItem._tree = selTree\n newItem._step = step\n newItem._itemPath = node\n newItem._dataPath = os.path.join(self.pm._treePath, selTree)\n for fld in node.split('/'):\n newItem._dataPath = os.path.join(newItem._dataPath, fld)\n newItem._dataPath = pFile.conformPath(newItem._dataPath)\n newItem._ltPath = pFile.conformPath(os.path.join(newItem._dataPath, 'lt', step))\n newItem._dataFile = \"%s.py\" % newItem._dataPath", "def run_module_structure_plan(args):\n step = args.step\n if step == \"generate_images\":\n StructureGenerator = dataset_builder.ImageStructureGenerator(\n input_directory=args.input_directory, output_directory=args.output_directory\n )\n StructureGenerator.run(n_jobs=args.n_jobs, starting_block=args.starting_block)\n elif step == \"generate_dataset\":\n DatasetGenerator = dataset_builder.DatasetGenerator(\n input_directory=args.input_directory, output_directory=args.output_directory\n )\n DatasetGenerator.generate_dataset()", "def make(repo, component, clean, install, 
install_prefix, storage_type, with_java):\n if clean:\n click.secho(\"Cleaning previous build.\", fg=\"green\")\n cmd = [\"make\", \"clean\"]\n run_shell_cmd(cmd, repo.home)\n return\n click.secho(\n \"Before making artifacts, please manually source ENVs from ~/.graphscope_env.\",\n fg=\"yellow\",\n )\n click.secho(\n f\"Begin the make command, to build components [{component}] of GraphScope, with repo = {repo.home}\",\n fg=\"green\",\n )\n cmd = []\n workingdir = repo.home\n if component == \"interactive\":\n click.secho(\"Building interactive engine.\", fg=\"green\")\n if storage_type == \"experimental\":\n cmd = [\"make\", \"build\", 'QUIET_OPT=\"\"']\n workingdir = os.path.join(repo.home, \"interactive_engine\", \"compiler\")\n if storage_type == \"vineyard\":\n cmd = [\n \"mvn\",\n \"install\",\n \"-DskipTests\",\n \"-Drust.compile.mode=release\",\n \"-P\",\n \"graphscope,graphscope-assembly\",\n ]\n workingdir = os.path.join(repo.home, \"interactive_engine\")\n run_shell_cmd(cmd, workingdir)\n cmd = [\"tar\", \"xvzf\", \"graphscope.tar.gz\"]\n workingdir = os.path.join(\n repo.home, \"interactive_engine\", \"assembly\", \"target\"\n )\n click.secho(f\"Begin to extract, from {workingdir}.\", fg=\"green\")\n run_shell_cmd(cmd, workingdir)\n click.secho(\"GraphScope interactive engine has been built.\", fg=\"green\")\n if install is True:\n cmd = [\n \"make\",\n \"interactive-install\",\n \"INSTALL_PREFIX={}\".format(install_prefix),\n ]\n run_shell_cmd(cmd, repo.home)\n click.secho(\n f\"GraphScope interactive engine has been installed to {install_prefix}.\",\n fg=\"green\",\n )\n\n if component == \"analytical\":\n cmd = [\"make\", \"analytical\"]\n if with_java:\n cmd = [\"make\", \"analytical-java\"]\n run_shell_cmd(cmd, repo.home)\n click.secho(\"GraphScope analytical engine has been built.\", fg=\"green\")\n if install is True:\n cmd = [\n \"make\",\n \"analytical-install\",\n \"INSTALL_PREFIX={}\".format(install_prefix),\n ]\n run_shell_cmd(cmd, repo.home)\n click.secho(\n f\"GraphScope analytical engine has been installed to {install_prefix}.\",\n fg=\"green\",\n )\n\n if component == \"client\":\n cmd = [\"make\", \"client\"]\n run_shell_cmd(cmd, repo.home)\n\n if component == \"coordinator\":\n cmd = [\"make\", \"coordinator\"]\n run_shell_cmd(cmd, repo.home)\n\n if component is None:\n click.secho(\"Building all components.\", fg=\"green\")\n cmd = [\"make\", \"all\"]\n if install is True:\n cmd = [\"make\", \"install\", \"INSTALL_PREFIX={}\".format(install_prefix)]\n run_shell_cmd(cmd, repo.home)", "def go_deeper(cls, *args, **kwargs):\n\t\treturn True", "def gen_stage_loop(cls, _opts, tests, put_next_stage, _put_result_stage):\n for test in tests:\n put_next_stage(test)", "def execute(self):\n if len(self._tree) > 0:\n return self._tour(self._tree.root(),0,[]) # start the recursion", "def step(self, **kwargs):\n pass", "def build(c, path=\"../..\", name=\"testapp\"):\n if not os.name in [\"nt\", \"posix\"]:\n print(\"Sorry. this only supports Posix (e.g. Linux, OSX) and Windows OS. \")\n sys.exit()\n\n path=os.path.normpath(path)\n print(\"Building : -n {} -p {} \".format(name, path))\n if os.path.exists(os.path.join(path, name)):\n print(\"sorry, path {} exists\".format(os.path.abspath(os.path.join(path, name))))\n r=input(\" .. type y or yes, to go ahead deleting the existing: {} ? : \".format(os.path.join(path,name)))\n if r in [\"y\", \"yes\"]:\n import shutil\n r=shutil.rmtree(os.path.join(path,name))\n print(40*\"-\")\n print(\" ..... 
deleted dir tree: {}\".format(os.path.join(path, name)))\n print(40*\"-\")\n build_all(c,name, path)\n else:\n print(40*\"-\")\n print(\" ok, exiting...\")\n print(40*\"-\")\n sys.exit()\n else:\n # start the build and check\n build_all(c,name, path)", "def render(self):\n self.env.render()\n #input(\"Press enter to take a step \")", "def build():\n conf_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'conf'))\n instance_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'instance'))\n outdir = os.path.join(conf_dir, '..', '_build')\n\n options_file = os.path.join(instance_dir, 'site.yaml')\n if not os.path.exists(options_file):\n click.echo('ERROR: Could not find %s' % options_file)\n click.echo('...a sample is located in `conf`')\n click.echo('...copy `conf/site.yaml` to your instance folder, and modify it as needed')\n raise click.Abort()\n\n options = ruamel.yaml.safe_load(open(options_file).read())\n\n if not os.path.isdir(outdir):\n os.makedirs(outdir)\n\n env = Environment(\n loader=FileSystemLoader(conf_dir),\n undefined=StrictUndefined)\n\n ###########################################################################\n click.echo('Creating `_build/invoicer-uwsgi.ini')\n template = env.get_template('invoicer-uwsgi.ini.j2')\n content = template.render(**options)\n with open(os.path.join(outdir, 'invoicer-uwsgi.ini'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################\n\n ###########################################################################\n click.echo('Creating `_build/invoicer-systemd.service')\n template = env.get_template('invoicer-systemd.service.j2')\n content = template.render(**options)\n with open(os.path.join(outdir, 'invoicer-systemd.service'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################\n\n ###########################################################################\n click.echo('Creating `_build/invoicer-upstream.nginx')\n template = env.get_template('invoicer-upstream.nginx.j2')\n content = template.render(**options)\n with open(os.path.join(outdir, 'invoicer-upstream.nginx'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n\n click.echo('Creating `_build/invoicer-location.nginx')\n template = env.get_template('invoicer-location.nginx.j2')\n content = template.render(**options)\n with open(os.path.join(outdir, 'invoicer-location.nginx'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################\n\n ###########################################################################\n click.echo('Creating `_build/fail2ban/filter.d/invoicer.local')\n f2b_filter_outdir = os.path.join(outdir, 'fail2ban', 'filter.d')\n if not os.path.isdir(f2b_filter_outdir):\n os.makedirs(f2b_filter_outdir)\n\n template = env.get_template('fail2ban/filter.d/invoicer.local.j2')\n content = template.render(**options)\n with open(os.path.join(f2b_filter_outdir, 'invoicer.local'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################\n\n ###########################################################################\n click.echo('Creating `_build/fail2ban/jail.d/invoicer.local')\n f2b_filter_outdir = os.path.join(outdir, 'fail2ban', 'jail.d')\n if not os.path.isdir(f2b_filter_outdir):\n os.makedirs(f2b_filter_outdir)\n\n template = 
env.get_template('fail2ban/jail.d/invoicer.local.j2')\n content = template.render(**options)\n with open(os.path.join(f2b_filter_outdir, 'invoicer.local'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################\n\n ###########################################################################\n click.echo('Creating `_build/deploy.bash')\n template = env.get_template('deploy.bash.j2')\n content = template.render(**options)\n with open(os.path.join(outdir, 'deploy.bash'), 'w') as fh:\n fh.write(content)\n click.echo('...done')\n ###########################################################################", "def _close_through_level(self, level):\n while self._step_stack and self._step_stack[-1].config.nest_level >= level:\n cur = self._step_stack.pop()\n if cur.step_result:\n cur.step_result.presentation.finalize(cur.open_step.stream)\n cur.open_step.finalize()", "def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0,\n\tfetchonly=0, cleanup=0, dbkey=None, use_cache=1, fetchall=0, tree=None,\n\tmydbapi=None, vartree=None, prev_mtimes=None,\n\tfd_pipes=None, returnpid=False):\n\n\tif settings is None:\n\t\traise TypeError(\"settings parameter is required\")\n\tmysettings = settings\n\tmyroot = settings['EROOT']\n\n\tif _unused is not None and _unused != mysettings['EROOT']:\n\t\twarnings.warn(\"The third parameter of the \"\n\t\t\t\"portage.doebuild() is now unused. Use \"\n\t\t\t\"settings['ROOT'] instead.\",\n\t\t\tDeprecationWarning, stacklevel=2)\n\n\tif not tree:\n\t\twritemsg(\"Warning: tree not specified to doebuild\\n\")\n\t\ttree = \"porttree\"\n\t\n\t# chunked out deps for each phase, so that ebuild binary can use it \n\t# to collapse targets down.\n\tactionmap_deps={\n\t\"pretend\" : [],\n\t\"setup\": [\"pretend\"],\n\t\"unpack\": [\"setup\"],\n\t\"prepare\": [\"unpack\"],\n\t\"configure\": [\"prepare\"],\n\t\"compile\":[\"configure\"],\n\t\"test\": [\"compile\"],\n\t\"install\":[\"test\"],\n\t\"rpm\": [\"install\"],\n\t\"package\":[\"install\"],\n\t\"merge\" :[\"install\"],\n\t}\n\t\n\tif mydbapi is None:\n\t\tmydbapi = portage.db[myroot][tree].dbapi\n\n\tif vartree is None and mydo in (\"merge\", \"qmerge\", \"unmerge\"):\n\t\tvartree = portage.db[myroot][\"vartree\"]\n\n\tfeatures = mysettings.features\n\n\tclean_phases = (\"clean\", \"cleanrm\")\n\tvalidcommands = [\"help\",\"clean\",\"prerm\",\"postrm\",\"cleanrm\",\"preinst\",\"postinst\",\n\t \"config\", \"info\", \"setup\", \"depend\", \"pretend\",\n\t \"fetch\", \"fetchall\", \"digest\",\n\t \"unpack\", \"prepare\", \"configure\", \"compile\", \"test\",\n\t \"install\", \"rpm\", \"qmerge\", \"merge\",\n\t \"package\",\"unmerge\", \"manifest\"]\n\n\tif mydo not in validcommands:\n\t\tvalidcommands.sort()\n\t\twritemsg(\"!!! doebuild: '%s' is not one of the following valid commands:\" % mydo,\n\t\t\tnoiselevel=-1)\n\t\tfor vcount in range(len(validcommands)):\n\t\t\tif vcount%6 == 0:\n\t\t\t\twritemsg(\"\\n!!! \", noiselevel=-1)\n\t\t\twritemsg(validcommands[vcount].ljust(11), noiselevel=-1)\n\t\twritemsg(\"\\n\", noiselevel=-1)\n\t\treturn 1\n\n\tif returnpid and mydo != 'depend':\n\t\twarnings.warn(\"portage.doebuild() called \" + \\\n\t\t\t\"with returnpid parameter enabled. 
This usage will \" + \\\n\t\t\t\"not be supported in the future.\",\n\t\t\tDeprecationWarning, stacklevel=2)\n\n\tif mydo == \"fetchall\":\n\t\tfetchall = 1\n\t\tmydo = \"fetch\"\n\n\tparallel_fetchonly = mydo in (\"fetch\", \"fetchall\") and \\\n\t\t\"PORTAGE_PARALLEL_FETCHONLY\" in mysettings\n\n\tif mydo not in clean_phases and not os.path.exists(myebuild):\n\t\twritemsg(\"!!! doebuild: %s not found for %s\\n\" % (myebuild, mydo),\n\t\t\tnoiselevel=-1)\n\t\treturn 1\n\n\tglobal _doebuild_manifest_cache\n\tpkgdir = os.path.dirname(myebuild)\n\tmanifest_path = os.path.join(pkgdir, \"Manifest\")\n\tif tree == \"porttree\":\n\t\trepo_config = mysettings.repositories.get_repo_for_location(\n\t\t\tos.path.dirname(os.path.dirname(pkgdir)))\n\telse:\n\t\trepo_config = None\n\n\tmf = None\n\tif \"strict\" in features and \\\n\t\t\"digest\" not in features and \\\n\t\ttree == \"porttree\" and \\\n\t\tnot repo_config.thin_manifest and \\\n\t\tmydo not in (\"digest\", \"manifest\", \"help\") and \\\n\t\tnot portage._doebuild_manifest_exempt_depend and \\\n\t\tnot (repo_config.allow_missing_manifest and not os.path.exists(manifest_path)):\n\t\t# Always verify the ebuild checksums before executing it.\n\t\tglobal _doebuild_broken_ebuilds\n\n\t\tif myebuild in _doebuild_broken_ebuilds:\n\t\t\treturn 1\n\n\t\t# Avoid checking the same Manifest several times in a row during a\n\t\t# regen with an empty cache.\n\t\tif _doebuild_manifest_cache is None or \\\n\t\t\t_doebuild_manifest_cache.getFullname() != manifest_path:\n\t\t\t_doebuild_manifest_cache = None\n\t\t\tif not os.path.exists(manifest_path):\n\t\t\t\tout = portage.output.EOutput()\n\t\t\t\tout.eerror(_(\"Manifest not found for '%s'\") % (myebuild,))\n\t\t\t\t_doebuild_broken_ebuilds.add(myebuild)\n\t\t\t\treturn 1\n\t\t\tmf = repo_config.load_manifest(pkgdir, mysettings[\"DISTDIR\"])\n\n\t\telse:\n\t\t\tmf = _doebuild_manifest_cache\n\n\t\ttry:\n\t\t\tmf.checkFileHashes(\"EBUILD\", os.path.basename(myebuild))\n\t\texcept KeyError:\n\t\t\tif not (mf.allow_missing and\n\t\t\t\tos.path.basename(myebuild) not in mf.fhashdict[\"EBUILD\"]):\n\t\t\t\tout = portage.output.EOutput()\n\t\t\t\tout.eerror(_(\"Missing digest for '%s'\") % (myebuild,))\n\t\t\t\t_doebuild_broken_ebuilds.add(myebuild)\n\t\t\t\treturn 1\n\t\texcept FileNotFound:\n\t\t\tout = portage.output.EOutput()\n\t\t\tout.eerror(_(\"A file listed in the Manifest \"\n\t\t\t\t\"could not be found: '%s'\") % (myebuild,))\n\t\t\t_doebuild_broken_ebuilds.add(myebuild)\n\t\t\treturn 1\n\t\texcept DigestException as e:\n\t\t\tout = portage.output.EOutput()\n\t\t\tout.eerror(_(\"Digest verification failed:\"))\n\t\t\tout.eerror(\"%s\" % e.value[0])\n\t\t\tout.eerror(_(\"Reason: %s\") % e.value[1])\n\t\t\tout.eerror(_(\"Got: %s\") % e.value[2])\n\t\t\tout.eerror(_(\"Expected: %s\") % e.value[3])\n\t\t\t_doebuild_broken_ebuilds.add(myebuild)\n\t\t\treturn 1\n\n\t\tif mf.getFullname() in _doebuild_broken_manifests:\n\t\t\treturn 1\n\n\t\tif mf is not _doebuild_manifest_cache and not mf.allow_missing:\n\n\t\t\t# Make sure that all of the ebuilds are\n\t\t\t# actually listed in the Manifest.\n\t\t\tfor f in os.listdir(pkgdir):\n\t\t\t\tpf = None\n\t\t\t\tif f[-7:] == '.ebuild':\n\t\t\t\t\tpf = f[:-7]\n\t\t\t\tif pf is not None and not mf.hasFile(\"EBUILD\", f):\n\t\t\t\t\tf = os.path.join(pkgdir, f)\n\t\t\t\t\tif f not in _doebuild_broken_ebuilds:\n\t\t\t\t\t\tout = portage.output.EOutput()\n\t\t\t\t\t\tout.eerror(_(\"A file is not listed in the \"\n\t\t\t\t\t\t\t\"Manifest: '%s'\") % 
(f,))\n\t\t\t\t\t_doebuild_broken_manifests.add(manifest_path)\n\t\t\t\t\treturn 1\n\n\t\t# We cache it only after all above checks succeed.\n\t\t_doebuild_manifest_cache = mf\n\n\tlogfile=None\n\tbuilddir_lock = None\n\ttmpdir = None\n\ttmpdir_orig = None\n\n\ttry:\n\t\tif mydo in (\"digest\", \"manifest\", \"help\"):\n\t\t\t# Temporarily exempt the depend phase from manifest checks, in case\n\t\t\t# aux_get calls trigger cache generation.\n\t\t\tportage._doebuild_manifest_exempt_depend += 1\n\n\t\t# If we don't need much space and we don't need a constant location,\n\t\t# we can temporarily override PORTAGE_TMPDIR with a random temp dir\n\t\t# so that there's no need for locking and it can be used even if the\n\t\t# user isn't in the portage group.\n\t\tif mydo in (\"info\",):\n\t\t\ttmpdir = tempfile.mkdtemp()\n\t\t\ttmpdir_orig = mysettings[\"PORTAGE_TMPDIR\"]\n\t\t\tmysettings[\"PORTAGE_TMPDIR\"] = tmpdir\n\n\t\tdoebuild_environment(myebuild, mydo, myroot, mysettings, debug,\n\t\t\tuse_cache, mydbapi)\n\n\t\tif mydo in clean_phases:\n\t\t\tbuilddir_lock = None\n\t\t\tif not returnpid and \\\n\t\t\t\t'PORTAGE_BUILDIR_LOCKED' not in mysettings:\n\t\t\t\tbuilddir_lock = EbuildBuildDir(\n\t\t\t\t\tscheduler=EventLoop(main=False),\n\t\t\t\t\tsettings=mysettings)\n\t\t\t\tbuilddir_lock.lock()\n\t\t\ttry:\n\t\t\t\treturn _spawn_phase(mydo, mysettings,\n\t\t\t\t\tfd_pipes=fd_pipes, returnpid=returnpid)\n\t\t\tfinally:\n\t\t\t\tif builddir_lock is not None:\n\t\t\t\t\tbuilddir_lock.unlock()\n\n\t\t# get possible slot information from the deps file\n\t\tif mydo == \"depend\":\n\t\t\twritemsg(\"!!! DEBUG: dbkey: %s\\n\" % str(dbkey), 2)\n\t\t\tif returnpid:\n\t\t\t\treturn _spawn_phase(mydo, mysettings,\n\t\t\t\t\tfd_pipes=fd_pipes, returnpid=returnpid)\n\t\t\telif isinstance(dbkey, dict):\n\t\t\t\twarnings.warn(\"portage.doebuild() called \" + \\\n\t\t\t\t\t\"with dict dbkey argument. This usage will \" + \\\n\t\t\t\t\t\"not be supported in the future.\",\n\t\t\t\t\tDeprecationWarning, stacklevel=2)\n\t\t\t\tmysettings[\"dbkey\"] = \"\"\n\t\t\t\tpr, pw = os.pipe()\n\t\t\t\tfd_pipes = {\n\t\t\t\t\t0:sys.__stdin__.fileno(),\n\t\t\t\t\t1:sys.__stdout__.fileno(),\n\t\t\t\t\t2:sys.__stderr__.fileno(),\n\t\t\t\t\t9:pw}\n\t\t\t\tmypids = _spawn_phase(mydo, mysettings, returnpid=True,\n\t\t\t\t\tfd_pipes=fd_pipes)\n\t\t\t\tos.close(pw) # belongs exclusively to the child process now\n\t\t\t\tf = os.fdopen(pr, 'rb', 0)\n\t\t\t\tfor k, v in zip(auxdbkeys,\n\t\t\t\t\t(_unicode_decode(line).rstrip('\\n') for line in f)):\n\t\t\t\t\tdbkey[k] = v\n\t\t\t\tf.close()\n\t\t\t\tretval = os.waitpid(mypids[0], 0)[1]\n\t\t\t\tportage.process.spawned_pids.remove(mypids[0])\n\t\t\t\t# If it got a signal, return the signal that was sent, but\n\t\t\t\t# shift in order to distinguish it from a return value. 
(just\n\t\t\t\t# like portage.process.spawn() would do).\n\t\t\t\tif retval & 0xff:\n\t\t\t\t\tretval = (retval & 0xff) << 8\n\t\t\t\telse:\n\t\t\t\t\t# Otherwise, return its exit code.\n\t\t\t\t\tretval = retval >> 8\n\t\t\t\tif retval == os.EX_OK and len(dbkey) != len(auxdbkeys):\n\t\t\t\t\t# Don't trust bash's returncode if the\n\t\t\t\t\t# number of lines is incorrect.\n\t\t\t\t\tretval = 1\n\t\t\t\treturn retval\n\t\t\telif dbkey:\n\t\t\t\tmysettings[\"dbkey\"] = dbkey\n\t\t\telse:\n\t\t\t\tmysettings[\"dbkey\"] = \\\n\t\t\t\t\tos.path.join(mysettings.depcachedir, \"aux_db_key_temp\")\n\n\t\t\treturn _spawn_phase(mydo, mysettings,\n\t\t\t\tfd_pipes=fd_pipes, returnpid=returnpid)\n\n\t\t# Validate dependency metadata here to ensure that ebuilds with invalid\n\t\t# data are never installed via the ebuild command. Don't bother when\n\t\t# returnpid == True since there's no need to do this every time emerge\n\t\t# executes a phase.\n\t\tif tree == \"porttree\":\n\t\t\trval = _validate_deps(mysettings, myroot, mydo, mydbapi)\n\t\t\tif rval != os.EX_OK:\n\t\t\t\treturn rval\n\n\t\telse:\n\t\t\t# FEATURES=noauto only makes sense for porttree, and we don't want\n\t\t\t# it to trigger redundant sourcing of the ebuild for API consumers\n\t\t\t# that are using binary packages\n\t\t\tif \"noauto\" in mysettings.features:\n\t\t\t\tmysettings.features.discard(\"noauto\")\n\n\t\t# The info phase is special because it uses mkdtemp so and\n\t\t# user (not necessarily in the portage group) can run it.\n\t\tif mydo not in ('info',) and \\\n\t\t\tmydo not in _doebuild_commands_without_builddir:\n\t\t\trval = _check_temp_dir(mysettings)\n\t\t\tif rval != os.EX_OK:\n\t\t\t\treturn rval\n\n\t\tif mydo == \"unmerge\":\n\t\t\treturn unmerge(mysettings[\"CATEGORY\"],\n\t\t\t\tmysettings[\"PF\"], myroot, mysettings, vartree=vartree)\n\n\t\tphases_to_run = set()\n\t\tif \"noauto\" in mysettings.features or \\\n\t\t\tmydo not in actionmap_deps:\n\t\t\tphases_to_run.add(mydo)\n\t\telse:\n\t\t\tphase_stack = [mydo]\n\t\t\twhile phase_stack:\n\t\t\t\tx = phase_stack.pop()\n\t\t\t\tif x in phases_to_run:\n\t\t\t\t\tcontinue\n\t\t\t\tphases_to_run.add(x)\n\t\t\t\tphase_stack.extend(actionmap_deps.get(x, []))\n\t\t\tdel phase_stack\n\n\t\talist = set(mysettings.configdict[\"pkg\"].get(\"A\", \"\").split())\n\n\t\tunpacked = False\n\t\tif tree != \"porttree\":\n\t\t\tpass\n\t\telif \"unpack\" not in phases_to_run:\n\t\t\tunpacked = os.path.exists(os.path.join(\n\t\t\t\tmysettings[\"PORTAGE_BUILDDIR\"], \".unpacked\"))\n\t\telse:\n\t\t\ttry:\n\t\t\t\tworkdir_st = os.stat(mysettings[\"WORKDIR\"])\n\t\t\texcept OSError:\n\t\t\t\tpass\n\t\t\telse:\n\t\t\t\tnewstuff = False\n\t\t\t\tif not os.path.exists(os.path.join(\n\t\t\t\t\tmysettings[\"PORTAGE_BUILDDIR\"], \".unpacked\")):\n\t\t\t\t\twritemsg_stdout(_(\n\t\t\t\t\t\t\">>> Not marked as unpacked; recreating WORKDIR...\\n\"))\n\t\t\t\t\tnewstuff = True\n\t\t\t\telse:\n\t\t\t\t\tfor x in alist:\n\t\t\t\t\t\twritemsg_stdout(\">>> Checking %s's mtime...\\n\" % x)\n\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\tx_st = os.stat(os.path.join(\n\t\t\t\t\t\t\t\tmysettings[\"DISTDIR\"], x))\n\t\t\t\t\t\texcept OSError:\n\t\t\t\t\t\t\t# file not fetched yet\n\t\t\t\t\t\t\tx_st = None\n\n\t\t\t\t\t\tif x_st is None or x_st.st_mtime > workdir_st.st_mtime:\n\t\t\t\t\t\t\twritemsg_stdout(_(\">>> Timestamp of \"\n\t\t\t\t\t\t\t\t\"%s has changed; recreating WORKDIR...\\n\") % x)\n\t\t\t\t\t\t\tnewstuff = True\n\t\t\t\t\t\t\tbreak\n\n\t\t\t\tif newstuff:\n\t\t\t\t\tif builddir_lock is None and 
\\\n\t\t\t\t\t\t'PORTAGE_BUILDIR_LOCKED' not in mysettings:\n\t\t\t\t\t\tbuilddir_lock = EbuildBuildDir(\n\t\t\t\t\t\t\tscheduler=EventLoop(main=False),\n\t\t\t\t\t\t\tsettings=mysettings)\n\t\t\t\t\t\tbuilddir_lock.lock()\n\t\t\t\t\ttry:\n\t\t\t\t\t\t_spawn_phase(\"clean\", mysettings)\n\t\t\t\t\tfinally:\n\t\t\t\t\t\tif builddir_lock is not None:\n\t\t\t\t\t\t\tbuilddir_lock.unlock()\n\t\t\t\t\t\t\tbuilddir_lock = None\n\t\t\t\telse:\n\t\t\t\t\twritemsg_stdout(_(\">>> WORKDIR is up-to-date, keeping...\\n\"))\n\t\t\t\t\tunpacked = True\n\n\t\t# Build directory creation isn't required for any of these.\n\t\t# In the fetch phase, the directory is needed only for RESTRICT=fetch\n\t\t# in order to satisfy the sane $PWD requirement (from bug #239560)\n\t\t# when pkg_nofetch is spawned.\n\t\thave_build_dirs = False\n\t\tif not parallel_fetchonly and \\\n\t\t\tmydo not in ('digest', 'fetch', 'help', 'manifest'):\n\t\t\tif not returnpid and \\\n\t\t\t\t'PORTAGE_BUILDIR_LOCKED' not in mysettings:\n\t\t\t\tbuilddir_lock = EbuildBuildDir(\n\t\t\t\t\tscheduler=EventLoop(main=False),\n\t\t\t\t\tsettings=mysettings)\n\t\t\t\tbuilddir_lock.lock()\n\t\t\tmystatus = prepare_build_dirs(myroot, mysettings, cleanup)\n\t\t\tif mystatus:\n\t\t\t\treturn mystatus\n\t\t\thave_build_dirs = True\n\n\t\t\t# emerge handles logging externally\n\t\t\tif not returnpid:\n\t\t\t\t# PORTAGE_LOG_FILE is set by the\n\t\t\t\t# above prepare_build_dirs() call.\n\t\t\t\tlogfile = mysettings.get(\"PORTAGE_LOG_FILE\")\n\n\t\tif have_build_dirs:\n\t\t\trval = _prepare_env_file(mysettings)\n\t\t\tif rval != os.EX_OK:\n\t\t\t\treturn rval\n\n\t\tif eapi_exports_merge_type(mysettings[\"EAPI\"]) and \\\n\t\t\t\"MERGE_TYPE\" not in mysettings.configdict[\"pkg\"]:\n\t\t\tif tree == \"porttree\":\n\t\t\t\tmysettings.configdict[\"pkg\"][\"EMERGE_FROM\"] = \"ebuild\"\n\t\t\t\tmysettings.configdict[\"pkg\"][\"MERGE_TYPE\"] = \"source\"\n\t\t\telif tree == \"bintree\":\n\t\t\t\tmysettings.configdict[\"pkg\"][\"EMERGE_FROM\"] = \"binary\"\n\t\t\t\tmysettings.configdict[\"pkg\"][\"MERGE_TYPE\"] = \"binary\"\n\n\t\t# NOTE: It's not possible to set REPLACED_BY_VERSION for prerm\n\t\t# and postrm here, since we don't necessarily know what\n\t\t# versions are being installed. This could be a problem\n\t\t# for API consumers if they don't use dblink.treewalk()\n\t\t# to execute prerm and postrm.\n\t\tif eapi_exports_replace_vars(mysettings[\"EAPI\"]) and \\\n\t\t\t(mydo in (\"postinst\", \"preinst\", \"pretend\", \"setup\") or \\\n\t\t\t(\"noauto\" not in features and not returnpid and \\\n\t\t\t(mydo in actionmap_deps or mydo in (\"merge\", \"package\", \"qmerge\")))):\n\t\t\tif not vartree:\n\t\t\t\twritemsg(\"Warning: vartree not given to doebuild. 
\" + \\\n\t\t\t\t\t\"Cannot set REPLACING_VERSIONS in pkg_{pretend,setup}\\n\")\n\t\t\telse:\n\t\t\t\tvardb = vartree.dbapi\n\t\t\t\tcpv = mysettings.mycpv\n\t\t\t\tcpv_slot = \"%s%s%s\" % \\\n\t\t\t\t\t(cpv.cp, portage.dep._slot_separator, cpv.slot)\n\t\t\t\tmysettings[\"REPLACING_VERSIONS\"] = \" \".join(\n\t\t\t\t\tset(portage.versions.cpv_getversion(match) \\\n\t\t\t\t\t\tfor match in vardb.match(cpv_slot) + \\\n\t\t\t\t\t\tvardb.match('='+cpv)))\n\n\t\t# if any of these are being called, handle them -- running them out of\n\t\t# the sandbox -- and stop now.\n\t\tif mydo in (\"config\", \"help\", \"info\", \"postinst\",\n\t\t\t\"preinst\", \"pretend\", \"postrm\", \"prerm\"):\n\t\t\tif mydo in (\"preinst\", \"postinst\"):\n\t\t\t\tenv_file = os.path.join(os.path.dirname(mysettings[\"EBUILD\"]),\n\t\t\t\t\t\"environment.bz2\")\n\t\t\t\tif os.path.isfile(env_file):\n\t\t\t\t\tmysettings[\"PORTAGE_UPDATE_ENV\"] = env_file\n\t\t\ttry:\n\t\t\t\treturn _spawn_phase(mydo, mysettings,\n\t\t\t\t\tfd_pipes=fd_pipes, logfile=logfile, returnpid=returnpid)\n\t\t\tfinally:\n\t\t\t\tmysettings.pop(\"PORTAGE_UPDATE_ENV\", None)\n\n\t\tmycpv = \"/\".join((mysettings[\"CATEGORY\"], mysettings[\"PF\"]))\n\n\t\t# Only try and fetch the files if we are going to need them ...\n\t\t# otherwise, if user has FEATURES=noauto and they run `ebuild clean\n\t\t# unpack compile install`, we will try and fetch 4 times :/\n\t\tneed_distfiles = tree == \"porttree\" and not unpacked and \\\n\t\t\t(mydo in (\"fetch\", \"unpack\") or \\\n\t\t\tmydo not in (\"digest\", \"manifest\") and \"noauto\" not in features)\n\t\tif need_distfiles:\n\n\t\t\tsrc_uri, = mydbapi.aux_get(mysettings.mycpv,\n\t\t\t\t[\"SRC_URI\"], mytree=os.path.dirname(os.path.dirname(\n\t\t\t\tos.path.dirname(myebuild))))\n\t\t\tmetadata = {\n\t\t\t\t\"EAPI\" : mysettings[\"EAPI\"],\n\t\t\t\t\"SRC_URI\" : src_uri,\n\t\t\t}\n\t\t\tuse = frozenset(mysettings[\"PORTAGE_USE\"].split())\n\t\t\ttry:\n\t\t\t\talist = _parse_uri_map(mysettings.mycpv, metadata, use=use)\n\t\t\t\taalist = _parse_uri_map(mysettings.mycpv, metadata)\n\t\t\texcept InvalidDependString as e:\n\t\t\t\twritemsg(\"!!! %s\\n\" % str(e), noiselevel=-1)\n\t\t\t\twritemsg(_(\"!!! 
Invalid SRC_URI for '%s'.\\n\") % mycpv,\n\t\t\t\t\tnoiselevel=-1)\n\t\t\t\tdel e\n\t\t\t\treturn 1\n\n\t\t\tif \"mirror\" in features or fetchall:\n\t\t\t\tfetchme = aalist\n\t\t\telse:\n\t\t\t\tfetchme = alist\n\n\t\t\tdist_digests = None\n\t\t\tif mf is not None:\n\t\t\t\tdist_digests = mf.getTypeDigests(\"DIST\")\n\t\t\tif not fetch(fetchme, mysettings, listonly=listonly,\n\t\t\t\tfetchonly=fetchonly, allow_missing_digests=True,\n\t\t\t\tdigests=dist_digests):\n\t\t\t\tspawn_nofetch(mydbapi, myebuild, settings=mysettings)\n\t\t\t\tif listonly:\n\t\t\t\t\t# The convention for listonly mode is to report\n\t\t\t\t\t# success in any case, even though fetch() may\n\t\t\t\t\t# return unsuccessfully in order to trigger the\n\t\t\t\t\t# nofetch phase.\n\t\t\t\t\treturn 0\n\t\t\t\treturn 1\n\n\t\tif need_distfiles:\n\t\t\t# Files are already checked inside fetch(),\n\t\t\t# so do not check them again.\n\t\t\tcheckme = []\n\t\telif unpacked:\n\t\t\t# The unpack phase is marked as complete, so it\n\t\t\t# would be wasteful to check distfiles again.\n\t\t\tcheckme = []\n\t\telse:\n\t\t\tcheckme = alist\n\n\t\tif mydo == \"fetch\" and listonly:\n\t\t\treturn 0\n\n\t\ttry:\n\t\t\tif mydo == \"manifest\":\n\t\t\t\tmf = None\n\t\t\t\t_doebuild_manifest_cache = None\n\t\t\t\treturn not digestgen(mysettings=mysettings, myportdb=mydbapi)\n\t\t\telif mydo == \"digest\":\n\t\t\t\tmf = None\n\t\t\t\t_doebuild_manifest_cache = None\n\t\t\t\treturn not digestgen(mysettings=mysettings, myportdb=mydbapi)\n\t\t\telif mydo != 'fetch' and \\\n\t\t\t\t\"digest\" in mysettings.features:\n\t\t\t\t# Don't do this when called by emerge or when called just\n\t\t\t\t# for fetch (especially parallel-fetch) since it's not needed\n\t\t\t\t# and it can interfere with parallel tasks.\n\t\t\t\tmf = None\n\t\t\t\t_doebuild_manifest_cache = None\n\t\t\t\tdigestgen(mysettings=mysettings, myportdb=mydbapi)\n\t\texcept PermissionDenied as e:\n\t\t\twritemsg(_(\"!!! Permission Denied: %s\\n\") % (e,), noiselevel=-1)\n\t\t\tif mydo in (\"digest\", \"manifest\"):\n\t\t\t\treturn 1\n\n\t\t# See above comment about fetching only when needed\n\t\tif tree == 'porttree' and \\\n\t\t\tnot digestcheck(checkme, mysettings, \"strict\" in features, mf=mf):\n\t\t\treturn 1\n\n\t\tif mydo == \"fetch\":\n\t\t\treturn 0\n\n\t\t# remove PORTAGE_ACTUAL_DISTDIR once cvs/svn is supported via SRC_URI\n\t\tif tree == 'porttree' and \\\n\t\t\t((mydo != \"setup\" and \"noauto\" not in features) \\\n\t\t\tor mydo in (\"install\", \"unpack\")):\n\t\t\t_prepare_fake_distdir(mysettings, alist)\n\n\t\t#initial dep checks complete; time to process main commands\n\t\tactionmap = _spawn_actionmap(mysettings)\n\n\t\t# merge the deps in so we have again a 'full' actionmap\n\t\t# be glad when this can die.\n\t\tfor x in actionmap:\n\t\t\tif len(actionmap_deps.get(x, [])):\n\t\t\t\tactionmap[x][\"dep\"] = ' '.join(actionmap_deps[x])\n\n\t\tif mydo in actionmap:\n\t\t\tbintree = None\n\t\t\tif mydo == \"package\":\n\t\t\t\t# Make sure the package directory exists before executing\n\t\t\t\t# this phase. 
This can raise PermissionDenied if\n\t\t\t\t# the current user doesn't have write access to $PKGDIR.\n\t\t\t\tif hasattr(portage, 'db'):\n\t\t\t\t\tbintree = portage.db[mysettings['EROOT']]['bintree']\n\t\t\t\t\tmysettings[\"PORTAGE_BINPKG_TMPFILE\"] = \\\n\t\t\t\t\t\tbintree.getname(mysettings.mycpv) + \\\n\t\t\t\t\t\t\".%s\" % (os.getpid(),)\n\t\t\t\t\tbintree._ensure_dir(os.path.dirname(\n\t\t\t\t\t\tmysettings[\"PORTAGE_BINPKG_TMPFILE\"]))\n\t\t\t\telse:\n\t\t\t\t\tparent_dir = os.path.join(mysettings[\"PKGDIR\"],\n\t\t\t\t\t\tmysettings[\"CATEGORY\"])\n\t\t\t\t\tportage.util.ensure_dirs(parent_dir)\n\t\t\t\t\tif not os.access(parent_dir, os.W_OK):\n\t\t\t\t\t\traise PermissionDenied(\n\t\t\t\t\t\t\t\"access('%s', os.W_OK)\" % parent_dir)\n\t\t\tretval = spawnebuild(mydo,\n\t\t\t\tactionmap, mysettings, debug, logfile=logfile,\n\t\t\t\tfd_pipes=fd_pipes, returnpid=returnpid)\n\n\t\t\tif retval == os.EX_OK:\n\t\t\t\tif mydo == \"package\" and bintree is not None:\n\t\t\t\t\tbintree.inject(mysettings.mycpv,\n\t\t\t\t\t\tfilename=mysettings[\"PORTAGE_BINPKG_TMPFILE\"])\n\t\t\telse:\n\t\t\t\tif \"PORTAGE_BINPKG_TMPFILE\" in mysettings:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tos.unlink(mysettings[\"PORTAGE_BINPKG_TMPFILE\"])\n\t\t\t\t\texcept OSError:\n\t\t\t\t\t\tpass\n\n\t\telif mydo==\"qmerge\":\n\t\t\t# check to ensure install was run. this *only* pops up when users\n\t\t\t# forget it and are using ebuild\n\t\t\tif not os.path.exists(\n\t\t\t\tos.path.join(mysettings[\"PORTAGE_BUILDDIR\"], \".installed\")):\n\t\t\t\twritemsg(_(\"!!! mydo=qmerge, but the install phase has not been run\\n\"),\n\t\t\t\t\tnoiselevel=-1)\n\t\t\t\treturn 1\n\t\t\t# qmerge is a special phase that implies noclean.\n\t\t\tif \"noclean\" not in mysettings.features:\n\t\t\t\tmysettings.features.add(\"noclean\")\n\t\t\t_handle_self_update(mysettings, vartree.dbapi)\n\t\t\t#qmerge is specifically not supposed to do a runtime dep check\n\t\t\tretval = merge(\n\t\t\t\tmysettings[\"CATEGORY\"], mysettings[\"PF\"], mysettings[\"D\"],\n\t\t\t\tos.path.join(mysettings[\"PORTAGE_BUILDDIR\"], \"build-info\"),\n\t\t\t\tmyroot, mysettings, myebuild=mysettings[\"EBUILD\"], mytree=tree,\n\t\t\t\tmydbapi=mydbapi, vartree=vartree, prev_mtimes=prev_mtimes)\n\t\telif mydo==\"merge\":\n\t\t\tretval = spawnebuild(\"install\", actionmap, mysettings, debug,\n\t\t\t\talwaysdep=1, logfile=logfile, fd_pipes=fd_pipes,\n\t\t\t\treturnpid=returnpid)\n\t\t\tif retval != os.EX_OK:\n\t\t\t\t# The merge phase handles this already. Callers don't know how\n\t\t\t\t# far this function got, so we have to call elog_process() here\n\t\t\t\t# so that it's only called once.\n\t\t\t\telog_process(mysettings.mycpv, mysettings)\n\t\t\tif retval == os.EX_OK:\n\t\t\t\t_handle_self_update(mysettings, vartree.dbapi)\n\t\t\t\tretval = merge(mysettings[\"CATEGORY\"], mysettings[\"PF\"],\n\t\t\t\t\tmysettings[\"D\"], os.path.join(mysettings[\"PORTAGE_BUILDDIR\"],\n\t\t\t\t\t\"build-info\"), myroot, mysettings,\n\t\t\t\t\tmyebuild=mysettings[\"EBUILD\"], mytree=tree, mydbapi=mydbapi,\n\t\t\t\t\tvartree=vartree, prev_mtimes=prev_mtimes)\n\t\telse:\n\t\t\twritemsg_stdout(_(\"!!! 
Unknown mydo: %s\\n\") % mydo, noiselevel=-1)\n\t\t\treturn 1\n\n\t\treturn retval\n\n\tfinally:\n\n\t\tif builddir_lock is not None:\n\t\t\tbuilddir_lock.unlock()\n\t\tif tmpdir:\n\t\t\tmysettings[\"PORTAGE_TMPDIR\"] = tmpdir_orig\n\t\t\tshutil.rmtree(tmpdir)\n\n\t\tmysettings.pop(\"REPLACING_VERSIONS\", None)\n\n\t\t# Make sure that DISTDIR is restored to it's normal value before we return!\n\t\tif \"PORTAGE_ACTUAL_DISTDIR\" in mysettings:\n\t\t\tmysettings[\"DISTDIR\"] = mysettings[\"PORTAGE_ACTUAL_DISTDIR\"]\n\t\t\tdel mysettings[\"PORTAGE_ACTUAL_DISTDIR\"]\n\n\t\tif logfile and not returnpid:\n\t\t\ttry:\n\t\t\t\tif os.stat(logfile).st_size == 0:\n\t\t\t\t\tos.unlink(logfile)\n\t\t\texcept OSError:\n\t\t\t\tpass\n\n\t\tif mydo in (\"digest\", \"manifest\", \"help\"):\n\t\t\t# If necessary, depend phase has been triggered by aux_get calls\n\t\t\t# and the exemption is no longer needed.\n\t\t\tportage._doebuild_manifest_exempt_depend -= 1", "def build_all(self):\n self.android_build()\n self.generate_patch_build('')\n self.generate_specs_build()\n self.generate_interfaces()", "def beginHypo( self ):\n assert isinstance( self._env, Env )\n assert isinstance( self._steps, list )\n\n if len(self._steps) == 0:\n prevStepLevel = 0\n else:\n prevStepLevel = self._steps[ len(self._steps) - 1 ].level\n\n if self._env.level() not in ( prevStepLevel, prevStepLevel - 1 ):\n raise Exception( 'Cannot nest more than one level in a single step.' )\n\n self._env = Env( self._env.level() + 1, self._env )", "def main(*, build, subdir, description, supports_modules=False,\n supports_quick=False):\n parser = argparse.ArgumentParser(description=description)\n group = parser.add_mutually_exclusive_group()\n group.add_argument(\n \"--serve\", action='store_true',\n help=\"Serve the documentation on the given PORT for easy preview.\")\n group.add_argument(\n \"--out_dir\", type=str, metavar=\"DIR\",\n help=\"Generate the documentation to the given output directory.\"\n \" The DIR must be an absolute path.\"\n \" If DIR already exists, then it must be empty.\"\n \" (For regression testing, the DIR can be the magic value <test>,\"\n \" in which case a $TEST_TMPDIR subdir will be used.)\")\n parser.add_argument(\n \"--port\", type=int, metavar=\"PORT\", default=8000,\n help=\"Use a non-default PORT when serving for preview.\")\n parser.add_argument(\n \"--verbose\", action=\"store_true\",\n help=\"Echo detailed commands, progress, etc. to the console\")\n if supports_modules:\n parser.add_argument(\n \"module\", nargs=\"*\",\n help=\"Limit the generated documentation to only these modules and \"\n \"their children. When none are provided, all will be generated. \"\n \"For example, specify drake.math or drake/math for the C++ \"\n \"module, or pydrake.math or pydrake/math for the Python module.\")\n if supports_quick:\n parser.add_argument(\n \"--quick\", action=\"store_true\", default=False,\n help=\"Omit from the output items that are slow to generate. 
\"\n \"This yields a faster preview, but the output will be incomplete.\")\n args = parser.parse_args()\n if args.verbose:\n global _verbose\n _verbose = True\n curried_build = build\n if supports_modules:\n canonicalized_modules = [\n x.replace('/', '.')\n for x in args.module\n ]\n curried_build = functools.partial(\n curried_build, modules=canonicalized_modules)\n if supports_quick:\n curried_build = functools.partial(\n curried_build, quick=args.quick)\n if args.out_dir is None:\n assert args.serve\n _do_preview(build=curried_build, subdir=subdir, port=args.port)\n else:\n _do_generate(build=curried_build, out_dir=args.out_dir,\n on_error=parser.error)", "def depth(self, create, depth, **kwargs): # pylint: disable=unused-argument\r\n # pylint: disable=no-member\r\n if depth == 0:\r\n self.get_module.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor)\r\n else:\r\n self.get_module.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1)", "def buildStarted(builderName, build):", "def work_in_example_project(request):\n return chdir_in_and_out(request, LoslassaProject.EXAMPLE_PROJECT)", "def RunStages(self):\n self._RunStage(build_stages.InitSDKStage)\n self.RunSetupBoard()\n self._RunStage(report_stages.RefreshPackageStatusStage)", "def main():\n init()\n separator_len = 40\n for s in stage_instances:\n print('='*separator_len)\n print(s.name)\n print('-'*separator_len)\n\n s.add_tasks() # Add tasks from previous stage\n s.revive_or_archive() # Revive killed tasks or move them to failed\n s.schedule_jobs() # Schedule new jobs if needed\n s.print_status()\n print('='*separator_len + '\\n')\n render(stage_instances)", "def task_generate_tasks():\n \n yield {\n 'basename': 'generate_tasks',\n 'name': None,\n # 'doc': 'docs for X',\n 'watch': ['trains/'],\n 'task_dep': ['create_folders'],\n }\n \n for root, dirs, files in os.walk('trains/',topdown=False):\n for f in files:\n #print(f)\n yield template_train_model(os.path.join(root,f))", "def depth(self, create, depth, **kwargs): # pylint: disable=unused-argument\r\n # pylint: disable=no-member\r\n if depth == 0:\r\n self.load_item.side_effect = lambda x: LeafModuleFactory(descriptor_cls=HtmlDescriptor)\r\n else:\r\n self.load_item.side_effect = lambda x: ContainerModuleFactory(descriptor_cls=VerticalDescriptor, depth=depth - 1)", "def docker_build(c):\n cli_tasks.docker_build.run(c)", "def test_sections_json_spider_three_levels(self):\n\n title = (\"Taking Action for the Social and Emotional Health of \"\n\t \"Young Children: A Report to the Community from the Denver \"\n\t\t \"Early Childhood Council\")\n\tsummary = (\"Now, Denver has a plan of action to make it easier for \"\n\t \"families to access early childhood mental health \"\n\t\t \"information, intervention and services.\")\n\tbyline = \"Denver Early Childhood Council\"\n story = create_story(title=title, summary=summary, byline=byline)\n section1 = create_section(\"We're ready to take action. 
Are you?\",\n\t\t\t story=story, weight=7)\n\tsection2 = create_section(\"Ricardo's Story\",\n\t\t\t story=story, weight=2)\n\tsection3 = create_section(\"Meeting the need for better child mental health services\",\n\t\t\t story=story, root=True, weight=1)\n\tsection4 = create_section(\"Healthy Minds Support Strong Futures\",\n\t\t\t story=story, weight=5) \n\tsection5 = create_section(\"Community Voices\",\n\t\t\t story=story, weight=3)\n\tsection6 = create_section(\"Our Vision: That All Children in Denver are Valued, Healthy and Thriving\",\n\t\t\t story=story, weight=4)\n\tsection7 = create_section(\"Defining a \\\"Framework for Change\\\" with Actionable Goals and Strategies\",\n\t\t\t story=story, weight=5) \n section8 = create_section(\"How Can the Plan Make a Difference?\",\n\t\t\t story=story, weight=5)\n\tsection9 = create_section(\"Impact\", story=story, weight=6)\n SectionRelation.objects.create(parent=section6, child=section8,\n weight=0)\n SectionRelation.objects.create(parent=section7, child=section9,\n weight=0)\n SectionRelation.objects.create(parent=section6, child=section7,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section1,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section6,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section4,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section5,\n weight=0)\n SectionRelation.objects.create(parent=section3, child=section2,\n weight=0)\n\tjson_sections = simplejson.loads(story.structure.sections_json(\n\t\tinclude_summary=False, include_call_to_action=False))\n\tself.assertIn(\n\t section8.section_id,\n\t self._get_section(json_sections, section6.section_id)['children'])\n\tself.assertIn(\n\t section9.section_id,\n\t self._get_section(json_sections, section7.section_id)['children'])\n\tself.assertIn(\n\t section7.section_id,\n\t self._get_section(json_sections, section6.section_id)['children'])\n\tself.assertIn(\n\t section1.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section6.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section4.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section5.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])\n\tself.assertIn(\n\t section2.section_id,\n\t self._get_section(json_sections, section3.section_id)['children'])", "def main(verbose, debug, names):\n initialize(debug)\n\n echome(names)\n # click.echo(\"hello\")\n # see\n # https://www.brianthicks.com/post/2014/11/03/build-modular-command-line-tools-with-click/", "def generate_submissons_all_steps():\n\n\n data_en = read_json_file(\"Test_Data/test-en.json\")\n data_pr = read_json_file(\"Test_Data/test-pr.json\")\n data_es = read_json_file(\"Test_Data/test-es.json\")\n res_en = generate_embeddings_sentence_test_data(data_en, \"Test_Data/embd-en.pkl\")\n res_es = generate_embeddings_sentence_test_data(data_es, \"Test_Data/embd-es.pkl\")\n res_pr = generate_embeddings_sentence_test_data(data_pr, \"Test_Data/embd-pr.pkl\")\n model = load_model(\"model_doc\")\n make_submission(res_es, model, \"submission-es\")\n make_submission(res_pr, model, \"submission-pr\")\n make_submission(res_en, model, \"submission-en\")\n exit()", "def cmnd_stage():\n pass", "def test_minimalSubProjectLayout(self):\n structure = {\n \"LICENSE\": \"copyright!\",\n \"bin\": {},\n 
\"twisted\":\n {\"web\": {\"__init__.py\": \"import WEB\",\n \"topfiles\": {\"setup.py\": \"import WEBINSTALL\"}},\n \"plugins\": {}}}\n\n outStructure = {\n \"setup.py\": \"import WEBINSTALL\",\n \"LICENSE\": \"copyright!\",\n \"twisted\": {\"web\": {\"__init__.py\": \"import WEB\"}}}\n\n self.createStructure(self.rootDir, structure)\n\n outputFile = self.builder.buildSubProject(\"web\", \"0.3.0\")\n\n self.assertExtractedStructure(outputFile, outStructure)", "def test_expand_experiments():\n template_script = get_template_script()\n experiment_systems = utils.CombinatorialLeaf(['explicit-system', 'implicit-system', 'hydration-system'])\n template_script['experiments']['system'] = experiment_systems\n\n exp_builder = ExperimentBuilder(script=template_script, job_id=1, n_jobs=2)\n experiments = list(exp_builder._expand_experiments())\n assert len(experiments) == 2\n\n exp_builder = ExperimentBuilder(script=template_script, job_id=2, n_jobs=2)\n experiments = list(exp_builder._expand_experiments())\n assert len(experiments) == 1", "def build(images, tag, quiet):\n # images to build\n build_images = None\n\n # list of available images\n image_list = build_image_map().keys()\n\n # image specified: check if it exists\n if images:\n build_images = []\n for check_name in images:\n if check_name in image_list:\n build_images.append(check_name)\n continue\n raise click.ClickException(\n \"invalid image name '{}'\".format(check_name))\n click.echo('building images {}'.format(', '.join(build_images)))\n # no image specified: build all\n else:\n click.echo('building images {}'.format(', '.join(image_list)))\n\n manager = Manager('build', tag, images=build_images, verbose=not quiet)\n manager.run()", "def nodes_run_cmd(self, group, cmd):\n path_inventory = u'%s/inventories/%s' % (self.ansible_path, self.environment)\n path_lib = u'%s/library/beehive/' % (self.ansible_path)\n runner = Runner(inventory=path_inventory, verbosity=self.verbosity, \n module=path_lib)\n tasks = [\n dict(action=dict(module=u'shell', args=cmd), register=u'shell_out'),\n ]\n runner.run_task(group, tasks=tasks, frmt=u'json')", "def walk(node, parent):\n for name, item in node.iteritems():\n if type(item) is dict:\n cat = etree.SubElement(parent, 'menu', id=name, label=name)\n walk (item, cat)\n else:\n menu_item = etree.SubElement(parent, 'item', label=name)\n action = etree.SubElement(menu_item, 'action', name='execute')\n etree.SubElement(action, 'command').text = item", "def main(\n output,\n elm_make,\n mount_at,\n exclude,\n force_exclusion,\n validate,\n doit_args,\n project_path,\n include_paths):\n\n if not validate and output is None:\n raise click.BadParameter('please specify --output directory')\n\n def task_build():\n resolved_include_paths = [_resolve_path(path) for path in include_paths]\n exclude_modules = exclude.split(',') if exclude else []\n return create_tasks(\n _resolve_path(project_path),\n _resolve_path(output) if output is not None else None,\n elm_make=_resolve_path(elm_make) if elm_make is not None else None,\n include_paths=resolved_include_paths,\n exclude_modules=exclude_modules,\n force_exclusion=force_exclusion,\n mount_point=mount_at,\n validate=validate)\n\n result = DoitMain(ModuleTaskLoader(locals())).run(\n doit_args.split(' ') if doit_args else [])\n if result is not None and result > 0:\n raise DoitException('', result)", "def main():\n parser = argparse.ArgumentParser(description=\"Wrapper for the GROMACS make_ndx module.\",\n formatter_class=lambda prog: 
argparse.RawTextHelpFormatter(prog, width=99999))\n parser.add_argument('-c', '--config', required=False, help=\"This file can be a YAML file, JSON file or JSON string\")\n\n # Specific args of each building block\n required_args = parser.add_argument_group('required arguments')\n required_args.add_argument('--input_structure_path', required=True)\n required_args.add_argument('--output_ndx_path', required=True)\n parser.add_argument('--input_ndx_path', required=False)\n\n args = parser.parse_args()\n config = args.config if args.config else None\n properties = settings.ConfReader(config=config).get_prop_dic()\n\n # Specific call of each building block\n make_ndx(input_structure_path=args.input_structure_path,\n output_ndx_path=args.output_ndx_path,\n input_ndx_path=args.input_ndx_path,\n properties=properties)", "def build(self, parent):\n raise NotImplementedError", "def build_tree(java_path, verbose):\n logging.info(\"Building in %s \", java_path)\n # special hack for comparing with rel/2.0.0, see HBASE-26063 for more details\n subprocess.check_call([\"sed\", \"-i\", \"2148s/3.0.0/3.0.4/g\", \"pom.xml\"], cwd=java_path)\n mvn_cmd = [\"mvn\", \"--batch-mode\", \"-DskipTests\",\n \"-Dmaven.javadoc.skip=true\", \"package\"]\n if not verbose:\n mvn_cmd.insert(-1, \"--quiet\")\n subprocess.check_call(mvn_cmd, cwd=java_path)", "def build (self, buildtype):\n layout = self.xlayout()\n\n # NuGet removes the prefixing v from the version.\n vless_version = self.kv['version']\n if vless_version[0] == 'v':\n vless_version = vless_version[1:]\n\n\n self.stpath = tempfile.mkdtemp(prefix=\"out-\", suffix=\"-%s\" % buildtype,\n dir=\".\")\n\n self.render('librdkafka.redist.nuspec')\n self.copy_template('librdkafka.redist.targets',\n destpath=os.path.join('build', 'native'))\n self.copy_template('librdkafka.redist.props',\n destpath=os.path.join('build', 'native'))\n self.copy_template('librdkafka.redist.props',\n destpath=os.path.join('build', 'net'))\n for f in default_doc:\n shutil.copy(f, self.stpath)\n\n self.extract_artifacts(layout)\n\n print('Tree extracted to %s' % self.stpath)\n\n # After creating a bare-bone nupkg layout containing the artifacts\n # and some spec and props files, call the 'nuget' utility to\n # make a proper nupkg of it (with all the metadata files).\n subprocess.check_call(\"./nuget.sh pack %s -BasePath '%s' -NonInteractive\" % \\\n (os.path.join(self.stpath, 'librdkafka.redist.nuspec'),\n self.stpath), shell=True)\n return ['librdkafka.redist.%s.nupkg' % vless_version]", "def info_build_test(self):\n\n self._export(\"H0\", \"0.1\")\n\n self._export(\"H1a\", \"0.1\", deps=[(\"H0/0.1@lu/st\", \"private\")])\n self._export(\"H1b\", \"0.1\", deps=[\"H0/0.1@lu/st\"])\n self._export(\"H1c\", \"0.1\", deps=[(\"H0/0.1@lu/st\", \"private\")])\n\n self._export(\"H2a\", \"0.1\", deps=[\"H1a/0.1@lu/st\"])\n self._export(\"H2c\", \"0.1\", deps=[\"H1c/0.1@lu/st\"])\n\n self._export(\"H3\", \"0.1\", deps=[\"H2a/0.1@lu/st\",\n \"H2c/0.1@lu/st\"])\n\n # If we install H3 we need to build all except H1b\n self.clients[\"H3\"].run(\"info --build missing\")\n self.assert_last_line(self.clients[\"H3\"],\n \"H0/0.1@lu/st, H1a/0.1@lu/st, H1c/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st\")\n\n # If we install H0 we need to build nothing (current project)\n self.clients[\"H0\"].run(\"info --build missing\")\n self.assert_last_line(self.clients[\"H0\"], \"\")\n\n # If we install H0 we need to build H0\n self.clients[\"H1a\"].run(\"info --build missing\")\n self.assert_last_line(self.clients[\"H1a\"], 
\"H0/0.1@lu/st\")\n\n # If we build and upload H1a and H1c, no more H0 (private) is required\n self.clients[\"H3\"].run(\"install H1a/0.1@lu/st --build \")\n self.clients[\"H3\"].run(\"install H1c/0.1@lu/st --build \")\n self.clients[\"H3\"].run(\"upload H1a/0.1@lu/st --all\")\n self.clients[\"H3\"].run(\"upload H1c/0.1@lu/st --all\")\n\n self.clients[\"H3\"].run(\"remove '*' -f\")\n self.clients[\"H3\"].run(\"info --build missing\")\n self.assert_last_line(self.clients[\"H3\"],\n \"H2a/0.1@lu/st, H2c/0.1@lu/st\")\n\n # But if we force to build all, all nodes have to be built\n self.clients[\"H3\"].run(\"remove '*' -f\")\n self.clients[\"H3\"].run(\"info --build\")\n self.assert_last_line(self.clients[\"H3\"],\n \"H0/0.1@lu/st, H1a/0.1@lu/st, H1c/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st\")\n\n # Now upgrade the recipe H1a and upload it (but not the package)\n # so the package become outdated\n conanfile_path = os.path.join(self.clients[\"H1a\"].current_folder, CONANFILE)\n conanfile = load(conanfile_path)\n conanfile += \"\\n# MODIFIED\"\n save(conanfile_path, conanfile)\n self.clients[\"H1a\"].run(\"export lu/st\")\n self.clients[\"H1a\"].run(\"upload H1a/0.1@lu/st\") # NOW IS OUTDATED!\n\n # Without build outdated the built packages are the same\n self.clients[\"H3\"].run(\"remove '*' -f\")\n self.clients[\"H3\"].run(\"info --build missing\")\n self.assert_last_line(self.clients[\"H3\"],\n \"H2a/0.1@lu/st, H2c/0.1@lu/st\")\n\n # But with build outdated we have to build the private H0 (but only once) and H1a\n self.clients[\"H3\"].run(\"remove '*' -f\")\n self.clients[\"H3\"].run(\"info --build outdated\")\n self.assert_last_line(self.clients[\"H3\"],\n \"H0/0.1@lu/st, H1a/0.1@lu/st, H2a/0.1@lu/st, H2c/0.1@lu/st\")", "def test_workflow_parse(self):\n\n workflow = [{'job_template': 'sample_job1',\n 'success': [{'job_template': 'sample_job2'}],\n 'always': [{'job_template': 'sample_job3'}]}]\n dry_run = True\n extra_vars_arg = {'sample_vars': 'sample'}\n\n top_node = tree.generate_workflow_tree(workflow, dry_run,\n extra_vars_arg)\n correct = 1\n self.assertEqual(top_node.node_id, correct)", "def build():\n local('vagrant up')" ]
[ "0.5666077", "0.5666077", "0.5635756", "0.5634427", "0.53961426", "0.5334729", "0.52571476", "0.52398247", "0.5215903", "0.5194408", "0.5111144", "0.51089895", "0.505281", "0.50474066", "0.5011156", "0.50028014", "0.49496236", "0.4938487", "0.4927877", "0.48954463", "0.48513916", "0.4836588", "0.4828846", "0.48177338", "0.48066425", "0.48060808", "0.47948453", "0.47854248", "0.47723517", "0.47621235", "0.47492793", "0.47461206", "0.4718794", "0.47087473", "0.46959135", "0.4693809", "0.46836978", "0.46699852", "0.46698827", "0.46633774", "0.46627277", "0.46625575", "0.46625575", "0.46585703", "0.46577778", "0.4656488", "0.46496347", "0.46431005", "0.4640208", "0.46368885", "0.46348667", "0.4634819", "0.46246248", "0.46164972", "0.461522", "0.46113774", "0.4587476", "0.45855147", "0.4563029", "0.45605755", "0.455229", "0.45360094", "0.4533193", "0.45320448", "0.45311406", "0.45292613", "0.4524824", "0.45072177", "0.45034525", "0.44985053", "0.44955966", "0.449517", "0.44948965", "0.44894043", "0.4486887", "0.44831055", "0.44826013", "0.44790655", "0.44756812", "0.44746765", "0.44746503", "0.44682714", "0.44607314", "0.44561926", "0.44420224", "0.44361055", "0.44304103", "0.442917", "0.4415964", "0.44140357", "0.44121176", "0.44096816", "0.44093978", "0.4407559", "0.44057336", "0.44052455", "0.43975094", "0.4396174", "0.43923694", "0.43823224" ]
0.5708708
0
See recipe_api.py for docs.
def defer_results(self): return recipe_api.defer_results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_recipe_information(self):\n pass", "def recipe(self):\n return self.__recipe", "def test_get_recipe_information_bulk(self):\n pass", "def get_recipe(self, _id):\n raise NotImplementedError()", "def test_get_random_recipes(self):\n pass", "def test_search_recipes(self):\n pass", "def recipes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Recipe]:\n pass", "def test_get_recipe_ingredients_by_id(self):\n pass", "def list_recipes(environ, start_response):\n return list_entities(environ, start_response, 'list_recipes')", "def test_get_analyzed_recipe_instructions(self):\n pass", "def test_summarize_recipe(self):\n pass", "def test_search_recipes_by_ingredients(self):\n pass", "def test_creating_recipe_with_ingredients(self):\n ingredient1 = sample_ingredients(user=self.user, name='Prawns')\n ingredient2 = sample_ingredients(user=self.user, name='Garlic')\n\n payload = {\n 'title': 'Avocado lime cheesecake',\n 'time_minutes': 20,\n 'price': 500.00,\n 'currency': 'NGN',\n 'ingredients': [ingredient1.id, ingredient2.id]\n }\n self.evaluate_recipe(ingredient1, ingredient2, payload, 'ingredient')", "def test_create_basic_recipe(self):\n\n payload = {'name': 'Focaccia', 'description': 'Detailed description'}\n\n res = self.client.post(RECIPES_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n\n self.assertEqual(payload['name'], recipe.name)\n self.assertEqual(payload['description'], recipe.description)", "def set_recipe(self, recipe):\n self.__recipe.append(recipe)\n return 'Recipe created successfully'", "def add_recipe(self, recipe): \n\t\tfor key, val in self.recipes_list.items():\n\t\t\tif key == recipe.recipe_type:\n\t\t\t\tself.recipes_list[key][recipe.name] = recipe", "def sample_recipe(**params):\n defaults = {\n 'name': 'Empanadas',\n 'description': 'Test description to prepare meat empanadas!'\n }\n defaults.update(params)\n\n return Recipe.objects.create(**defaults)", "def test_create_recipe_with_ingredients(self):\n\n payload = {\n 'name': 'Gnocchi',\n 'description': 'A detailed description of a yummy recipe!',\n 'ingredients': [\n {'name': 'Potatoes'},\n {'name': 'Flour'},\n {'name': 'Nutmeg'}\n ]\n }\n\n res = self.client.post(RECIPES_URL, payload, format='json')\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n\n self.assertEqual(payload['name'], recipe.name)\n self.assertEqual(payload['description'], recipe.description)\n self.assertEqual(recipe.ingredients.count(), 3)\n self.assertEqual(recipe.ingredients.first().name, 'Potatoes')", "def find_recipe(self, recipe_id):\n return self.find_doc('recipe', 'name', self.get_unique_recipe_name(recipe_id))", "def test_recipe(self):\n recipe = recipe_test_utils.create_recipe()\n self.job1.recipe_id = recipe.id\n self.job1.save()\n\n url = '/%s/jobs/?recipe_id=%d' % (self.api, recipe.id)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['id'], self.job1.id)", "def __init__(self):\n self.recipeset = {}\n self.hardcoded()", "def test_create_recipe_card(self):\n pass", "def test_visualize_recipe_ingredients_by_id(self):\n pass", "def __init__(self):\n TransformRecipes.__init__(self)", "def recipe(id):\n\n selected_recipe = mongo.db.recipes.find_one({'_id': 
ObjectId(id)})\n\n # Using create list function to display these sections easier\n display_method = create_list(selected_recipe[\"method\"])\n display_ingredients = create_list(selected_recipe[\"ingredients\"])\n display_equipment = create_list(selected_recipe[\"equipment\"])\n\n show_ad = make_comparison(ad_equipment, display_equipment)\n\n return render_template('view_recipe.html', recipe=selected_recipe,\n title='Recipe', display_method=display_method,\n ad_equipment=ad_equipment,\n display_ingredients=display_ingredients,\n display_equipment=display_equipment,\n show_ad=show_ad)", "def recipes():\n if request.method == 'GET':\n return Response(\n json.dumps(recipebook.to_json_list()),\n mimetype=\"application/json\")\n elif request.method == 'POST':\n new_dict = request.get_json()\n recipebook.recipes.append(models.Recipe.from_json_dict(new_dict))\n write_out()\n return Response(status=200)", "def test_retrieve_recipes(self):\n sample_recipe(name=\"Avocado toast\")\n sample_recipe(name='Baklava')\n\n res = self.client.get(RECIPES_URL)\n\n recipes = Recipe.objects.all()\n serializer = RecipeSerializer(recipes, many=True)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(len(res.data), 2)\n self.assertEqual(res.data, serializer.data)", "def new_recipe(self):\n os.system(\"clear\")\n self.name = input(\"Recipe Name: \")\n self.ingredients = None\n self.steps=None\n self.description = None\n self.tags = {}\n self.edit_ingredients()\n self.edit_steps()\n if _yes_no_select(\"Would you like to add a description?\"):\n self.description = input(\"Description: \")\n self.edit_tags()\n while not self.check():\n continue", "def test_analyze_recipe_instructions(self):\n pass", "def retrive_recipe(self):\n sample_recipe(user=self.user)\n sample_recipe(user=self.user)\n\n res = self.client.get(RECIPE_URL)\n\n recipe = Recipe.objects.all().order_by('-id')\n serailzer = Recipeserializer(recipe,many = True)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(res.data, serailzer.data)", "def test_retrieving_recipes(self):\n sample_recipe(user=self.user)\n sample_recipe(user=self.user)\n\n res = self.client.get(RECIPES_URL)\n\n recipes = Recipe.objects.all().order_by('-id')\n serializer = RecipeSerializer(recipes, many=True)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(res.data, serializer.data)", "def recipe(index):\n try:\n if request.method == 'GET':\n return Response(\n json.dumps(recipebook.recipes[index].to_json_dict()),\n mimetype=\"application/json\")\n elif request.method == 'DELETE':\n del recipebook.recipes[index]\n write_out()\n return Response(status=200)\n except IndexError: # recipe with specified index does not exist\n return Response(\n \"{\\\"error\\\":\\\"no such recipe\\\"}\",\n status=404,\n mimetype=\"application/json\")", "def getRecipeData(recipeId):\n return Gw2Spidy._request('recipe', str(recipeId))", "def test_retrieve_recipes(self):\n sample_recipe(user = self.user)\n sample_recipe(user = self.user)\n\n res = self.client.get(RECIPE_URL)\n\n recipes = Recipe.objects.all().order_by('-id')\n serializer = RecipeSerializer(recipes, many=True) # many=true returns the data as a list\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(res.data, serializer.data)", "def test_retrieve_recipes(self):\n sample_recipe(user=self.user)\n sample_recipe(user=self.user, title=\"Beans\")\n\n res = self.client.get(RECIPE_URL)\n\n recipes = Recipe.objects.all().order_by('-id')\n serializer = 
RecipeSerializer(recipes, many=True)\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(len(res.data), 2)\n self.assertEqual(res.data, serializer.data)", "def test_creating_recipe_with_tags(self):\n tag1 = sample_tags(user=self.user, name='Vegan')\n tag2 = sample_tags(user=self.user, name='Dessert')\n\n payload = {\n 'title': 'Avocado lime cheesecake',\n 'time_minutes': 60,\n 'price': 5000.00,\n 'currency': 'NGN',\n 'tags': [tag1.id, tag2.id]\n }\n self.evaluate_recipe(tag1, tag2, payload, 'tag')", "def save(self):\n self._payload_to_str()\n return self.recipe.client._perform_json(\n \"PUT\", \"/projects/%s/recipes/%s\" % (self.recipe.project_key, self.recipe.recipe_name),\n body=self.data)", "def test_get_similar_recipes(self):\n pass", "def create_recipe(*, recipe_in: RecipeCreate) -> dict:\n new_entry_id = len(RECIPES) + 1\n recipe_entry = Recipe(\n id=new_entry_id,\n label=recipe_in.label,\n source=recipe_in.source,\n url=recipe_in.url,\n )\n RECIPES.append(recipe_entry.dict())\n\n return recipe_entry", "def test_create_new_recipe(self):\n payload = {\n 'title': 'Cheescake',\n 'time_taken': 35,\n 'price': 5\n }\n\n res = self.client.post(RECIPE_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n for key in payload.keys():\n self.assertEqual((payload)[key], getattr(recipe, key))\n\n # recipe = get_sample_recipe(self.sample_user)\n # db_recipe =\n\n # self.assertEqual(recipe.title, )", "def add_recipe(self, name, factory_method):\n self._recipes[name] = factory_method", "def view_recipe(request, recipe, **_kwargs):\n return render(request, \"deployments/disp_recipe.html\", {\"recipe\": recipe})", "def test_extract_recipe_from_website(self):\n pass", "def test_get_recipe_taste_by_id(self):\n pass", "def recipe(self, recipe):\n import hxl.filters\n return hxl.filters.from_recipe(self, recipe)", "def test_patch_recipe(self):\n recipe = sample_recipe(self.user)\n recipe.tags.add(sample_tag(self.user))\n tag = sample_tag(self.user, name='bacon')\n\n payload = {\n 'title': 'Ham hack',\n 'tags': tag.id\n }\n res = self.client.patch(detail_url(recipe.id), payload)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n recipe.refresh_from_db()\n serializer = RecipeSerializer(recipe)\n self.assertEqual(res.data, serializer.data)\n self.assertEqual(recipe.title, payload['title'])\n self.assertEqual(serializer.data['tags'], [payload['tags']])\n tags = recipe.tags.all()\n self.assertEqual(len(tags), 1)\n self.assertIn(tag, tags)", "def test_create_basic_recipe(self):\n payload = {\n 'title': 'Cake',\n 'time_minutes': 40,\n 'price': 20,\n }\n res = self.client.post(RECIPE_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n for key in payload:\n self.assertEqual(payload[key], getattr(recipe, key))\n serializer = RecipeDetailSerializer(recipe)\n self.assertEqual(res.data, serializer.data)", "def create(self):\n self._finish_creation_settings()\n return self.project.create_recipe(self.recipe_proto, self.creation_settings)", "def test_get_recipes(self):\n r1 = Recipes.objects.create(chef=self.user, name=\"Recipe 1\", draft=True)\n r2 = Recipes.objects.create(chef=self.user, name=\"Recipe 2\", draft=False)\n r3 = Recipes.objects.create(chef=self.user, name=\"Recipe 3\", draft=False)\n book = Book.objects.create(chef=self.user, book_type=Book.TO_SELL)\n book.add_recipe(r3)\n\n url = '/0/chefs/%i/recipes' % self.user.pk\n\n resp 
= self.client.get(url)\n self.assertPermissionDenied(resp)\n\n headers = self.login()\n resp = self.client.get(url, **headers)\n self.assertEqual(resp.status_code, 200)\n self.assertIn('recipes', resp.data)\n self.assertEqual(1, len(resp.data['recipes']))\n keys = (\"liked\", \"public_url\", \"edit_date\", \"ingredients\", \"shared\", \"tags\", \"commented\",\n \"private\", \"id\", \"chef\", \"reported\", \"nb_shares\", \"added\", \"nb_added\",\n \"nb_comments\", \"draft\", \"commensals\", \"creation_date\", \"nb_likes\", \"name\",\n \"products\", \"prep_time\", \"serves\", \"bought\", \"book_for_sale\", \"description\")\n self.assertEqual(set(keys), set(resp.data['recipes'][0].keys()))\n self.assertEqual(r2.pk, resp.data['recipes'][0]['id'])", "def test_recipes_create(self):\n app = self.create_app()\n c = app.test_client()\n\n # test if authorization is required to create a recipe\n rv = c.get('/recipes/create')\n self.assertRedirects(rv, \"/auth/login\")\n\n # test recipe page\n register(c, app.config[\"USERNAME\"], app.config[\"PASSWORD\"])\n login(c, app.config[\"USERNAME\"], app.config[\"PASSWORD\"])\n c.get('/recipes/create')\n self.assert_template_used(\"recipes/create.html\")\n\n # test adding recipe\n recipe = {'author_id': \"unittest\", 'title': \"recipe_unittest2\", 'body': \"Empty body\",\n 'servings': 4, 'tag': \"dessert\", 'ingredients': [{'ingName': \"ing_unittest3_solid\", 'quantity': 180, 'portion': 'g'}, {\n 'ingName': \"ing_unittest1_liquid\", 'quantity': 2, 'portion': 'cup'}]}\n with app.app_context():\n create_recipe(c, recipe)\n self.assert_template_used(\"recipes/index.html\")", "def test_retrieve_recipe(self):\n sample_recipe(user=self.user)\n sample_recipe(user=self.user)\n res = self.client.get(RECIPE_URL)\n\n recipes = Recipe.objects.all().order_by('id')\n serializer = RecipeSerializer(recipes,many=True)\n\n print(json.dumps(serializer.data, indent=1))\n print('ok')\n print(json.dumps(res.data, indent=1))\n self.assertTrue(res.status_code,status.HTTP_200_OK)\n self.assertEqual(res.data,serializer.data)", "def test_full_update_recipe(self):\n recipe = sample_recipe()\n recipe.ingredients.create(name='Eggs')\n original_description = recipe.description\n\n payload = {\n 'name': 'Vegan gnocchi',\n 'ingredients': [{'name': 'Vegegg'}]\n }\n url = recipe_detail_url(recipe.id)\n self.client.put(url, payload, format='json')\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.name, payload['name'])\n self.assertEqual(recipe.description, original_description)\n self.assertEqual(recipe.ingredients.count(), 1)\n self.assertTrue(recipe.ingredients.first().name, 'Eggs')", "def test_get_recipe_equipment_by_id(self):\n pass", "def recipe_as(self, recipe):\n prefix, suffix = self._get_jsonp()\n return prefix + JSON.recipe_as(self, recipe) + suffix", "def test_get_recipe_by_id(self):\n recipe = self.request_mgr.get_recipe_by_id(35354)\n self.assertIn(\"Guinness\", recipe.get('title'))", "def put(environ, start_response):\n recipe_name = web.get_route_value(environ, 'recipe_name')\n recipe_name = web.handle_extension(environ, recipe_name)\n\n recipe = Recipe(recipe_name)\n store = environ['tiddlyweb.store']\n length, _ = web.content_length_and_type(environ)\n\n usersign = environ['tiddlyweb.usersign']\n\n try:\n recipe = store.get(recipe)\n recipe.policy.allows(usersign, 'manage')\n except NoRecipeError:\n create_policy_check(environ, 'recipe', usersign)\n\n try:\n serialize_type = web.get_serialize_type(environ)[0]\n except TypeError:\n raise HTTP400('Content-type header 
required')\n\n try:\n serializer = Serializer(serialize_type, environ)\n serializer.object = recipe\n content = web.read_request_body(environ, length)\n serializer.from_string(content.decode('utf-8'))\n\n recipe.policy.owner = usersign['name']\n\n _validate_recipe(environ, recipe)\n store.put(recipe)\n except RecipeFormatError as exc:\n raise HTTP400('unable to put recipe: %s' % exc)\n except TypeError as exc:\n raise HTTP400('malformed input: %s' % exc)\n except NoSerializationError:\n raise HTTP415('Content type %s not supported' % serialize_type)\n\n start_response(\"204 No Content\",\n [('Location', web.recipe_url(environ, recipe))])\n\n return []", "def add_recipe(self, recipe):\n self.recipe_list[recipe.recipe_type].append(recipe)\n self.last_update = datetime.now", "def test_full_update_reecipe(self):\n recipe = sample_recipe(user=self.user)\n recipe.tags.add(sample_tag(user = self.user))\n payload = {\n 'title': 'mutton curry',\n 'time_minuts': 45,\n 'price':450\n\n }\n url = detail_url(recipe.id)\n self.client.put(url , payload)\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.title, payload['title'])\n self.assertEqual(recipe.time_minuts, payload['time_minuts'])\n self.assertEqual(recipe.price, payload['price'])\n tags =recipe.tags.all()\n self.assertEqual(len(tags), 0 )", "def post(self, user):\n data = request.json\n return save_new_recipe(data=data, user=user)", "def name(self):\n return self.recipe_name", "def recipe(db, recipe_factory, ingredient_factory):\n recipe = recipe_factory.create()\n db.session.add(recipe)\n db.session.commit()\n\n ingredients = ingredient_factory.create_batch(10)\n db.session.add_all(ingredients)\n db.session.commit()\n\n for i in ingredients:\n recipe_ingredient = RecipeIngredient(\n recipe_id=recipe.id,\n ingredient_id=i.id,\n )\n db.session.add(recipe_ingredient)\n recipe.ingredients.append(recipe_ingredient)\n db.session.commit()\n\n return recipe", "def add_recipe(self, recipe_id, recipe_title, recipe_detail, ingredient_cuisine_doc, user_doc):\n recipe = {\n 'type': 'recipe',\n 'name': self.get_unique_recipe_name(recipe_id),\n 'title': recipe_title.strip(),\n 'instructions': recipe_detail\n }\n recipe = self.add_doc_if_not_exists(recipe, 'name')\n self.record_recipe_request_for_user(recipe, ingredient_cuisine_doc, user_doc)\n return recipe", "def _ReadRecipeFromFileObject(\n self, file_object: Union[StringIO, TextIOWrapper, TextIO]) -> Recipe:\n json_dict = json.load(file_object)\n\n description = json_dict['description']\n del json_dict['description']\n\n args = []\n for arg_list in json_dict['args']:\n args.append(RecipeArgs(*arg_list))\n del json_dict['args']\n\n return resources.Recipe(description, json_dict, args)", "def test_autocomplete_recipe_search(self):\n pass", "def run(self, recipe_script, api):\n self._get_client('paths')._initialize_with_recipe_api(api)\n\n # TODO(martiniss): Remove this once we've transitioned to the new results\n # format\n if self._engine_flags and self._engine_flags.use_result_proto:\n logging.info(\"Using new result proto logic\")\n return self._new_run(recipe_script, api)\n return self._old_run(recipe_script, api)", "def test_full_update_recipe(self):\n recipe = sample_recipe(user=self.user)\n recipe.tags.add(sample_tags(user=self.user))\n payload = {\n 'title': 'Jollof Spaghetti',\n 'time_minutes': 30,\n 'price': 5.00,\n 'currency': 'USD',\n }\n url = detail_url(recipe_id=recipe.id)\n self.client.put(url, payload)\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.title, payload['title'])\n 
self.assertEqual(recipe.time_minutes, payload['time_minutes'])\n self.assertEqual(recipe.price, payload['price'])\n self.assertEqual(recipe.currency, payload['currency'])\n tags = recipe.tags.all()\n self.assertEqual(len(tags), 0)", "def test_retrive_recipe_detail(self):\n recipe = create_sample_recipe(user=self.sample_user)\n recipe.tag.add(create_sample_tag(user=self.sample_user))\n recipe.ingredient.add(create_sample_ingredient(user=self.sample_user))\n\n detail_URL = get_detail_URL(recipe.id)\n res = self.client.get(detail_URL)\n\n serializer = RecipeDetailSerializer(recipe)\n\n self.assertEqual(res.data, serializer.data)", "def test_full_update_recipe(self):\n recipe = sample_recipe(user=self.user)\n recipe.tag.add(sample_tag(user=self.user))\n payload = {\n 'title':'chicken noodles',\n 'time_minutes':50,\n 'price':12.67,\n }\n url = detail_url(recipe.id)\n self.client.put(url,payload)\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.title,payload['title'])\n self.assertEqual(recipe.time_minutes,payload['time_minutes'])\n self.assertEqual(float(recipe.price),payload['price'])\n tags = recipe.tag.all()\n self.assertEqual(len(tags),0)\n self.assertEqual(recipe.user,self.user)", "def __init__(self, recipes=ALLRECIPES_DICT, search_ingredients=SEARCH_INGREDIENTS):\n self.recipes = json.loads(open(recipes).read())\n for (k, v) in json.loads(open(IDEORECIPES_DICT).read()).items():\n self.recipes[k] = v\n self.search_ingredients = [v.strip().lower() for v in open(search_ingredients)]\n self.food_hash = {}\n self.adj = None\n self.sortby_methods = [\"relevance\", \"popular\", \"rating\"]\n self.to_skips = [\"sugar\", \"salt\", \"milk\", \"pepper\", \"butter\"]", "def sample_recipe(user, **kwargs):\n defaults = {\n 'title': 'Sample recipe',\n 'time_minutes': 15,\n 'price': 500.00,\n 'currency': 'NGN',\n }\n defaults.update(kwargs)\n return Recipe.objects.create(user=user, **defaults)", "def _determine_recipe(environ):\n recipe_name = web.get_route_value(environ, 'recipe_name')\n recipe_name = web.handle_extension(environ, recipe_name)\n recipe = Recipe(recipe_name)\n\n store = environ['tiddlyweb.store']\n\n try:\n recipe = store.get(recipe)\n except NoRecipeError as exc:\n raise HTTP404('%s not found, %s' % (recipe.name, exc))\n\n return recipe", "def test_full_update_recipe(self):\n recipe = sample_recipe(user=self.user)\n recipe.tags.add(sample_tag(user=self.user))\n payload = {\n 'title': 'Spaghetti',\n 'time_minutes': 25,\n 'price': 5.00,\n }\n url = detail_url(recipe.id)\n self.client.put(url, payload)\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.title, payload['title'])\n self.assertEqual(recipe.time_minutes, payload['time_minutes'])\n self.assertEqual(recipe.price, payload['price'])\n tags = recipe.tags.all()\n self.assertEqual(len(tags), 0)", "def test_create_basic_recipe(self):\n payload = {\"title\": \"Vietnamese Cake\",\n \"time_minutes\": 45,\n \"price\": 5.55}\n res = self.client.post(RECIPE_URL, payload)\n recipe = Recipe.objects.get(id=res.data['id'])\n for key in payload.keys():\n if key == \"price\":\n self.assertEqual(round(Decimal(payload[key]), 2), getattr(recipe, key))\n else:\n self.assertEqual(payload[key], getattr(recipe, key))\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)", "def save_recipe(self, recipe):\n\n if self.photo.data:\n recipe.photo = photos.save(self.photo.data.stream)\n\n recipe.title = self.title.data\n recipe.title_slug = slugify(self.title.data)\n recipe.description = self.description.data\n recipe.instructions = 
self.instructions.data\n recipe.general_ingredients = [\n i.to_model() for i in self.general_ingredients]\n recipe.ingredient_groups = [\n g.to_model() for g in self.ingredient_groups]\n recipe.tags = self.tags.data\n\n recipe.save()", "def _validate_recipe(environ, recipe):\n try:\n validate_recipe(recipe, environ)\n except InvalidBagError as exc:\n raise HTTP409('Recipe content is invalid: %s' % exc)", "def test_view_recipe_details(self):\n recipe = sample_recipe(user=self.user)\n recipe.tags.add(sample_tag(user=self.user))\n recipe.ingredient.add(sample_ingredient(user=self.user))\n\n url = detail_url(recipe.id)\n res = self.client.get(url)\n\n serailzer = RecipeDetailSerializer(recipe)\n self.assertEqual(res.data, serailzer.data)", "def recipe_id(self, recipe_id):\n\n self._recipe_id = recipe_id", "def test_retrieve_recipes(self):\n sample_quizz()\n sample_quizz()\n\n res = self.client.get(QUIZZES_URL)\n\n quizzes = Quizz.objects.all()\n serializer = QuizzSerializer(quizzes, many=True)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(res.data, serializer.data)", "def test_recipe_model(self):\n recipe = Recipe(uri=\"testuri\", name=\"testname\", image_url=\"test_image_url\")\n\n db.session.add(recipe)\n db.session.commit()\n\n recipes = Recipe.query.all()\n\n self.assertEqual(len(recipes), 1)\n self.assertEqual(recipes[0].uri, \"testuri\")\n self.assertEqual(recipes[0].name, \"testname\")\n self.assertEqual(recipes[0].image_url, \"test_image_url\")", "def _create_ingredient(self, data):\n return Ingredient(**data)", "def search_recipe(ingredients):\n\n params = '+'.join(ingredients.split())\n url_search = SEARCH_URL.format(params)\n response = req.get(url_search)\n\n return response.content", "def test_create_recipe_with_ingredients(self):\n ingredient1 = sample_ingredient(user=self.user, name = 'bla')\n ingredient2 = sample_ingredient(user=self.user, name = 'blaa')\n payload = {\n 'title': 'red curry',\n 'ingredients': [ingredient1.id, ingredient2.id],\n 'time_minutes': 30,\n 'price': 30.00\n }\n res = self.client.post(RECIPE_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredients.all()\n\n self.assertEqual(ingredients.count(), 2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingredient2, ingredients)", "def get_product_ingredients(self, driver):\n pass", "def test_put_recipe(self):\n recipe = sample_recipe(self.user)\n recipe.tags.add(sample_tag(self.user))\n payload = {\n 'title': 'Ham hack',\n 'time_minutes': 38,\n 'price': 33.00\n }\n res = self.client.put(detail_url(recipe.id), payload)\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n\n recipe.refresh_from_db()\n serializer = RecipeSerializer(recipe)\n self.assertEqual(res.data, serializer.data)\n self.assertEqual(recipe.title, payload['title'])\n tags = recipe.tags.all()\n self.assertEqual(len(tags), 0)", "def sample_recipe(user, **params):\n defaults = {\n 'title': 'bread',\n 'time_minutes': 50,\n 'price': 5.00\n }\n defaults.update(params)\n\n return Recipe.objects.create(user=user, **defaults)", "def get_recipe(cls, recipeid):\n\n recipe = Recipe.query.filter_by(recipe_id=recipeid).one()\n\n return recipe", "def test_creating_recipe_with_ingredients(self):\n ingredient1 = create_sample_ingredient(user=self.user, name=\"Paprika\")\n ingredient2 = create_sample_ingredient(user=self.user, name=\"Salad\")\n\n payload = {\n \"title\": \"Green Salad\",\n \"time_minutes\": 34,\n 
\"price\": 4.66,\n \"ingredients\": [ingredient1.id, ingredient2.id]\n }\n res = self.client.post(RECIPE_URL, payload)\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredients.all()\n self.assertEqual(ingredients.count(), 2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingredient2, ingredients)", "def __init__(self, recipe_url):\n self.recipe_url = recipe_url\n self.translator = TranslationClient()", "def get_recipe_raw_definition(self):\n return self.recipe_settings", "def recipe(self, name_parts: ResourceIdentifier, type_in: Optional[str], data_in: JsonObject, group: Optional[str] = None, conditions: Json = None) -> RecipeContext:\n res = utils.resource_location(self.domain, name_parts)\n self.write((*self.resource_dir, 'data', res.domain, 'recipes', res.path), {\n 'type': type_in,\n 'group': group,\n **data_in,\n 'conditions': utils.recipe_condition(conditions)\n })\n return RecipeContext(self, res)", "def test_create_recipe_with_ingredient(self):\n ingredient1 = sample_ingredient(user=self.user, name='Prawns')\n ingrident2 = sample_ingredient(user=self.user, name ='Ginger')\n\n payload = {\n 'title': 'Thai prawn and curry',\n 'ingredient': [ingredient1.id,ingrident2.id],\n 'time_minuts':60,\n 'price': 250\n }\n res = self.client.post(RECIPE_URL,payload)\n\n self.assertEqual(res.status_code, status.HTTP_201_CREATED)\n\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredient.all()\n self.assertEqual(ingredients.count(),2)\n self.assertIn(ingredient1, ingredients)\n self.assertIn(ingrident2,ingredients)", "def test_search_recipes_by_nutrients(self):\n pass", "def test_recipes_limited_to_user(self):\n user2 = get_user_model().objects.create_user(\n email = 'test2@gmail.com',\n password = '123465',\n )\n sample_recipe(user = user2)\n sample_recipe(user = self.user)\n\n res = self.client.get(RECIPE_URL)\n\n recipes = Recipe.objects.filter(user=self.user)\n serializer = RecipeSerializer(recipes, many=True) # although we only have one recipe for this user, we still pass many=true since even if there is one object returned, the list api should always return a data type of list\n\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual(len(res.data), 1)\n self.assertEqual(res.data, serializer.data)", "def get(id: int):\r\n filename = Path(__file__).parent / \"recipe-data.csv\"\r\n files = import_file.Files()\r\n recipe_load = files.import_from_csv(filename)\r\n\r\n recipes = Recipes(recipe_load)\r\n a_recipe = recipes.filter_recipes_id(id)\r\n\r\n return jsonify(a_recipe)", "def test_create_recipe_with_ingredients(self):\n ing1 = sample_ingredient(user=self.user,name=\"ginger\")\n ing2 = sample_ingredient(user=self.user, name=\"Prawn\")\n payload = {\n 'title':'Prawn curry',\n 'ingredient':[ing1.id,ing2.id],\n 'time_minutes':60,\n 'price':10.00,\n }\n res = self.client.post(RECIPE_URL,payload)\n self.assertEqual(res.status_code,status.HTTP_201_CREATED)\n recipe = Recipe.objects.get(id=res.data['id'])\n ingredients = recipe.ingredient.all()\n self.assertEqual(ingredients.count(),2)\n self.assertIn(ing1,ingredients)\n self.assertIn(ing2,ingredients)", "def sample_recipe(user,**params):\n default = {\n 'title' : 'sample title',\n 'time_minutes' : 10,\n 'price' : 5.00,\n }\n default.update(params)\n return Recipe.objects.create(user=user,**default)", "def recipes():\n recipes = mongo.db.recipes.find()\n return render_template(\"recipes.html\", 
recipes=recipes)", "def single_recipe_controller(filters, db_conn, host_url):\n try:\n result = db_conn[\"recipes\"].find(filters).sort(\"createdOn\", -1)\n recipe_list = map_response(result, host_url)\n return recipe_list\n\n except Exception as e:\n print(e)\n return {\"success\": False, \"message\": \"Error in api: \" + str(e)}", "def __repr__(self):\n\n return \"<Recipe recipe_id=%d user_id=%d recipe_title=%s instructions=%s>\" % (self.recipe_id, self.user_id, self.recipe_title, self.instructions)", "def __call__(self):\n context = Context()\n return self.recipe.execute(context, self.cmd, self.cmd_args)" ]
[ "0.7399995", "0.7240145", "0.7060982", "0.69131684", "0.68955857", "0.6634333", "0.6622917", "0.6594366", "0.6353314", "0.63221014", "0.63158387", "0.6281272", "0.6268768", "0.62628794", "0.6251283", "0.6206122", "0.6205642", "0.61758476", "0.61579376", "0.6142557", "0.6135808", "0.61353326", "0.61344326", "0.61078906", "0.61035043", "0.60933095", "0.608872", "0.60832596", "0.6071791", "0.60535556", "0.605283", "0.60474753", "0.60369325", "0.6029343", "0.6020101", "0.59655184", "0.5963838", "0.5948853", "0.5938817", "0.5929235", "0.5925952", "0.59151", "0.5904971", "0.59045565", "0.5881764", "0.58802915", "0.5877684", "0.58768755", "0.5862539", "0.58590686", "0.58577776", "0.5854725", "0.58359313", "0.5815176", "0.5807889", "0.5800857", "0.57813174", "0.5770621", "0.5765002", "0.57562876", "0.57471013", "0.57334745", "0.5732018", "0.57085407", "0.57070464", "0.5704894", "0.56915", "0.5690357", "0.56891984", "0.5686359", "0.566926", "0.56677824", "0.566006", "0.566003", "0.5659482", "0.56551254", "0.56484085", "0.56313425", "0.5630888", "0.5629105", "0.561848", "0.5615964", "0.5605291", "0.56040084", "0.56013733", "0.5594902", "0.5593484", "0.5592246", "0.5591969", "0.55884343", "0.558655", "0.5577376", "0.55728245", "0.557174", "0.5563252", "0.5557508", "0.55553913", "0.555078", "0.5544834", "0.5539509" ]
0.59413797
38