Dataset columns:

  code         string  (lengths 22 to 1.05M)
  apis         list    (lengths 1 to 3.31k)
  extract_api  string  (lengths 75 to 3.25M)
import random import struct import sys import time from pathlib import Path import numpy as np from SimulaQron.general.hostConfig import * from SimulaQron.cqc.backend.cqcHeader import * from SimulaQron.cqc.pythonLib.cqc import * from flow import circuit_file_to_flow, count_qubits_in_sequence from angle import measure...
[ "flow.count_qubits_in_sequence", "random.choice", "pathlib.Path", "angle.measure_angle", "time.sleep", "flow.circuit_file_to_flow", "sys.exit", "random.random" ]
[((483, 516), 'random.choice', 'random.choice', (['circuit_file_paths'], {}), '(circuit_file_paths)\n', (496, 516), False, 'import random\n'), ((599, 647), 'flow.circuit_file_to_flow', 'circuit_file_to_flow', (['"""./circuits/circuit1.json"""'], {}), "('./circuits/circuit1.json')\n", (619, 647), False, 'from flow impor...
import unittest from unittest.mock import MagicMock from datetime import timedelta from osgar.bus import Bus from osgar.node import Node class NodeTest(unittest.TestCase): def test_usage(self): empty_config = {} bus = Bus(logger=MagicMock()) node = Node(config=empty_config, bus=bus.handl...
[ "unittest.mock.MagicMock" ]
[((253, 264), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (262, 264), False, 'from unittest.mock import MagicMock\n'), ((481, 492), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (490, 492), False, 'from unittest.mock import MagicMock\n')]
from selenium import webdriver from selenium.webdriver.common.keys import Keys import time driver = webdriver.Chrome(executable_path="E:/SQA/chromedriver_win32/chromedriver.exe") driver.get("http://newtours.demoaut.com/") time.sleep(5) print(driver.title) driver.get("https://www.google.com/") time.sleep(5) print(dr...
[ "selenium.webdriver.Chrome", "time.sleep" ]
[((101, 179), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'executable_path': '"""E:/SQA/chromedriver_win32/chromedriver.exe"""'}), "(executable_path='E:/SQA/chromedriver_win32/chromedriver.exe')\n", (117, 179), False, 'from selenium import webdriver\n'), ((223, 236), 'time.sleep', 'time.sleep', (['(5)'], {})...
import asyncio import datetime import json import warnings from pathlib import Path from typing import Tuple, Optional, TYPE_CHECKING import aiorpcx from modules import config from modules.electrum_mods.functions import BIP32Node, pubkey_to_address, address_to_script, \ script_to_scripthash, constants from module...
[ "modules.helpers.timestamp", "modules.config.get", "modules.config.check", "modules.models.Payment.update", "modules.electrumx.ElectrumX", "modules.models.Invoice.select", "modules.logging.logger.info", "modules.electrumx.ElectrumError", "modules.electrum_mods.functions.pubkey_to_address", "pathli...
[((1114, 1175), 'modules.electrumx.ElectrumX', 'ElectrumX', (['self.symbol'], {'default_ports': 'self.PEER_DEFAULT_PORTS'}), '(self.symbol, default_ports=self.PEER_DEFAULT_PORTS)\n', (1123, 1175), False, 'from modules.electrumx import ElectrumX, ElectrumError\n'), ((1208, 1235), 'modules.helpers.inv_dict', 'inv_dict', ...
"""Unit tests for direction_updater.py.""" # standard library import argparse import unittest from unittest.mock import MagicMock # py3tester coverage target __test_target__ = 'delphi.epidata.acquisition.covidcast.direction_updater' class UnitTests(unittest.TestCase): """Basic unit tests.""" def test_get_argum...
[ "unittest.mock.MagicMock" ]
[((579, 607), 'unittest.mock.MagicMock', 'MagicMock', ([], {'partitions': '[0, 1]'}), '(partitions=[0, 1])\n', (588, 607), False, 'from unittest.mock import MagicMock\n'), ((628, 639), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (637, 639), False, 'from unittest.mock import MagicMock\n'), ((710, 721), 'un...
import requests import os import logging import subprocess import re import settings _GITHUB_BASE_URL = 'https://api.github.com' _REPO_DIR = settings.get('repo_dir', os.getcwd()) _REPO_CNT = settings.get('repo_count', 10) _QUERY_STRING = settings.get('repo_query_string') _TOKEN = settings.get('github_token', require...
[ "settings.get", "subprocess.Popen", "logging.warning", "os.path.join", "requests.get", "os.getcwd", "re.sub" ]
[((194, 224), 'settings.get', 'settings.get', (['"""repo_count"""', '(10)'], {}), "('repo_count', 10)\n", (206, 224), False, 'import settings\n'), ((241, 274), 'settings.get', 'settings.get', (['"""repo_query_string"""'], {}), "('repo_query_string')\n", (253, 274), False, 'import settings\n'), ((284, 328), 'settings.ge...
#!/user/env python3 # -*- coding: utf-8 -*- import threading import time import os.path from hashlib import sha256 import bjson import logging import random from binascii import hexlify from ..config import C, V, PeerToPeerError from ..client import FileReceiveError, ClientCmd from .utils import AESCipher class File...
[ "hashlib.sha256", "random.choice", "logging.debug", "threading.Lock", "binascii.hexlify", "time.strftime", "logging.info", "time.sleep", "bjson.dump", "threading.Thread", "traceback.print_exc", "time.time", "bjson.load" ]
[((862, 870), 'hashlib.sha256', 'sha256', ([], {}), '()\n', (868, 870), False, 'from hashlib import sha256\n'), ((2594, 2602), 'hashlib.sha256', 'sha256', ([], {}), '()\n', (2600, 2602), False, 'from hashlib import sha256\n'), ((3505, 3513), 'hashlib.sha256', 'sha256', ([], {}), '()\n', (3511, 3513), False, 'from hashl...
import numpy as np import deepxde as dde from deepxde.backend import tf import variable_to_parameter_transform def sbinn(data_t, data_y, meal_t, meal_q): def get_variable(v, var): low, up = v * 0.2, v * 1.8 l = (up - low) / 2 v1 = l * tf.tanh(var) + l + low return v1 ...
[ "deepxde.backend.tf.math.tanh", "deepxde.data.PDE", "deepxde.PointSetBC", "deepxde.Variable", "deepxde.Model", "numpy.hstack", "deepxde.backend.tf.constant", "variable_to_parameter_transform.variable_file", "deepxde.geometry.TimeDomain", "deepxde.backend.tf.sigmoid", "numpy.arange", "deepxde.b...
[((5369, 5457), 'variable_to_parameter_transform.variable_file', 'variable_to_parameter_transform.variable_file', (['(10000)', '(1000)', '(1000000)', '"""variables.csv"""'], {}), "(10000, 1000, 1000000,\n 'variables.csv')\n", (5414, 5457), False, 'import variable_to_parameter_transform\n'), ((329, 346), 'deepxde.Var...
""" Session submodule """ from aiohttp import web from servicelib.session import get_session from servicelib.session import setup_session as do_setup_session def setup(app: web.Application): do_setup_session(app) # alias setup_session = setup __all__ = ( "setup_session", "get_session", )
[ "servicelib.session.setup_session" ]
[((199, 220), 'servicelib.session.setup_session', 'do_setup_session', (['app'], {}), '(app)\n', (215, 220), True, 'from servicelib.session import setup_session as do_setup_session\n')]
from django.core.exceptions import ValidationError from django.core.validators import FileExtensionValidator from django.core.files.uploadedfile import InMemoryUploadedFile from django.forms import Form, ModelForm, FileInput from django.forms.fields import * from captcha.fields import CaptchaField from .models import ...
[ "django.forms.FileInput", "django.core.validators.FileExtensionValidator", "django.core.exceptions.ValidationError", "captcha.fields.CaptchaField" ]
[((803, 825), 'captcha.fields.CaptchaField', 'CaptchaField', ([], {'label': '""""""'}), "(label='')\n", (815, 825), False, 'from captcha.fields import CaptchaField\n'), ((640, 711), 'django.core.exceptions.ValidationError', 'ValidationError', (['f"""Максимальный размер файла {limit // (1024 * 1024)}M"""'], {}), "(f'Мак...
import os import sys import pyspod import shutil from setuptools import setup from setuptools import Command # GLOBAL VARIABLES NAME = pyspod.__name__ URL = pyspod.__url__ AUTHOR = pyspod.__author__ EMAIL = pyspod.__email__ VERSION = pyspod.__version__ KEYWORDS='spectral-proper-orthogonal-decomposition spod' REQUIRED ...
[ "setuptools.setup", "os.path.join", "os.path.dirname", "sys.exit", "os.system" ]
[((2024, 2955), 'setuptools.setup', 'setup', ([], {'name': 'NAME', 'version': 'VERSION', 'description': '"""Python Spectral Proper Orthogonal Decomposition"""', 'long_description': 'DESCR', 'author': 'AUTHOR', 'author_email': 'EMAIL', 'classifiers': "['License :: OSI Approved :: MIT License',\n 'Programming Language...
import os import json import pickle import collections import numpy as np from s2and.consts import CONFIG DATA_DIR = CONFIG["main_data_dir"] OUTPUT_DIR = os.path.join(DATA_DIR, "s2and_mini") if not os.path.exists(OUTPUT_DIR): os.mkdir(OUTPUT_DIR) # excluding MEDLINE because it has no clusters DATASETS = [ "a...
[ "os.path.exists", "pickle.dump", "os.path.join", "pickle.load", "collections.Counter", "os.mkdir", "json.load", "numpy.all", "json.dump" ]
[((156, 192), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""s2and_mini"""'], {}), "(DATA_DIR, 's2and_mini')\n", (168, 192), False, 'import os\n'), ((200, 226), 'os.path.exists', 'os.path.exists', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (214, 226), False, 'import os\n'), ((232, 252), 'os.mkdir', 'os.mkdir', (['OUTPU...
import matplotlib.pyplot as plt from sdaudio.callables import Circular from sdaudio.callables import Constant from sdaudio import draw from sdaudio import wavio from sdaudio.wt_oscillators import Choruses def main(): #------------------------------------------------------------------------- # sawtooth dem...
[ "sdaudio.draw.line", "sdaudio.callables.Circular", "matplotlib.pyplot.grid", "sdaudio.wavio.write", "sdaudio.wt_oscillators.Choruses", "matplotlib.pyplot.plot", "matplotlib.pyplot.figure", "sdaudio.callables.Constant", "matplotlib.pyplot.title", "matplotlib.pyplot.xlim", "matplotlib.pyplot.show"...
[((415, 441), 'sdaudio.draw.line', 'draw.line', (['sr', 'dur', '(60)', '(60)'], {}), '(sr, dur, 60, 60)\n', (424, 441), False, 'from sdaudio import draw\n'), ((503, 515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (513, 515), True, 'import matplotlib.pyplot as plt\n'), ((520, 531), 'matplotlib.pyplot.p...
""" <NAME> <NAME> <NAME> <NAME> CISC 204 Modelling project Wed december 9th 2020 Professor Muise """ #Import from nnf import Var from nnf import Or import nnf from lib204 import Encoding from csvReader import readCSV ''' Customer class Used to create a class containing the various restrictions a person might have ...
[ "csvReader.readCSV", "nnf.Var", "lib204.Encoding" ]
[((749, 759), 'nnf.Var', 'Var', (['"""low"""'], {}), "('low')\n", (752, 759), False, 'from nnf import Var\n'), ((766, 776), 'nnf.Var', 'Var', (['"""med"""'], {}), "('med')\n", (769, 776), False, 'from nnf import Var\n'), ((784, 795), 'nnf.Var', 'Var', (['"""high"""'], {}), "('high')\n", (787, 795), False, 'from nnf imp...
import os import cv2 import numpy as np import matplotlib.pyplot as plt def Compute_Block(cell_gradient_box): k=0 hog_vector = np.zeros((bin_size*4*(cell_gradient_box.shape[0] - 1)*(cell_gradient_box.shape[1] - 1))) for i in range(cell_gradient_box.shape[0] - 1): for j in range(cell_gradient...
[ "numpy.arange", "numpy.power", "cv2.cartToPolar", "os.getcwd", "numpy.zeros", "matplotlib.pyplot.bar", "numpy.concatenate", "matplotlib.pyplot.title", "cv2.resize", "cv2.imread", "numpy.float32", "cv2.Sobel", "matplotlib.pyplot.show" ]
[((141, 238), 'numpy.zeros', 'np.zeros', (['(bin_size * 4 * (cell_gradient_box.shape[0] - 1) * (cell_gradient_box.shape\n [1] - 1))'], {}), '(bin_size * 4 * (cell_gradient_box.shape[0] - 1) * (\n cell_gradient_box.shape[1] - 1))\n', (149, 238), True, 'import numpy as np\n'), ((1537, 1555), 'numpy.zeros', 'np.zero...
import sys n = int(sys.stdin.readline().strip()) dp = [[0, 0] for _ in range(n + 1)] dp[1][1] = 1 for i in range(2, n + 1): dp[i][0] = dp[i - 1][0] + dp[i - 1][1] dp[i][1] = dp[i - 1][0] result = dp[n][0] + dp[n][1] sys.stdout.write(str(result))
[ "sys.stdin.readline" ]
[((20, 40), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (38, 40), False, 'import sys\n')]
from django.conf import settings from django.urls import path, register_converter from mapentity.registry import registry from rest_framework.routers import DefaultRouter from geotrek.common.urls import LangConverter from geotrek.feedback import models as feedback_models from .views import CategoryList, FeedbackOptio...
[ "mapentity.registry.registry.register", "rest_framework.routers.DefaultRouter", "django.urls.register_converter" ]
[((346, 387), 'django.urls.register_converter', 'register_converter', (['LangConverter', '"""lang"""'], {}), "(LangConverter, 'lang')\n", (364, 387), False, 'from django.urls import path, register_converter\n'), ((644, 679), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', ([], {'trailing_slash': '(False)'}), '...
from unittest import TestCase from unittest.mock import MagicMock, patch from openslides.utils import views @patch('builtins.super') class SingleObjectMixinTest(TestCase): def test_get_object_cache(self, mock_super): """ Test that the method get_object caches his result. Tests that get_o...
[ "unittest.mock.MagicMock", "openslides.utils.views.SingleObjectMixin", "unittest.mock.patch", "openslides.utils.views.CSRFMixin.as_view" ]
[((112, 135), 'unittest.mock.patch', 'patch', (['"""builtins.super"""'], {}), "('builtins.super')\n", (117, 135), False, 'from unittest.mock import MagicMock, patch\n'), ((2141, 2164), 'unittest.mock.patch', 'patch', (['"""builtins.super"""'], {}), "('builtins.super')\n", (2146, 2164), False, 'from unittest.mock import...
from flask import Flask from wdb.ext import WdbMiddleware class Wdb(object): def __init__(self, app=None): self.app = app if app: self.init_app(self.app) def init_app(self, app): if app.config.get('WDB_ENABLED', app.debug): start_disabled = app.config.get('WD...
[ "flask.Flask.run", "wdb.ext.WdbMiddleware" ]
[((663, 699), 'flask.Flask.run', 'Flask.run', (['self.app', '*args'], {}), '(self.app, *args, **kwargs)\n', (672, 699), False, 'from flask import Flask\n'), ((430, 480), 'wdb.ext.WdbMiddleware', 'WdbMiddleware', (['app.wsgi_app', 'start_disabled', 'theme'], {}), '(app.wsgi_app, start_disabled, theme)\n', (443, 480), Fa...
import cv2 #image = cv2.imread('/home/brenda/Documentos/independent_study/clevr-dataset-gen/output/images/CLEVR_new_000000.png') #image = cv2.imread('/home/brenda/Escritorio/tmpps5tswcu.png') image = cv2.imread('/home/brenda/Escritorio/tmp47s462az.png') print("imagen: ", image.shape, " ", image.shape[0]* image.sha...
[ "cv2.imshow", "cv2.circle", "cv2.destroyAllWindows", "cv2.waitKey", "cv2.imread" ]
[((203, 256), 'cv2.imread', 'cv2.imread', (['"""/home/brenda/Escritorio/tmp47s462az.png"""'], {}), "('/home/brenda/Escritorio/tmp47s462az.png')\n", (213, 256), False, 'import cv2\n'), ((346, 389), 'cv2.circle', 'cv2.circle', (['image', '(57, 96)', '(5)', '(0, 255, 0)'], {}), '(image, (57, 96), 5, (0, 255, 0))\n', (356,...
# -*- coding: utf-8 -*- """ Simulador de credito - Monto del prestamo del carro - Ingresa tu cuota inicial - Ingresos mensuales - Numero de meses del prestamo - Datos personales ---------------------------------------- Ingresos mensuales - 908.526 - 1.000.000 >>>>>>>>>>>>>> 20.000.000 - 1.000.000 -...
[ "sys.exit" ]
[((1889, 1940), 'sys.exit', 'sys.exit', (['"""El credito es solo para mayores de edad"""'], {}), "('El credito es solo para mayores de edad')\n", (1897, 1940), False, 'import sys\n'), ((1967, 2034), 'sys.exit', 'sys.exit', (['"""El credito es solo para personas entre los 18 y 69 años"""'], {}), "('El credito es solo pa...
import g1.asyncs.agents.parts import g1.messaging.parts.subscribers from g1.apps import parameters from g1.apps import utils from g1.asyncs.bases import queues from g1.bases import labels # For now these are just aliases. from g1.messaging.parts.subscribers import make_subscriber_params from .. import subscribers # p...
[ "g1.apps.utils.define_maker", "g1.messaging.parts.subscribers.make_subscriber_params", "g1.asyncs.bases.queues.Queue", "g1.bases.labels.make_nested_labels" ]
[((635, 697), 'g1.bases.labels.make_nested_labels', 'labels.make_nested_labels', (['module_path', 'SUBSCRIBER_LABEL_NAMES'], {}), '(module_path, SUBSCRIBER_LABEL_NAMES)\n', (660, 697), False, 'from g1.bases import labels\n'), ((929, 992), 'g1.apps.utils.define_maker', 'utils.define_maker', (['make_queue', "{'return': m...
import os import tkinter as tk from telnetlib import Telnet import ctp.pdu.apc as apc class PDUPower(): def __init__(self): self.window = tk.Tk() self.pdu1 = tk.IntVar() self.pdu2 = tk.IntVar() self.pdu3 = tk.IntVar() self.pdu4 = tk.IntVar() self.p1 = t...
[ "tkinter.IntVar", "tkinter.Checkbutton", "tkinter.Button", "tkinter.Tk", "tkinter.Label", "ctp.pdu.apc.APC" ]
[((160, 167), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (165, 167), True, 'import tkinter as tk\n'), ((189, 200), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (198, 200), True, 'import tkinter as tk\n'), ((222, 233), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (231, 233), True, 'import tkinter as tk\n'), ((255,...
import re from email_validator import validate_email, EmailSyntaxError from virtool.users.utils import PERMISSIONS RE_HEX_COLOR = re.compile("^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$") def strip(value: str) -> str: """ Strip flanking whitespace from the passed string. Used to coerce values in Cerberus validators...
[ "email_validator.validate_email", "re.compile" ]
[((133, 181), 're.compile', 're.compile', (['"""^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$"""'], {}), "('^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$')\n", (143, 181), False, 'import re\n'), ((2384, 2405), 'email_validator.validate_email', 'validate_email', (['value'], {}), '(value)\n', (2398, 2405), False, 'from email_validator import ...
import numpy as np import numpy.testing as npt import noisyopt def test_minimize(): deltatol = 1e-3 ## basic testing without stochasticity def quadratic(x): return (x**2).sum() res = noisyopt.minimize(quadratic, np.asarray([0.5, 1.0]), deltatol=deltatol) npt.assert_allclose(res.x, [0.0, 0....
[ "numpy.random.normal", "numpy.testing.assert_equal", "numpy.testing.assert_approx_equal", "numpy.testing.assert_allclose", "numpy.testing.assert_raises", "numpy.asarray", "noisyopt.bisect", "numpy.array", "numpy.zeros", "numpy.testing.run_module_suite", "numpy.random.randn", "noisyopt.Averaged...
[((285, 338), 'numpy.testing.assert_allclose', 'npt.assert_allclose', (['res.x', '[0.0, 0.0]'], {'atol': 'deltatol'}), '(res.x, [0.0, 0.0], atol=deltatol)\n', (304, 338), True, 'import numpy.testing as npt\n'), ((343, 385), 'numpy.testing.assert_equal', 'npt.assert_equal', (['res.free', '[False, False]'], {}), '(res.fr...
from django.contrib import admin from .models import SimpleRedirect @admin.register(SimpleRedirect) class SimpleRedirectAdmin(admin.ModelAdmin): list_display = [ 'from_url', 'to_url', 'date_created', 'date_modified', 'date_active_start', 'date_active_end', ]
[ "django.contrib.admin.register" ]
[((72, 102), 'django.contrib.admin.register', 'admin.register', (['SimpleRedirect'], {}), '(SimpleRedirect)\n', (86, 102), False, 'from django.contrib import admin\n')]
import json import py import textwrap issues_url = "http://bitbucket.org/api/1.0/repositories/pytest-dev/pytest/issues" import requests def get_issues(): chunksize = 50 start = 0 issues = [] while 1: post_data = {"accountname": "pytest-dev", "repo_slug": "pytest", ...
[ "py.path.local", "json.dumps", "argparse.ArgumentParser", "requests.get" ]
[((799, 824), 'py.path.local', 'py.path.local', (['args.cache'], {}), '(args.cache)\n', (812, 824), False, 'import py\n'), ((1948, 1999), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""process bitbucket issues"""'], {}), "('process bitbucket issues')\n", (1971, 1999), False, 'import argparse\n'), ((441, 48...
# coding=utf-8 """Environment Loader feature tests.""" # from pypi from expects import ( be_true, expect ) from pytest_bdd import ( given, scenarios, then, when, ) # for testing from .fixtures import katamari # software under test from cse_575.data.common import Environment # Setup scenarios...
[ "pytest_bdd.then", "pytest_bdd.when", "pytest_bdd.scenarios", "pytest_bdd.given", "cse_575.data.common.Environment" ]
[((311, 362), 'pytest_bdd.scenarios', 'scenarios', (['"""../features/environment_loader.feature"""'], {}), "('../features/environment_loader.feature')\n", (320, 362), False, 'from pytest_bdd import given, scenarios, then, when\n'), ((574, 609), 'pytest_bdd.given', 'given', (['"""a built environment loader"""'], {}), "(...
import os import subprocess from pretty_print import Print_C class Runner: run_kases = 3 def __init__(self, scheme, testcases): self.scheme = scheme self.testcases = testcases self.bin_file_template = f"build/test_results/{{testcase}}/bin/{scheme}" self.myout_template = f"buil...
[ "os.path.exists", "pretty_print.Print_C.print_subheader", "pretty_print.Print_C.print_procedure", "os.makedirs" ]
[((1248, 1323), 'pretty_print.Print_C.print_procedure', 'Print_C.print_procedure', (['f"""Running {self.scheme}_{testcase} [kase: {kase}]"""'], {}), "(f'Running {self.scheme}_{testcase} [kase: {kase}]')\n", (1271, 1323), False, 'from pretty_print import Print_C\n'), ((1336, 1357), 'os.path.exists', 'os.path.exists', ([...
import json from copy import deepcopy from random import shuffle cards = ['magician', 'high priestess', 'empress', 'emperor', 'hierophant', 'lovers', 'chariot', 'justice', 'hermit', 'wheel of fortune', 'strength', 'hanged man', 'death', 'temperance', 'devil', 'tower', 'star', 'moon', 'sun', 'judgem...
[ "json.load", "random.shuffle", "json.dump", "copy.deepcopy" ]
[((13203, 13218), 'copy.deepcopy', 'deepcopy', (['cards'], {}), '(cards)\n', (13211, 13218), False, 'from copy import deepcopy\n'), ((13227, 13245), 'random.shuffle', 'shuffle', (['self.deck'], {}), '(self.deck)\n', (13234, 13245), False, 'from random import shuffle\n'), ((13367, 13393), 'json.dump', 'json.dump', (['se...
# import lib.pbcvt as pbcvt import cv2 import numpy as np import sys from time import time def distance(o1, o2): (x1,y1,w1,h1) = o1 (x2,y2,w2,h2) = o2 c1 = (x1+w1/2,y1+h1/2) c2 = (x2+w2/2,y2+h2/2) return np.hypot(c1[0]-c2[0],c1[1]-c2[1]) cv2.namedWindow("preview") cv2.namedWindow("preview2") cv2....
[ "cv2.rectangle", "numpy.sqrt", "cv2.imshow", "cv2.ellipse", "cv2.fitEllipse", "cv2.CascadeClassifier", "cv2.calcHist", "cv2.threshold", "cv2.contourArea", "numpy.hypot", "cv2.waitKey", "cv2.kmeans", "cv2.equalizeHist", "cv2.cvtColor", "time.time", "cv2.namedWindow", "cv2.imwrite", ...
[((261, 287), 'cv2.namedWindow', 'cv2.namedWindow', (['"""preview"""'], {}), "('preview')\n", (276, 287), False, 'import cv2\n'), ((288, 315), 'cv2.namedWindow', 'cv2.namedWindow', (['"""preview2"""'], {}), "('preview2')\n", (303, 315), False, 'import cv2\n'), ((316, 343), 'cv2.namedWindow', 'cv2.namedWindow', (['"""pr...
# import speech_recognition as sr # import sys # # # read filename from arguments # filename = ("C:\\Users\\utkar\\Downloads\\crowd.mp3") # # # initialize the recognizer # r = sr.Recognizer() # # # open the file # with sr.AudioFile(filename) as source: # # listen for the data (load audio to memory) # audio_data...
[ "speech_recognition.Recognizer", "speech_recognition.AudioFile" ]
[((637, 652), 'speech_recognition.Recognizer', 'sr.Recognizer', ([], {}), '()\n', (650, 652), True, 'import speech_recognition as sr\n'), ((659, 683), 'speech_recognition.AudioFile', 'sr.AudioFile', (['AUDIO_FILE'], {}), '(AUDIO_FILE)\n', (671, 683), True, 'import speech_recognition as sr\n')]
import logging import os from pathlib import Path import re from typing import Dict, List, Optional, Tuple from calvin_agent.datasets.base_dataset import BaseDataset from calvin_agent.datasets.utils.episode_utils import ( get_state_info_dict, process_actions, process_depth, process_rgb, process_sta...
[ "logging.getLogger", "re.split", "calvin_agent.datasets.utils.episode_utils.process_state", "calvin_agent.datasets.utils.episode_utils.process_depth", "calvin_agent.datasets.utils.episode_utils.process_rgb", "pathlib.Path", "calvin_agent.datasets.utils.episode_utils.get_state_info_dict", "os.scandir",...
[((368, 395), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (385, 395), False, 'import logging\n'), ((1600, 1633), 'os.scandir', 'os.scandir', (['self.abs_datasets_dir'], {}), '(self.abs_datasets_dir)\n', (1610, 1633), False, 'import os\n'), ((1795, 1826), 're.split', 're.split', (['"""\...
import urllib import json import requests from bs4 import BeautifulSoup import pandas as pd import re import string from nltk.corpus import stopwords from nltk.stem import WordNetLemmatizer from nltk.stem.porter import PorterStemmer def getpage(num): url = "https://forums.eveonline.com/c/marketplace/...
[ "re.split", "nltk.corpus.stopwords.words", "re.compile", "nltk.stem.WordNetLemmatizer", "requests.get", "bs4.BeautifulSoup", "nltk.stem.porter.PorterStemmer", "pandas.DataFrame", "re.findall", "pandas.concat" ]
[((925, 951), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (940, 951), False, 'from nltk.corpus import stopwords\n'), ((1149, 1164), 'nltk.stem.porter.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (1162, 1164), False, 'from nltk.stem.porter import PorterStemmer\n'), (...
import numpy from NeuralNetworks.Layers.activations import lambda_from_function class Dense: def __init__(self, num_nodes = 1, input_dim = None, activation = 'sigmoid'): # set number of nodes self.num_nodes = num_nodes self.input_dim = input_dim self.activation = activation ...
[ "numpy.dot", "numpy.transpose", "NeuralNetworks.Layers.activations.lambda_from_function" ]
[((424, 456), 'NeuralNetworks.Layers.activations.lambda_from_function', 'lambda_from_function', (['activation'], {}), '(activation)\n', (444, 456), False, 'from NeuralNetworks.Layers.activations import lambda_from_function\n'), ((940, 970), 'numpy.dot', 'numpy.dot', (['self.weights', 'input'], {}), '(self.weights, inpu...
import tensorflow as tf import matplotlib.pyplot as plt import argparse from keras.models import load_model from data import load_from_H5 from bnn import bnn from viz import plot_predictions parser = argparse.ArgumentParser(description='Mauna Loa runner') parser.add_argument('--trained_model', default='models/mauna_...
[ "keras.models.load_model", "data.load_from_H5", "argparse.ArgumentParser", "viz.plot_predictions", "matplotlib.pyplot.show" ]
[((203, 258), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Mauna Loa runner"""'}), "(description='Mauna Loa runner')\n", (226, 258), False, 'import argparse\n'), ((908, 940), 'data.load_from_H5', 'load_from_H5', (['test_hdf5_filepath'], {}), '(test_hdf5_filepath)\n', (920, 940), False,...
import datetime from django.db import models from django.contrib.auth.models import User from django.contrib.sessions.models import Session class Guest(models.Model): """ A temporary user. Fields: ``user`` - The temporary user. ``last_used`` - The last time we noted this user doing something...
[ "django.db.models.DateTimeField", "datetime.datetime.now", "django.db.models.ForeignKey" ]
[((459, 508), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (476, 508), False, 'from django.db import models\n'), ((524, 550), 'django.db.models.DateTimeField', 'models.DateTimeField', (['User'], {}), '(User)\n', (544, 550), False,...
import collections import datetime import logging from typing import Any, Callable, Dict, List, Optional, Tuple, Union import numpy as np import pandas as pd import scipy as sp import sklearn as sklear import core.config as cconfig import core.data_adapters as cdataa import core.dataflow.utils as cdu import core.fina...
[ "logging.getLogger", "helpers.dbg.dassert_in", "core.dataflow.nodes.sources.ReadDataFromDf", "helpers.dbg.dassert_is_not", "core.dataflow.utils.get_x_and_forward_y_fit_df", "core.dataflow.utils.merge_dataframes", "core.data_adapters.transform_to_sklearn", "core.dataflow.visitors.extract_info", "help...
[((755, 782), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (772, 782), False, 'import logging\n'), ((2076, 2100), 'core.dataflow.utils.convert_to_list', 'cdu.convert_to_list', (['col'], {}), '(col)\n', (2095, 2100), True, 'import core.dataflow.utils as cdu\n'), ((2191, 2280), 'helpers.d...
""" Programmatically building up compartment models """ # Standard Libraries import json # External Libraries import numpy as np class CompartmentModelBuilder: """ The CompartmentModelBuilder class gives helper functions for defining a new compartment model from scratch within a python script. Initialize...
[ "json.dump" ]
[((11572, 11619), 'json.dump', 'json.dump', (['self.compartments', 'outfile'], {'indent': '(6)'}), '(self.compartments, outfile, indent=6)\n', (11581, 11619), False, 'import json\n')]
# Data processing imports import scipy.io as io import numpy as np from pyDOE import lhs # Plotting imports import matplotlib.pyplot as plt from mpl_toolkits.axes_grid1 import make_axes_locatable from scipy.interpolate import griddata import matplotlib.gridspec as gridspec def load_dataset(file): data = io.loadma...
[ "numpy.reshape", "numpy.ones", "numpy.random.choice", "matplotlib.pyplot.gca", "scipy.io.loadmat", "numpy.log", "matplotlib.pyplot.figure", "matplotlib.gridspec.GridSpec", "numpy.random.randn", "numpy.vstack", "mpl_toolkits.axes_grid1.make_axes_locatable", "numpy.std", "numpy.finfo", "nump...
[((311, 327), 'scipy.io.loadmat', 'io.loadmat', (['file'], {}), '(file)\n', (321, 327), True, 'import scipy.io as io\n'), ((533, 550), 'numpy.meshgrid', 'np.meshgrid', (['x', 't'], {}), '(x, t)\n', (544, 550), True, 'import numpy as np\n'), ((787, 833), 'numpy.random.choice', 'np.random.choice', (['x.shape[0]', 'N'], {...
########################################################################## # # MRC FGU Computational Genomics Group # # $Id$ # # Copyright (C) 2009 <NAME> # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Fre...
[ "CGAT.Experiment.Stop", "CGAT.Experiment.info", "CGAT.Experiment.Start" ]
[((2115, 2130), 'CGAT.Experiment.Start', 'E.Start', (['parser'], {}), '(parser)\n', (2122, 2130), True, 'import CGAT.Experiment as E\n'), ((3703, 3802), 'CGAT.Experiment.info', 'E.info', (["('ninput=%i, noutput=%i, nskipped=%i, nerrors=%i' % (ninput, noutput,\n nskipped, nerrors))"], {}), "('ninput=%i, noutput=%i, n...
import argparse import importlib import os import sys import jsonschema import pkg_resources from multiprocessing import Pool, cpu_count from pyneval.errors.exceptions import InvalidMetricError, PyNevalError from pyneval.pyneval_io import json_io from pyneval.pyneval_io import swc_io from pyneval.metric.utils import a...
[ "pyneval.metric.utils.cli_utils.make_sure_path_not_exist", "pyneval.pyneval_io.json_io.save_json", "pkg_resources.require", "pyneval.metric.utils.config_utils.get_config_schema", "multiprocessing.cpu_count", "pyneval.errors.exceptions.PyNevalError", "sys.path.append", "pyneval.pyneval_io.json_io.read_...
[((656, 696), 'os.path.join', 'os.path.join', (['base_dir', '"""pyneval/metric"""'], {}), "(base_dir, 'pyneval/metric')\n", (668, 696), False, 'import os\n'), ((709, 732), 'os.listdir', 'os.listdir', (['metric_path'], {}), '(metric_path)\n', (719, 732), False, 'import os\n'), ((1053, 1073), 'pyneval.metric.utils.metric...
"""DB Games model migrations Revision ID: 89944f8b35b3 Revises: Create Date: 2020-11-14 03:49:03.255055 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = "89944f8b35b3" down_revision = None branch_labels = None depends_on = None def upgrade(): # ### comman...
[ "sqlalchemy.ForeignKeyConstraint", "sqlalchemy.DateTime", "alembic.op.drop_table", "sqlalchemy.Boolean", "sqlalchemy.PrimaryKeyConstraint", "sqlalchemy.Integer", "sqlalchemy.String" ]
[((3521, 3556), 'alembic.op.drop_table', 'op.drop_table', (['"""game_console_table"""'], {}), "('game_console_table')\n", (3534, 3556), False, 'from alembic import op\n'), ((3561, 3598), 'alembic.op.drop_table', 'op.drop_table', (['"""franchiseassociation"""'], {}), "('franchiseassociation')\n", (3574, 3598), False, 'f...
""" Functions for working with tabix dosages in pandas dataframes """ import gzip import numpy as np import pandas as pd import pysam import statsmodels.api as sm class Dosage(object): def __init__(self, dosages, annotations, gene_name): # Match up the annotation dataframe with the dosage dataframe ...
[ "numpy.asarray", "pysam.Tabixfile", "pandas.Index", "statsmodels.api.add_constant", "statsmodels.api.OLS", "pandas.concat" ]
[((1215, 1247), 'pysam.Tabixfile', 'pysam.Tabixfile', (['annotation_file'], {}), '(annotation_file)\n', (1230, 1247), False, 'import pysam\n'), ((1870, 1890), 'pandas.concat', 'pd.concat', (['comb_iter'], {}), '(comb_iter)\n', (1879, 1890), True, 'import pandas as pd\n'), ((2765, 2785), 'statsmodels.api.add_constant', ...
import logging import enum import copy import telegram.error from telegram import ( InlineKeyboardButton, InlineKeyboardMarkup, ParseMode ) from app.entities import KnowledgeStatus from app.card import Card logging.basicConfig( format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level...
[ "logging.basicConfig", "copy.copy", "telegram.InlineKeyboardButton", "telegram.InlineKeyboardMarkup" ]
[((223, 330), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (242, 330), False, 'import logging\n'), ((1022, 1050), 'copy...
# # gamefaqs-scraper # github.com/01mu # from gamefaqs_scraper import GFSBoard from gamefaqs_scraper import GFSThread board = GFSBoard() board.get_site('234547-super-smash-bros-ultimate', 0) threads = board.find() print("Pages: " + str(board.max_page) + "\n") for i in range(len(threads)): print(threads[i].title...
[ "gamefaqs_scraper.GFSBoard" ]
[((128, 138), 'gamefaqs_scraper.GFSBoard', 'GFSBoard', ([], {}), '()\n', (136, 138), False, 'from gamefaqs_scraper import GFSBoard\n')]
# Generated by Django 2.1.5 on 2019-02-27 02:17 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('batchrecords', '0002_auto_20190226_1939'), ] operations = [ migrations.RemoveField( model_name='historicalbatchrecord', name...
[ "django.db.migrations.DeleteModel", "django.db.migrations.RemoveField" ]
[((232, 309), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""historicalbatchrecord"""', 'name': '"""created_by"""'}), "(model_name='historicalbatchrecord', name='created_by')\n", (254, 309), False, 'from django.db import migrations\n'), ((354, 433), 'django.db.migrations.RemoveFie...
# -*- coding: utf-8 -*- from __future__ import unicode_literals, print_function import unittest from marbles.ie import grpc from marbles.ie.ccg import parse_ccg_derivation2 as parse_ccg_derivation from marbles.ie.drt.drs import Rel from marbles.ie.semantics.ccg import process_ccg_pt, pt_to_ccg_derivation from marbles.i...
[ "marbles.ie.ccg.parse_ccg_derivation2", "marbles.test.dprint", "marbles.ie.drt.drs.Rel", "marbles.ie.semantics.ccg.pt_to_ccg_derivation", "marbles.ie.semantics.ccg.process_ccg_pt", "marbles.ie.grpc.ccg_parse", "unittest.main", "marbles.ie.grpc.CcgParserService" ]
[((1737, 1752), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1750, 1752), False, 'import unittest\n'), ((1786, 1801), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1799, 1801), False, 'import unittest\n'), ((515, 547), 'marbles.ie.grpc.CcgParserService', 'grpc.CcgParserService', (['"""easysrl"""'], {}), ...
# coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team. # Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a cop...
[ "logging.getLogger" ]
[((932, 959), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (949, 959), False, 'import logging\n')]
#Step 1 :- Importing dependancies and train test data generated from config import * train_data = pd.read_csv("data/train_data/train_feature.csv") test_data = pd.read_csv("data/test_data/test_feature.csv") #Step 2 :- Getting train data insights and drop unnecessary columns, Splitting data into input and target ...
[ "sklearn.model_selection.train_test_split" ]
[((935, 990), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.33)', 'random_state': '(42)'}), '(X, y, test_size=0.33, random_state=42)\n', (951, 990), False, 'from sklearn.model_selection import train_test_split\n')]
# Copyright 2018 The go-python Authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. # Testcases for functions in math. # # Each line takes the form: # # <testid> <function> <input_value> -> <output_value> <flags> # # where: # # <testid> ...
[ "math.isnan", "math.isinf" ]
[((2624, 2640), 'math.isinf', 'math.isinf', (['want'], {}), '(want)\n', (2634, 2640), False, 'import math\n'), ((3104, 3120), 'math.isnan', 'math.isnan', (['want'], {}), '(want)\n', (3114, 3120), False, 'import math\n'), ((3125, 3140), 'math.isnan', 'math.isnan', (['got'], {}), '(got)\n', (3135, 3140), False, 'import m...
#%% # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by app...
[ "tensorflow.cast", "tensorflow.train.AdamOptimizer", "tensorflow.random_normal", "tensorflow.transpose", "tensorflow.placeholder", "tensorflow.split", "tensorflow.Session", "tensorflow.global_variables_initializer", "tensorflow.examples.tutorials.mnist.input_data.read_data_sets", "tensorflow.argma...
[((824, 877), 'tensorflow.examples.tutorials.mnist.input_data.read_data_sets', 'input_data.read_data_sets', (['"""/tmp/data/"""'], {'one_hot': '(True)'}), "('/tmp/data/', one_hot=True)\n", (849, 877), False, 'from tensorflow.examples.tutorials.mnist import input_data\n'), ((1187, 1236), 'tensorflow.placeholder', 'tf.pl...
#!/usr/bin/env python3 from app import app import argparse HOST = '127.0.0.1' PORT = 8080 PROJECT_NAME = 'idealtrust' if __name__ == '__main__': parser = argparse.ArgumentParser(prog=PROJECT_NAME, usage='%(prog)s [options]') parser.add_argument('--port', help='port (default: {0})'.format(PORT), default=PORT) ...
[ "app.app.run", "argparse.ArgumentParser" ]
[((160, 230), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': 'PROJECT_NAME', 'usage': '"""%(prog)s [options]"""'}), "(prog=PROJECT_NAME, usage='%(prog)s [options]')\n", (183, 230), False, 'import argparse\n'), ((445, 516), 'app.app.run', 'app.run', ([], {'host': 'argv.host', 'port': 'argv.port', 'd...
import json import os import pymongo ''' fileService.py Author: <NAME> ''' mongo_client = pymongo.MongoClient() #db = {} ''' initialize Takes a 'unique_id'entifier and sets up a database in MongoDB and ensures that that database has collections associated with the various file types that are stored. ''' def init...
[ "pymongo.MongoClient", "json.loads" ]
[((94, 115), 'pymongo.MongoClient', 'pymongo.MongoClient', ([], {}), '()\n', (113, 115), False, 'import pymongo\n'), ((5647, 5670), 'json.loads', 'json.loads', (['fileContent'], {}), '(fileContent)\n', (5657, 5670), False, 'import json\n')]
""" Architecture for SFTMD """ import functools import torch import torch.nn as nn import torch.nn.functional as F import models.archs.arch_util as arch_util import torch.nn.utils.spectral_norm as spectral_norm class SFTLayer(nn.Module): def __init__(self, nf=64, n_condition=10): super(SFTLayer, self).__i...
[ "torch.nn.functional.conv2d", "torch.nn.ReLU", "models.archs.arch_util.initialize_weights", "torch.nn.LeakyReLU", "torch.nn.PixelShuffle", "torch.nn.Dropout2d", "torch.nn.Conv2d", "torch.nn.MaxPool2d", "torch.nn.utils.spectral_norm", "functools.partial", "torch.nn.Linear", "torch.nn.AdaptiveAv...
[((423, 490), 'torch.nn.Conv2d', 'nn.Conv2d', (['(nf + n_condition)', '(32)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(nf + n_condition, 32, kernel_size=3, stride=1, padding=1)\n', (432, 490), True, 'import torch.nn as nn\n'), ((516, 569), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', 'nf'], {'kernel_...
################################################### # # # Name : Online Hash Cracker (HASH++) # # Created by : MomboteQ # # Version : 1.0 # # # ########...
[ "urllib3.PoolManager", "colorama.init" ]
[((474, 480), 'colorama.init', 'init', ([], {}), '()\n', (478, 480), False, 'from colorama import Fore, Style, init\n'), ((970, 991), 'urllib3.PoolManager', 'urllib3.PoolManager', ([], {}), '()\n', (989, 991), False, 'import urllib3\n')]
import numpy as np import scipy.sparse as sp import Orange.data from Orange.statistics import distribution, basic_stats from Orange.util import Reprable from .transformation import Transformation, Lookup __all__ = [ "ReplaceUnknowns", "Average", "DoNotImpute", "DropInstances", "Model", "AsValu...
[ "Orange.statistics.distribution.get_distribution", "numpy.ones_like", "Orange.statistics.basic_stats.BasicStats", "numpy.asarray", "numpy.any", "scipy.sparse.issparse", "numpy.sum", "numpy.array", "numpy.isnan" ]
[((833, 847), 'scipy.sparse.issparse', 'sp.issparse', (['c'], {}), '(c)\n', (844, 847), True, 'import scipy.sparse as sp\n'), ((2250, 2263), 'numpy.isnan', 'np.isnan', (['col'], {}), '(col)\n', (2258, 2263), True, 'import numpy as np\n'), ((4537, 4553), 'numpy.isnan', 'np.isnan', (['column'], {}), '(column)\n', (4545, ...
""" Verify workchain. ----------------- Indented to be used to verify a calculation, perform corrections in inputs files and restart depending on physical principles etc. E.g. issues that are outside the Calculators awereness, or not currently checked in it. This workchain does currently nothing. """ # pylint: disabl...
[ "aiida.common.extendeddicts.AttributeDict", "aiida_vasp.utils.workchains.compose_exit_code", "aiida.engine.while_", "aiida.engine.append_", "aiida_vasp.utils.workchains.prepare_process_inputs", "aiida_vasp.utils.aiida_utils.get_data_node", "aiida.plugins.WorkflowFactory", "aiida_vasp.utils.aiida_utils...
[((880, 919), 'aiida.plugins.WorkflowFactory', 'WorkflowFactory', (['_next_workchain_string'], {}), '(_next_workchain_string)\n', (895, 919), False, 'from aiida.plugins import WorkflowFactory\n'), ((2369, 2384), 'aiida.common.extendeddicts.AttributeDict', 'AttributeDict', ([], {}), '()\n', (2382, 2384), False, 'from ai...
#!/usr/bin/env python """ Cubic spline peak finder. Hazen 03/16 """ import pickle import numpy import tifffile import storm_analysis.sa_library.analysis_io as analysisIO import storm_analysis.sa_library.fitting as fitting import storm_analysis.sa_library.ia_utilities_c as utilC import storm_analysis.sa_library.matc...
[ "storm_analysis.spliner.cubic_fit_c.CSpline2DFit", "storm_analysis.sa_library.fitting.PeakFinderArbitraryPSF", "storm_analysis.spliner.cubic_fit_c.CSpline3DFitFWLS", "storm_analysis.spliner.cubic_fit_c.CSpline3DFit", "storm_analysis.sa_library.fitting.PeakFitterArbitraryPSF", "storm_analysis.sa_library.fi...
[((2356, 2431), 'storm_analysis.sa_library.fitting.PeakFinderArbitraryPSF', 'fitting.PeakFinderArbitraryPSF', ([], {'parameters': 'parameters', 'psf_object': 'spline_fn'}), '(parameters=parameters, psf_object=spline_fn)\n', (2386, 2431), True, 'import storm_analysis.sa_library.fitting as fitting\n'), ((2623, 2693), 'st...
### # A script to convert the Services-consumable feeSchedules.json # into the "typed" format used by the public pricing calculator. ### import json providers = ['nodedata', 'networkdata', 'servicedata'] typed_schedules = {} with open('hedera-node/src/main/resources/feeSchedules.json', 'r') as fin: cur_and_nex...
[ "json.load", "json.dump" ]
[((334, 348), 'json.load', 'json.load', (['fin'], {}), '(fin)\n', (343, 348), False, 'import json\n'), ((1049, 1091), 'json.dump', 'json.dump', (['typed_schedules', 'fout'], {'indent': '(2)'}), '(typed_schedules, fout, indent=2)\n', (1058, 1091), False, 'import json\n')]
import requests, os, sys, numpy, requests from plexapi.server import PlexServer from tqdm import tqdm # from howdy.core import core, return_error_raw def get_tautulli_apikey( username, password, endpoint ): """ Gets the tautulli API key with provided Tautulli_ username and password. :param str usernam...
[ "plexapi.server.PlexServer", "tqdm.tqdm", "os.path.join", "requests.get", "os.getcwd", "requests.head", "os.path.basename" ]
[((572, 607), 'os.path.join', 'os.path.join', (['endpoint', '"""api"""', '"""v2"""'], {}), "(endpoint, 'api', 'v2')\n", (584, 607), False, 'import requests, os, sys, numpy, requests\n'), ((737, 837), 'requests.get', 'requests.get', (['full_url'], {'params': "{'username': username, 'password': password, 'cmd': 'get_apik...
from __future__ import print_function import datetime import hashlib import logging from abc import ABCMeta from pynamodb.attributes import UnicodeAttribute from pynamodb.models import Model from halolib.exceptions import DbIdemError from halolib.logs import log_json from .settingsx import settingsx settings = sett...
[ "logging.getLogger", "datetime.datetime.now", "pynamodb.attributes.UnicodeAttribute", "halolib.exceptions.DbIdemError" ]
[((523, 550), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (540, 550), False, 'import logging\n'), ((1678, 1706), 'pynamodb.attributes.UnicodeAttribute', 'UnicodeAttribute', ([], {'null': '(False)'}), '(null=False)\n', (1694, 1706), False, 'from pynamodb.attributes import UnicodeAttribu...
# -*- coding: utf-8 -*- """Python lager brewed by a loguru""" import asyncio from functools import wraps from time import time from typing import Union from loguru import logger from lager.const import LOG_LEVELS __all__ = ['loglevel', 'flog', 'handlers', 'logger', 'log', 'LOG', 'ln', 'LN'] logger.t = logger.trac...
[ "asyncio.iscoroutinefunction", "functools.wraps", "loguru.logger.opt", "time.time", "asyncio.iscoroutine" ]
[((1463, 1474), 'functools.wraps', 'wraps', (['funk'], {}), '(funk)\n', (1468, 1474), False, 'from functools import wraps\n'), ((2189, 2200), 'functools.wraps', 'wraps', (['funk'], {}), '(funk)\n', (2194, 2200), False, 'from functools import wraps\n'), ((1543, 1562), 'loguru.logger.opt', 'logger.opt', ([], {'depth': '(...
# -*- coding: utf-8 -*- """ @date Created on Wed Jan 13 17:45:15 2016 @copyright (C) 2015-2016 EOMYS ENGINEERING. @author pierre_b """ from os.path import join from unittest import TestCase import matplotlib.pyplot as plt from numpy import pi from pyleecan.Classes.Frame import Frame from pyleecan.Classes.LamHole imp...
[ "pyleecan.Classes.HoleM54.HoleM54", "matplotlib.pyplot.gcf", "os.path.join", "pyleecan.Classes.LamHole.LamHole", "matplotlib.pyplot.close", "pyleecan.Classes.Machine.Machine" ]
[((829, 845), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (838, 845), True, 'import matplotlib.pyplot as plt\n'), ((865, 874), 'pyleecan.Classes.Machine.Machine', 'Machine', ([], {}), '()\n', (872, 874), False, 'from pyleecan.Classes.Machine import Machine\n'), ((900, 970), 'pyleecan.Class...
# Copyright (c) 2017 RedHat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required...
[ "mock.Mock", "kuryr_kubernetes.tests.unit.kuryr_fixtures.MockNeutronClient" ]
[((1047, 1066), 'mock.Mock', 'mock.Mock', ([], {'spec': 'cls'}), '(spec=cls)\n', (1056, 1066), False, 'import mock\n'), ((1298, 1317), 'mock.Mock', 'mock.Mock', ([], {'spec': 'cls'}), '(spec=cls)\n', (1307, 1317), False, 'import mock\n'), ((1550, 1569), 'mock.Mock', 'mock.Mock', ([], {'spec': 'cls'}), '(spec=cls)\n', (...
import asyncio async def f1(): print("f1") return "f1" async def f2(): result = await f1() print(result) return "f2" loop = asyncio.get_event_loop() try: result = loop.run_until_complete(f2()) print(result) finally: print("exit")
[ "asyncio.get_event_loop" ]
[((148, 172), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (170, 172), False, 'import asyncio\n')]
# Copyright 2020-2021 OpenDR European Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agree...
[ "os.path.exists", "pybindings.LinearPlanner" ]
[((2308, 2576), 'pybindings.LinearPlanner', 'LinearPlanner', (['self.gripper_goal_wrist', 'robot_obs.gripper_tf', '[0, 0, 0, 0, 0, 0, 1]', 'robot_obs.base_tf', 'success_thres_dist', 'success_thres_rot', 'MIN_PLANNER_VELOCITY', 'MAX_PLANNER_VELOCITY', 'slow_down_factor', 'self._head_start', 'TIME_STEP_TRAIN', 'is_analyt...
#!/usr/bin/python3 # -*- coding: utf-8 -*- """ ====================== Laplacian segmentation ====================== This notebook implements the laplacian segmentation method of `McFee and Ellis, 2014 <http://bmcfee.github.io/papers/ismir2014_spectral.pdf>`_, with a couple of minor stability improvements. This impleme...
[ "librosa.feature.spectral_flatness", "librosa.util.fix_frames", "librosa.feature.mfcc", "librosa.estimate_tuning", "numpy.array", "numpy.cumsum", "sys.exit", "librosa.onset.onset_backtrack", "librosa.feature.spectral_centroid", "librosa.feature.spectral_contrast", "numpy.arange", "librosa.load...
[((1598, 1686), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""scipy"""', 'message': '"""^internal gelsd"""'}), "(action='ignore', module='scipy', message=\n '^internal gelsd')\n", (1621, 1686), False, 'import warnings\n'), ((1963, 2000), 'matplotlib.pyplot.rcPara...
import subprocess import pg8000 from agnostic import AbstractBackend class PostgresBackend(AbstractBackend): ''' Support for PostgreSQL. ''' def backup_db(self, backup_file): ''' Return a ``Popen`` instance that will backup the database to the ``backup_file`` handle. ''' ...
[ "subprocess.Popen", "pg8000.connect" ]
[((762, 840), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'env': 'env', 'stdout': 'backup_file', 'stderr': 'subprocess.PIPE'}), '(command, env=env, stdout=backup_file, stderr=subprocess.PIPE)\n', (778, 840), False, 'import subprocess\n'), ((2755, 2785), 'pg8000.connect', 'pg8000.connect', ([], {}), '(**conne...
import numpy as np import pandas as pd import matplotlib.pyplot as plt import common_fn as cf import seaborn as sns plt.rcParams["svg.hashsalt"]=0 pre_path='EnvEq/All3/' parm_format='{:.2e}' parm_name='therapy_abi-Tneg_initratio-Totcell' parm_name_array=['Tneg_initratio','Totcell'] post_path1='o2-Null_test-HE/' parm_n...
[ "common_fn.mkdirs", "common_fn.eq_values", "common_fn.timeseries", "numpy.array", "numpy.append", "numpy.empty", "numpy.logspace" ]
[((350, 400), 'common_fn.mkdirs', 'cf.mkdirs', ([], {'pre_path': 'pre_path', 'parm_name': 'parm_name1'}), '(pre_path=pre_path, parm_name=parm_name1)\n', (359, 400), True, 'import common_fn as cf\n'), ((429, 451), 'numpy.logspace', 'np.logspace', (['(-1)', '(-3)', '(5)'], {}), '(-1, -3, 5)\n', (440, 451), True, 'import ...
#!/usr/bin/env python # -*- coding: utf-8 -* """ tools module """ __author__ = 'Dr. <NAME>, University of Bristol, UK' __maintainer__ = 'Dr. <NAME>' __email__ = '<EMAIL>' __status__ = 'Development' import sys import os import copy import numpy as np try: import opt_einsum as oe OE_AVAILABLE = True except Imp...
[ "opt_einsum.contract", "numpy.abs", "numpy.asarray", "os.environ.get", "numpy.count_nonzero", "numpy.array", "numpy.sum", "os.path.dirname", "numpy.einsum", "numpy.nonzero", "numpy.linalg.eigh", "pyscf.symm.label_orb_symm", "numpy.zeros_like" ]
[((5197, 5220), 'numpy.linalg.eigh', 'np.linalg.eigh', (['rdm1_mo'], {}), '(rdm1_mo)\n', (5211, 5220), True, 'import numpy as np\n'), ((4795, 4822), 'numpy.asarray', 'np.asarray', (['((mo_coeff,) * 2)'], {}), '((mo_coeff,) * 2)\n', (4805, 4822), True, 'import numpy as np\n'), ((6408, 6431), 'numpy.zeros_like', 'np.zero...
import numpy as np from ._base import FilterAlgorithmBase class WhiteTophat(FilterAlgorithmBase): """ Performs "white top hat" filtering of an image to enhance spots. "White top hat filtering" finds spots that are both smaller and brighter than their surroundings. See Also -------- https://e...
[ "numpy.minimum", "skimage.morphology.disk", "scipy.ndimage.filters.maximum_filter", "scipy.ndimage.filters.minimum_filter" ]
[((1454, 1474), 'skimage.morphology.disk', 'disk', (['self.disk_size'], {}), '(self.disk_size)\n', (1458, 1474), False, 'from skimage.morphology import disk\n'), ((1502, 1554), 'scipy.ndimage.filters.minimum_filter', 'minimum_filter', (['image'], {'footprint': 'structuring_element'}), '(image, footprint=structuring_ele...
import datetime import cv2 import numpy as np from artsci2019.lib.frame_checker import FrameChecker from artsci2019.lib.util import scale_frame, scale_point, is_in_frame from artsci2019.lib.face_recog import get_faces from artsci2019.lib.sound import SoundPlayer def draw_checked_frame(frame, checked_frame, factor): ...
[ "artsci2019.lib.util.scale_point", "cv2.transpose", "artsci2019.lib.sound.SoundPlayer", "cv2.imshow", "datetime.timedelta", "artsci2019.lib.frame_checker.FrameChecker", "numpy.reshape", "cv2.line", "cv2.addWeighted", "cv2.waitKey", "cv2.Subdiv2D", "cv2.namedWindow", "artsci2019.lib.util.is_i...
[((1334, 1352), 'cv2.Subdiv2D', 'cv2.Subdiv2D', (['rect'], {}), '(rect)\n', (1346, 1352), False, 'import cv2\n'), ((470, 513), 'artsci2019.lib.util.scale_point', 'scale_point', (['checked_frame.left_eye', 'factor'], {}), '(checked_frame.left_eye, factor)\n', (481, 513), False, 'from artsci2019.lib.util import scale_fra...
from unittest.mock import patch, MagicMock, call import json from datetime import datetime from copy import deepcopy import pytest from PIL import Image from sm.engine import DB, ESExporter, QueuePublisher from sm.engine.dataset_manager import SMapiDatasetManager, SMDaemonDatasetManager from sm.engine.dataset_manager ...
[ "sm.engine.dataset_manager.Dataset", "sm.engine.DB", "sm.engine.dataset_manager.SMapiDatasetManager", "unittest.mock.MagicMock", "PIL.Image.new", "unittest.mock.call", "json.dumps", "datetime.datetime.now", "pytest.fixture", "unittest.mock.patch", "sm.engine.dataset_manager.SMDaemonDatasetManage...
[((630, 646), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (644, 646), False, 'import pytest\n'), ((791, 810), 'sm.engine.DB', 'DB', (["sm_config['db']"], {}), "(sm_config['db'])\n", (793, 810), False, 'from sm.engine import DB, ESExporter, QueuePublisher\n'), ((2897, 3052), 'sm.engine.dataset_manager.Dataset'...
import gym from dqn_tf import DeepQNetwork, Agent import numpy as np from gym import wrappers def preprocess(observation): return np.mean(observation[30:, :], axis=2).reshape(180, 160, 1) def stack_frames(stacked_frames, frame, buffer_size): if stacked_frames is None: stacked_frames = np.zeros((buff...
[ "numpy.mean", "numpy.random.choice", "numpy.zeros", "dqn_tf.Agent", "gym.make" ]
[((717, 740), 'gym.make', 'gym.make', (['"""Breakout-v0"""'], {}), "('Breakout-v0')\n", (725, 740), False, 'import gym\n'), ((781, 899), 'dqn_tf.Agent', 'Agent', ([], {'gamma': '(0.99)', 'epsilon': '(1.0)', 'alpha': '(0.00025)', 'input_dims': '(180, 160, 4)', 'n_actions': '(3)', 'mem_size': '(3000)', 'batch_size': '(32...
from bs4 import BeautifulSoup import requests import csv import sys from urllib.error import HTTPError sys.path.append("..") import mytemp import time import json url='https://gz.17zwd.com/api/shop/get-list/73' resp=requests.get(url) f=open('17wang.txt','w+',encoding='utf-8') f.write(resp.text) print(res...
[ "sys.path.append", "requests.get" ]
[((108, 129), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (123, 129), False, 'import sys\n'), ((228, 245), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (240, 245), False, 'import requests\n')]
#!/usr/bin/env python """ Example model with strong correlations between the fitted parameters. We use a*x = y + N(0,1) made complicated by defining a=p1+p2. The expected distribution for p1 and p2 will be uniform, with p2 = a-p1 in each sample. Because this distribution is inherently unbounded, artificial bounds a...
[ "bumps.wsolve.wpolyfit" ]
[((1471, 1511), 'bumps.wsolve.wpolyfit', 'wpolyfit', (['x', 'data'], {'degree': '(1)', 'origin': '(True)'}), '(x, data, degree=1, origin=True)\n', (1479, 1511), False, 'from bumps.wsolve import wpolyfit\n')]
"""Data analyzation metrics Each algorithm works on a set of handwritings. They have to be applied like this: >>> import hwrt.data_analyzation_metrics >>> from hwrt.handwritten_data import HandwrittenData >>> data_json = '[[{"time": 123, "x": 45, "y": 67}]]' >>> a = [{'is_in_testset': 0, ... 'formula_id': "31L", ....
[ "logging.getLogger", "os.path.exists", "numpy.mean", "os.makedirs", "numpy.average", "os.path.join", "collections.defaultdict", "numpy.std", "math.hypot", "time.time" ]
[((1294, 1321), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1311, 1321), False, 'import logging\n'), ((1918, 1951), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (1930, 1951), False, 'import os\n'), ((3211, 3228), 'collections.defaultdict'...
import conftest # Add root path to sys.path import os import matplotlib.pyplot as plt from PathPlanning.SpiralSpanningTreeCPP \ import spiral_spanning_tree_coverage_path_planner spiral_spanning_tree_coverage_path_planner.do_animation = True def spiral_stc_cpp(img, start): num_free = 0 for i in range(img...
[ "os.path.join", "PathPlanning.SpiralSpanningTreeCPP.spiral_spanning_tree_coverage_path_planner.SpiralSpanningTreeCoveragePlanner", "conftest.run_this_test", "os.path.abspath" ]
[((423, 509), 'PathPlanning.SpiralSpanningTreeCPP.spiral_spanning_tree_coverage_path_planner.SpiralSpanningTreeCoveragePlanner', 'spiral_spanning_tree_coverage_path_planner.SpiralSpanningTreeCoveragePlanner', (['img'], {}), '(\n img)\n', (499, 509), False, 'from PathPlanning.SpiralSpanningTreeCPP import spiral_spann...
from __future__ import print_function import numpy as np from kernel_tuner import run_kernel from .context import skip_if_no_cuda_device, create_plot from km3net.util import get_kernel_path, generate_correlations_table def test_degrees_kernel(): skip_if_no_cuda_device() def in_degrees(correlations): ...
[ "km3net.util.generate_correlations_table", "km3net.util.get_kernel_path", "numpy.int32", "numpy.sum", "numpy.zeros", "kernel_tuner.run_kernel" ]
[((754, 767), 'numpy.int32', 'np.int32', (['(400)'], {}), '(400)\n', (762, 767), True, 'import numpy as np\n'), ((795, 808), 'numpy.int32', 'np.int32', (['(150)'], {}), '(150)\n', (803, 808), True, 'import numpy as np\n'), ((924, 989), 'km3net.util.generate_correlations_table', 'generate_correlations_table', (['N', 'sl...
import numpy as np import torch import torch.nn.functional as F from scipy.sparse import coo_matrix from sklearn.preprocessing import StandardScaler from torch.utils.data import Dataset from torch_geometric.data import InMemoryDataset, Data, Batch from tqdm.auto import tqdm from utils.data_utils import window_data_sor...
[ "numpy.clip", "torch.as_tensor", "torch.load", "torch.stack", "torch.sqrt", "torch.max", "sklearn.preprocessing.StandardScaler", "numpy.array", "torch_geometric.data.Batch.from_data_list", "torch.save", "tqdm.auto.tqdm", "numpy.percentile", "utils.data_utils.add_age_gender", "torch.cat" ]
[((622, 657), 'torch.load', 'torch.load', (['self.processed_paths[0]'], {}), '(self.processed_paths[0])\n', (632, 657), False, 'import torch\n'), ((1016, 1045), 'torch.load', 'torch.load', (['self.raw_paths[0]'], {}), '(self.raw_paths[0])\n', (1026, 1045), False, 'import torch\n'), ((1611, 1652), 'numpy.clip', 'np.clip...
#! /usr/bin/env python # -*- coding: utf8 -*- import numpy as np import matplotlib.pyplot as plt from scipy.integrate import odeint # use Runge-Kutta 4 def pend(y, t, b, c): # function definition """Gives 2D vector dy/dt as function of y and t, with parameters b and c.""" return np.array([y[1], -b*y[1] - c...
[ "matplotlib.pyplot.grid", "matplotlib.pyplot.savefig", "scipy.integrate.odeint", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "numpy.array", "numpy.linspace", "numpy.sin", "matplotlib.pyplot.legend", "matplotlib.pyplot.show" ]
[((380, 408), 'numpy.array', 'np.array', (['[np.pi - 0.1, 0.0]'], {}), '([np.pi - 0.1, 0.0])\n', (388, 408), True, 'import numpy as np\n'), ((413, 436), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(101)'], {}), '(0, 10, 101)\n', (424, 436), True, 'import numpy as np\n'), ((473, 505), 'scipy.integrate.odeint', 'o...
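The record above is the classic damped-pendulum integration with scipy.integrate.odeint. A complete runnable sketch of the same pattern follows; the damping and stiffness values b and c are assumptions, since the record truncates before defining them:

import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import odeint

def pend(y, t, b, c):
    """Damped pendulum: y = [theta, omega], damping b, stiffness c."""
    theta, omega = y
    return np.array([omega, -b * omega - c * np.sin(theta)])

b, c = 0.25, 5.0                    # assumed parameter values
y0 = np.array([np.pi - 0.1, 0.0])     # start nearly inverted, at rest
t = np.linspace(0, 10, 101)

sol = odeint(pend, y0, t, args=(b, c))

plt.plot(t, sol[:, 0], label="theta(t)")
plt.plot(t, sol[:, 1], label="omega(t)")
plt.xlabel("t")
plt.legend()
plt.grid(True)
plt.savefig("pendulum.png")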
from PySide2.QtWidgets import QWidget from SciDataTool.GUI.WVectorSelector.Ui_WVectorSelector import Ui_WVectorSelector from PySide2.QtCore import Signal from PySide2.QtGui import QStandardItem COMP_DICT = { "radial": "radial", "circumferential": "tangential", "axial": "axial", "x-axis component": "co...
[ "PySide2.QtWidgets.QWidget.__init__", "PySide2.QtGui.QStandardItem", "PySide2.QtCore.Signal" ]
[((731, 739), 'PySide2.QtCore.Signal', 'Signal', ([], {}), '()\n', (737, 739), False, 'from PySide2.QtCore import Signal\n'), ((1075, 1112), 'PySide2.QtWidgets.QWidget.__init__', 'QWidget.__init__', (['self'], {'parent': 'parent'}), '(self, parent=parent)\n', (1091, 1112), False, 'from PySide2.QtWidgets import QWidget\...
#!/usr/bin/env python3 """Defines a status route for the HolbertonBnB API.""" from flask import jsonify from flasgger import swag_from from models import storage from api.v1.views import app_views @app_views.route("/status") @swag_from("../apidocs/status/status.yml") def status(): """Returns the server status. ...
[ "models.storage.count", "flasgger.swag_from", "api.v1.views.app_views.route", "flask.jsonify" ]
[((200, 226), 'api.v1.views.app_views.route', 'app_views.route', (['"""/status"""'], {}), "('/status')\n", (215, 226), False, 'from api.v1.views import app_views\n'), ((228, 269), 'flasgger.swag_from', 'swag_from', (['"""../apidocs/status/status.yml"""'], {}), "('../apidocs/status/status.yml')\n", (237, 269), False, 'f...
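The record above registers a /status endpoint on a Flask blueprint with a flasgger spec and a storage backend, neither of which is reproduced here. A minimal standalone sketch of the same route shape, using a plain Flask app as a stand-in, could look like:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/status")
def status():
    # Same JSON health-check contract as the blueprint route above,
    # minus the swagger docs and the storage.count() statistics.
    return jsonify({"status": "OK"})

if __name__ == "__main__":
    app.run(port=5001)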
import inspect class Queue(object): ''' Queue data structure FIFO - First In First Out ''' def __init__(self, capacity = 10): ''' :param size: max capacity of the queue, default is 10 ''' self.queue = [] self.front = None self.rear = None self.s...
[ "inspect.getsource" ]
[((1659, 1683), 'inspect.getsource', 'inspect.getsource', (['Queue'], {}), '(Queue)\n', (1676, 1683), False, 'import inspect\n'), ((3428, 3452), 'inspect.getsource', 'inspect.getsource', (['Deque'], {}), '(Deque)\n', (3445, 3452), False, 'import inspect\n')]
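The record above defines list-backed Queue and Deque classes and prints their source with inspect.getsource. A compact FIFO queue sketch in the same spirit is shown below; it uses collections.deque as a simplified stand-in rather than the record's own implementation:

from collections import deque

class Queue:
    """Minimal FIFO queue with a fixed capacity (a stand-in, not the record's class)."""

    def __init__(self, capacity=10):
        self.capacity = capacity
        self._items = deque()

    def enqueue(self, item):
        if len(self._items) >= self.capacity:
            raise OverflowError("queue is full")
        self._items.append(item)

    def dequeue(self):
        if not self._items:
            raise IndexError("queue is empty")
        return self._items.popleft()

    def __len__(self):
        return len(self._items)

q = Queue(capacity=2)
q.enqueue("a")
q.enqueue("b")
print(q.dequeue())  # -> "a"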
# Copyright 2021 (David) <NAME>. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law ...
[ "os.path.abspath", "os.path.dirname", "os.path.join" ]
[((888, 909), 'os.path.abspath', 'os.path.abspath', (['root'], {}), '(root)\n', (903, 909), False, 'import os\n'), ((846, 871), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (861, 871), False, 'import os\n'), ((1124, 1157), 'os.path.join', 'os.path.join', (['self._prefix', 'value'], {}), '(s...
# Time: O(nlogk) # Space: O(k) import heapq class KthLargest(object): def __init__(self, k, nums): """ :type k: int :type nums: List[int] """ self.__k = k self.__min_heap = [] for n in nums: self.add(n) def add(self, val): """ ...
[ "heapq.heappush", "heapq.heappop" ]
[((379, 415), 'heapq.heappush', 'heapq.heappush', (['self.__min_heap', 'val'], {}), '(self.__min_heap, val)\n', (393, 415), False, 'import heapq\n'), ((472, 502), 'heapq.heappop', 'heapq.heappop', (['self.__min_heap'], {}), '(self.__min_heap)\n', (485, 502), False, 'import heapq\n')]
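The record above keeps a min-heap of size k so that its root is always the k-th largest value seen, which matches the O(n log k) / O(k) note in its header. The add() method is truncated there; the standard completion that the heappush/heappop extraction points to looks like this:

import heapq

class KthLargest(object):
    # Min-heap of at most k elements; the heap root is the k-th largest seen so far.
    def __init__(self, k, nums):
        self.__k = k
        self.__min_heap = []
        for n in nums:
            self.add(n)

    def add(self, val):
        heapq.heappush(self.__min_heap, val)
        if len(self.__min_heap) > self.__k:
            heapq.heappop(self.__min_heap)
        return self.__min_heap[0]

kth = KthLargest(3, [4, 5, 8, 2])
print(kth.add(3))   # 4
print(kth.add(5))   # 5
print(kth.add(10))  # 5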
import os import platform from configparser import ExtendedInterpolation from pathlib import Path from subprocess import run from sys import exit from typing import List from blulib.config_parser import ConfigParser from tealprint import TealPrint from tealprint.teallevel import TealLevel from youtube_series_downloade...
[ "tealprint.TealPrint.info", "pathlib.Path.home", "subprocess.run", "platform.system", "youtube_series_downloader.core.channel.Channel", "tealprint.TealPrint.warning", "sys.exit", "youtube_series_downloader.config.General", "configparser.ExtendedInterpolation" ]
[((1482, 1491), 'youtube_series_downloader.config.General', 'General', ([], {}), '()\n', (1489, 1491), False, 'from youtube_series_downloader.config import General, config\n'), ((699, 770), 'tealprint.TealPrint.info', 'TealPrint.info', (['f"""Could not find any configuration file in {self.path}"""'], {}), "(f'Could not...
""" @author: <NAME> (University of Sydney) ------------------------------------------------------------------------- AMICAL: Aperture Masking Interferometry Calibration and Analysis Library ------------------------------------------------------------------------- Function related to data cleaning (ghost, background c...
[ "matplotlib.pyplot.grid", "numpy.sqrt", "matplotlib.pyplot.ylabel", "numpy.array", "astropy.io.fits.open", "numpy.sin", "numpy.arange", "numpy.mean", "numpy.where", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "amical.tools.apply_windowing", "numpy.fft.fft2", "numpy.max", "numpy...
[((2288, 2312), 'numpy.array', 'np.array', (['cube_corrected'], {}), '(cube_corrected)\n', (2296, 2312), True, 'import numpy as np\n'), ((3422, 3438), 'numpy.array', 'np.array', (['fluxes'], {}), '(fluxes)\n', (3430, 3438), True, 'import numpy as np\n'), ((3455, 3474), 'numpy.array', 'np.array', (['flag_fram'], {}), '(...
from requests import get import time import gi ip_starting = "" recon_command = "" rate = 60 def main(): print("Example: nordvpn disconnect && nordvpn connect") recon_command = input("Enter the command used to reconnect to VPN: ") ip_starting = get('https://api.ipify.org').text print("Starting ip:", ip_startin...
[ "subprocess.run", "time.sleep", "requests.get", "os._exit", "sys.exit" ]
[((253, 281), 'requests.get', 'get', (['"""https://api.ipify.org"""'], {}), "('https://api.ipify.org')\n", (256, 281), False, 'from requests import get\n'), ((393, 409), 'time.sleep', 'time.sleep', (['rate'], {}), '(rate)\n', (403, 409), False, 'import time\n'), ((525, 588), 'subprocess.run', 'subprocess.run', (['"""no...
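The record above records a starting public IP, then sleeps and runs a reconnect command; the exact trigger condition is cut off. One plausible reading of the loop, sketched with hypothetical values and clearly guessed logic, is:

import subprocess
import time
from requests import get

def watch_ip(starting_ip, reconnect_command, rate=60):
    # Poll the public IP; if it ever matches the unprotected starting
    # address again, assume the VPN dropped and re-run the reconnect command.
    # (The trigger condition is an assumption about the truncated record.)
    while True:
        time.sleep(rate)
        try:
            current_ip = get("https://api.ipify.org", timeout=10).text
        except Exception:
            continue  # transient network error; retry on the next cycle
        if current_ip == starting_ip:
            subprocess.run(reconnect_command, shell=True)

# Hypothetical usage:
# watch_ip("203.0.113.7", "nordvpn disconnect && nordvpn connect")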
import sys import torch from torch import nn from torch.autograd import Variable from view import * from holder import * from util import * from join_table import * from trilinear_prod import * from fusion import * # fused bidir attention class FusedBiAttention(torch.nn.Module): def __init__(self, opt, shared): su...
[ "torch.nn.Softmax" ]
[((602, 615), 'torch.nn.Softmax', 'nn.Softmax', (['(2)'], {}), '(2)\n', (612, 615), False, 'from torch import nn\n')]
import random import string def generate() -> str: """ direct reimpl of secretID.js from account.neosvr.com """ length = 12 valid_chars = string.ascii_letters + string.digits return "".join(random.choices(valid_chars, k=length))
[ "random.choices" ]
[((216, 253), 'random.choices', 'random.choices', (['valid_chars'], {'k': 'length'}), '(valid_chars, k=length)\n', (230, 253), False, 'import random\n')]
# Simplified Bres Maker # Version: 1.0 #Python Version: 2.0 # IMPORTS import pandas as pd import numpy as np from sklearn.cluster import KMeans from numpy import asarray from numpy import savetxt import sys import os # DEFINITIONS def find(s, ch): return [i for i, ltr in enumerate(s) if ltr == ch] # DATALOAD...
[ "sklearn.cluster.KMeans", "numpy.insert", "numpy.column_stack", "pandas.read_csv" ]
[((616, 759), 'pandas.read_csv', 'pd.read_csv', (['ranking'], {'usecols': "['A', 'R', 'N', 'D', 'C', 'Q', 'E', 'G', 'H', 'I', 'L', 'K', 'M', 'F', 'P',\n 'S', 'T', 'W', 'Y', 'V']", 'sep': '""","""'}), "(ranking, usecols=['A', 'R', 'N', 'D', 'C', 'Q', 'E', 'G', 'H',\n 'I', 'L', 'K', 'M', 'F', 'P', 'S', 'T', 'W', 'Y...
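The record above reads a 20-column amino-acid ranking table and clusters it with scikit-learn's KMeans. A self-contained sketch of the same clustering step, with random data standing in for the CSV and an assumed cluster count, could look like:

import numpy as np
from sklearn.cluster import KMeans

# Hypothetical stand-in for the 20-column table loaded via pandas.read_csv.
X = np.random.rand(50, 20)

kmeans = KMeans(n_clusters=4, n_init=10, random_state=0).fit(X)
labels = kmeans.labels_              # cluster index per row
centers = kmeans.cluster_centers_   # (4, 20) array of centroids

# Attach the labels as an extra column, mirroring the record's numpy.column_stack usage.
clustered = np.column_stack((X, labels))
print(clustered.shape)  # (50, 21)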
import requests """ Delete a project version. If there are no more versions available for a given project, that project will be deleted too. """ def delete_version(server, project, version): url = "http://{}/delversion.json".format(server) data = { "project": project, "version": version } with requests.Sess...
[ "requests.Session" ]
[((307, 325), 'requests.Session', 'requests.Session', ([], {}), '()\n', (323, 325), False, 'import requests\n')]
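The record above opens a requests.Session to call a delversion.json endpoint but is truncated before the request itself. A minimal sketch of one likely shape of that call follows; the use of POST and the JSON response are assumptions:

import requests

def delete_version(server, project, version):
    # POST the project/version pair to the delversion.json endpoint named in the record.
    url = "http://{}/delversion.json".format(server)
    data = {"project": project, "version": version}
    with requests.Session() as session:
        response = session.post(url, data=data, timeout=10)
        response.raise_for_status()
        return response.json()

# Hypothetical call:
# delete_version("docs.example.com", "myproject", "1.0")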
import utils import os import json def getjsondata(path): if not os.path.isabs(path): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), path) f = open(path) data = json.loads(f.read()) return data def getconfig(): return getjsondata('./conf.json')
[ "os.path.realpath", "os.path.isabs" ]
[((70, 89), 'os.path.isabs', 'os.path.isabs', (['path'], {}), '(path)\n', (83, 89), False, 'import os\n'), ((135, 161), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (151, 161), False, 'import os\n')]
import cv2 import time import logging class Sentry: #__face_cascade = cv2.CascadeClassifier('haarcascades/haarcascade_frontalface_default.xml') __face_cascade = cv2.CascadeClassifier('haarcascades/haarcascade_frontalface_alt.xml') #__face_cascade = cv2.CascadeClassifier('haarcascades/haarcascade_upperbody.xml') ...
[ "cv2.rectangle", "cv2.CascadeClassifier", "time.perf_counter", "cv2.cvtColor" ]
[((166, 235), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascades/haarcascade_frontalface_alt.xml"""'], {}), "('haarcascades/haarcascade_frontalface_alt.xml')\n", (187, 235), False, 'import cv2\n'), ((338, 395), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascades/haarcascade_eye.xml"""...
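The record above loads a Haar cascade and (per its extracted calls) converts frames to grayscale and draws rectangles around detections. A standalone sketch of that detection loop on a single image, using OpenCV's bundled cascade path and a hypothetical input file, could look like:

import cv2

# Use the cascade bundled with opencv-python; the record ships its own haarcascades directory.
face_cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + "haarcascade_frontalface_alt.xml")

frame = cv2.imread("input.jpg")  # hypothetical input path
if frame is None:
    raise SystemExit("input.jpg not found")

gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

# detectMultiScale returns one (x, y, w, h) box per detected face.
faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
for (x, y, w, h) in faces:
    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)

cv2.imwrite("detected.jpg", frame)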
import numpy as np import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score import torch from transformers import TrainingArguments, Trainer from transformers import BertTokenizer, BertForSequenceClassification from tran...
[ "sklearn.metrics.f1_score", "pandas.read_csv", "transformers.TrainingArguments", "sklearn.model_selection.train_test_split", "argparse.ArgumentParser", "transformers.BertTokenizer.from_pretrained", "numpy.argmax", "sklearn.metrics.precision_score", "sklearn.metrics.recall_score", "torch.tensor", ...
[((1036, 1059), 'numpy.argmax', 'np.argmax', (['pred'], {'axis': '(1)'}), '(pred, axis=1)\n', (1045, 1059), True, 'import numpy as np\n'), ((1076, 1118), 'sklearn.metrics.accuracy_score', 'accuracy_score', ([], {'y_true': 'labels', 'y_pred': 'pred'}), '(y_true=labels, y_pred=pred)\n', (1090, 1118), False, 'from sklearn...
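The record above builds a metrics function from np.argmax plus scikit-learn's accuracy, precision, recall, and F1 for a Trainer-style evaluation loop. A small self-contained sketch of such a function is below; the (logits, labels) unpacking and the default binary averaging are assumptions, not the record's exact signature:

import numpy as np
from sklearn.metrics import accuracy_score, recall_score, precision_score, f1_score

def compute_metrics(eval_pred):
    # Reduce per-class logits to predicted class ids, then score against the labels.
    logits, labels = eval_pred
    pred = np.argmax(logits, axis=1)
    return {
        "accuracy": accuracy_score(y_true=labels, y_pred=pred),
        "precision": precision_score(y_true=labels, y_pred=pred),
        "recall": recall_score(y_true=labels, y_pred=pred),
        "f1": f1_score(y_true=labels, y_pred=pred),
    }

# Quick standalone check with fake logits for four examples:
logits = np.array([[0.1, 0.9], [0.8, 0.2], [0.3, 0.7], [0.6, 0.4]])
labels = np.array([1, 0, 1, 1])
print(compute_metrics((logits, labels)))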
# ============================================================================== # Copyright 2019 - <NAME> # # NOTICE: Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, ...
[ "gym.envs.registration.register", "warnings.filterwarnings" ]
[((1091, 1169), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'message': '"""Parameters to load are deprecated"""'}), "('ignore', message='Parameters to load are deprecated')\n", (1114, 1169), False, 'import warnings\n'), ((1171, 1276), 'gym.envs.registration.register', 'register', ([], {'id...
from bs4 import BeautifulSoup import time from kik_unofficial.datatypes.xmpp.base_elements import XMPPElement, XMPPResponse class Struct: def __init__(self, **entries): self.__dict__.update(entries) class OutgoingAcknowledgement(XMPPElement): """ Represents an outgoing acknowledgement ...
[ "time.time" ]
[((647, 658), 'time.time', 'time.time', ([], {}), '()\n', (656, 658), False, 'import time\n'), ((1906, 1917), 'time.time', 'time.time', ([], {}), '()\n', (1915, 1917), False, 'import time\n')]
import responses from cloudscale import ( CLOUDSCALE_API_URL, Cloudscale, CloudscaleApiException, CloudscaleException, ) FLAVOR_RESP = { "slug": "flex-2", "name": "Flex-2", "vcpu_count": 1, "memory_gb": 2, "zones": [{"slug": "rma1"}, {"slug": "lpg1"}], } @responses.activate def te...
[ "responses.add", "cloudscale.Cloudscale" ]
[((345, 443), 'responses.add', 'responses.add', (['responses.GET', "(CLOUDSCALE_API_URL + '/flavors')"], {'json': '[FLAVOR_RESP]', 'status': '(200)'}), "(responses.GET, CLOUDSCALE_API_URL + '/flavors', json=[\n FLAVOR_RESP], status=200)\n", (358, 443), False, 'import responses\n'), ((457, 555), 'responses.add', 'res...
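The record above stubs the flavors endpoint with the responses library before exercising the Cloudscale client. A minimal sketch of the same mocking pattern follows; it calls plain requests instead of the cloudscale client, and the API URL is an assumption rather than the package's CLOUDSCALE_API_URL constant:

import responses
import requests

API_URL = "https://api.cloudscale.ch/v1"  # assumed base URL
FLAVOR_RESP = {"slug": "flex-2", "name": "Flex-2", "vcpu_count": 1, "memory_gb": 2}

@responses.activate
def test_flavor_list():
    # Register a canned reply so no real HTTP request leaves the test.
    responses.add(
        responses.GET,
        API_URL + "/flavors",
        json=[FLAVOR_RESP],
        status=200,
    )
    resp = requests.get(API_URL + "/flavors")
    assert resp.status_code == 200
    assert resp.json()[0]["slug"] == "flex-2"

test_flavor_list()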