Columns:
    code:         string, lengths 22 to 1.05M
    apis:         list, lengths 1 to 3.31k
    extract_api:  string, lengths 75 to 3.25M
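Each row pairs a Python source string (code) with the fully qualified APIs it calls (apis) and, in extract_api, one tuple per call site. The sketch below unpacks the first tuple of the opening row; the field names and the meaning of the boolean flag are inferred from the sample values, not taken from any documented schema for this dataset.

# Hedged sketch: unpacking one extract_api entry (field meanings inferred).
entry = ((650, 684), 'os.path.join', 'os.path.join',
         (['folder', 'invoice_name'], {}), '(folder, invoice_name)\n',
         (662, 684), False, 'import os\n')

(call_span,        # character offsets of the full call inside the code string
 qualified_name,   # fully qualified API name, matching the apis column
 name_as_written,  # the name as it appears at the call site
 args,             # (positional argument sources, keyword argument sources)
 arg_text,         # the argument list as source text
 arg_span,         # character offsets of the parenthesized argument text
 aliased,          # True in rows whose import uses an alias, e.g. 'import numpy as np' (inferred)
 import_stmt) = entry

print(qualified_name, arg_text.strip())  # os.path.join (folder, invoice_name)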
import os def get_invoice_files(invoices, year=False): for invoice in invoices: if invoice.invoice_file: # Get folder for this invoice and create it if it doesn't exist if not invoice.afa: folder = invoice.invoice_type.name else: folder =...
[ "os.path.exists", "os.path.join", "os.mkdir" ]
[((650, 684), 'os.path.join', 'os.path.join', (['folder', 'invoice_name'], {}), '(folder, invoice_name)\n', (662, 684), False, 'import os\n'), ((346, 368), 'os.path.exists', 'os.path.exists', (['folder'], {}), '(folder)\n', (360, 368), False, 'import os\n'), ((386, 402), 'os.mkdir', 'os.mkdir', (['folder'], {}), '(fold...
"""Demonstrates partial run when some input data not there. """ from remake import Remake, TaskRule ex8 = Remake() class CannotRun(TaskRule): rule_inputs = {'in1': 'data/inputs/input_not_there.txt'} rule_outputs = {'out': 'data/inputs/ex8_in1.txt'} def rule_run(self): input_text = self.inputs['i...
[ "remake.Remake" ]
[((107, 115), 'remake.Remake', 'Remake', ([], {}), '()\n', (113, 115), False, 'from remake import Remake, TaskRule\n')]
#Image Stego using LSB import cv2 def encode(input_image_name, output_image_name, file_name): input_image = cv2.imread(input_image_name) height, width, nbchannels = input_image.shape size = width*height current_width = 0 current_height = 0 current_channel = 0 maskonevalues = [1, 2, 4, 8, ...
[ "cv2.imwrite", "cv2.imread" ]
[((113, 141), 'cv2.imread', 'cv2.imread', (['input_image_name'], {}), '(input_image_name)\n', (123, 141), False, 'import cv2\n'), ((3410, 3440), 'cv2.imread', 'cv2.imread', (['encoded_image_name'], {}), '(encoded_image_name)\n', (3420, 3440), False, 'import cv2\n'), ((3291, 3334), 'cv2.imwrite', 'cv2.imwrite', (['outpu...
import sys from unittest import mock flash = bytearray(8 * 1024 * 1024) def read_data(addr, amount): return flash[addr : addr + amount] def write_data(addr, data): flash[addr : addr + len(data)] = data if "flash" not in sys.modules: sys.modules["flash"] = mock.MagicMock(read=read_data, write=write_da...
[ "unittest.mock.MagicMock" ]
[((275, 323), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'read': 'read_data', 'write': 'write_data'}), '(read=read_data, write=write_data)\n', (289, 323), False, 'from unittest import mock\n')]
import tensorflow as tf from base.base_model import BaseModel from utils.alad_utils import get_getter import utils.alad_utils as sn class SENCEBGAN(BaseModel): def __init__(self, config): super(SENCEBGAN, self).__init__(config) self.build_model() self.init_saver() def build_model(sel...
[ "tensorflow.shape", "tensorflow.get_variable", "tensorflow.layers.Conv2D", "tensorflow.tanh", "tensorflow.reduce_sum", "tensorflow.group", "tensorflow.control_dependencies", "tensorflow.ones_like", "tensorflow.reduce_mean", "tensorflow.summary.image", "tensorflow.layers.Flatten", "tensorflow.l...
[((983, 1006), 'tensorflow.placeholder', 'tf.placeholder', (['tf.bool'], {}), '(tf.bool)\n', (997, 1006), True, 'import tensorflow as tf\n'), ((1038, 1061), 'tensorflow.placeholder', 'tf.placeholder', (['tf.bool'], {}), '(tf.bool)\n', (1052, 1061), True, 'import tensorflow as tf\n'), ((1095, 1118), 'tensorflow.placehol...
import chainer import numpy as np from test.util import generate_kernel_test_case, wrap_template from webdnn.graph.placeholder import Placeholder from webdnn.frontend.chainer.converter import ChainerConverter from webdnn.frontend.chainer.placeholder_variable import PlaceholderVariable @wrap_template def template(n=2...
[ "numpy.random.rand", "test.util.generate_kernel_test_case", "webdnn.graph.placeholder.Placeholder", "chainer.links.Convolution2D", "webdnn.frontend.chainer.converter.ChainerConverter", "webdnn.frontend.chainer.placeholder_variable.PlaceholderVariable" ]
[((423, 520), 'chainer.links.Convolution2D', 'chainer.links.Convolution2D', (['c_in', 'c_out'], {'ksize': 'ksize', 'stride': 'stride', 'pad': 'pad', 'nobias': 'nobias'}), '(c_in, c_out, ksize=ksize, stride=stride, pad=\n pad, nobias=nobias)\n', (450, 520), False, 'import chainer\n'), ((723, 881), 'test.util.generate...
#!/usr/bin/env python3 """ Hexdump Utility =============== A command line hexdump utility. See the module's `Github homepage <https://github.com/risapav/ihex_analyzer>`_ for details. """ # pouzite kniznice import struct import codecs # definovanie konstant ROWTYPE_DATA = 0x00 # Data container ROWTYPE_EOF = 0x01 # E...
[ "struct.unpack", "codecs.decode" ]
[((3255, 3281), 'codecs.decode', 'codecs.decode', (['data', '"""hex"""'], {}), "(data, 'hex')\n", (3268, 3281), False, 'import codecs\n'), ((3481, 3507), 'codecs.decode', 'codecs.decode', (['data', '"""hex"""'], {}), "(data, 'hex')\n", (3494, 3507), False, 'import codecs\n'), ((3710, 3736), 'codecs.decode', 'codecs.dec...
# -*- coding: utf-8 -*- """Console script for pyalmondplus.""" import sys import time import click import pyalmondplus.api import threading import asyncio def do_commands(url, my_api): click.echo("Connecting to " + url) while True: value = click.prompt("What next: ") print("command is: " + val...
[ "click.prompt", "click.option", "time.sleep", "click.echo", "threading.Thread", "click.command" ]
[((620, 635), 'click.command', 'click.command', ([], {}), '()\n', (633, 635), False, 'import click\n'), ((637, 670), 'click.option', 'click.option', (['"""--url"""'], {'default': '""""""'}), "('--url', default='')\n", (649, 670), False, 'import click\n'), ((191, 225), 'click.echo', 'click.echo', (["('Connecting to ' + ...
import hashlib import sys import getpass import argparse import rx7 as rx from LIB.Functions import pause, cls from LIB.Hash import sa def print_hashes(word, file=None, Print=True): word=bytes(word, encoding='utf-8') LIST = [] for name,func in sa.items(): try: result = func(word).he...
[ "rx7.style.print", "argparse.ArgumentParser", "LIB.Hash.sa.items", "LIB.Functions.cls", "rx7.fg", "LIB.Functions.pause", "rx7.attr" ]
[((261, 271), 'LIB.Hash.sa.items', 'sa.items', ([], {}), '()\n', (269, 271), False, 'from LIB.Hash import sa\n'), ((1154, 1277), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Hash Generator"""'], {'description': '"""Generate Hash of a word in all hash types"""', 'allow_abbrev': '(False)'}), "('Hash Genera...
import numpy as np from scipy.optimize import fmin_l_bfgs_b import time import argparse import cv2 from tensorflow.keras.models import load_model import numpy as np import csv import sys from matplotlib import pyplot as plt from PIL import Image from keras.preprocessing.image import img_to_array img = cv2.imread('po...
[ "keras.preprocessing.image.img_to_array", "PIL.Image.open", "numpy.where", "PIL.Image.new", "cv2.grabCut", "numpy.zeros", "cv2.imread" ]
[((306, 364), 'cv2.imread', 'cv2.imread', (['"""pokemonimages/Groudon.jpg"""', 'cv2.COLOR_BGR2RGB'], {}), "('pokemonimages/Groudon.jpg', cv2.COLOR_BGR2RGB)\n", (316, 364), False, 'import cv2\n'), ((387, 426), 'PIL.Image.open', 'Image.open', (['"""pokemonimages/Groudon.jpg"""'], {}), "('pokemonimages/Groudon.jpg')\n", (...
############################################################### ################# https://www.fardanesh.ir #################### ############################################################### from openpyxl import load_workbook from openpyxl.styles import Font wb=load_workbook('lecture06-styles/list1.xlsx') ws1=wb....
[ "openpyxl.load_workbook", "openpyxl.styles.Font" ]
[((268, 312), 'openpyxl.load_workbook', 'load_workbook', (['"""lecture06-styles/list1.xlsx"""'], {}), "('lecture06-styles/list1.xlsx')\n", (281, 312), False, 'from openpyxl import load_workbook\n'), ((337, 462), 'openpyxl.styles.Font', 'Font', ([], {'name': '"""Tahoma"""', 'size': '(22)', 'bold': '(True)', 'italic': '(...
import os import json import torch import sys import time import random import numpy as np from tqdm import tqdm, trange import torch.multiprocessing as mp import torch.distributed as dist from torch.utils.tensorboard import SummaryWriter from apex.parallel import DistributedDataParallel as DDP from apex import amp s...
[ "wandb.log", "wandb.init", "sys.path.append", "wandb.config.update", "numpy.random.seed", "os.getpid", "os.path.dirname", "util.gqa_train.data_reader.DataReader", "time.time", "models_gqa.model.LCGNwrapper", "torch.cuda.manual_seed_all", "torch.manual_seed", "os.makedirs", "torch.multiproc...
[((319, 340), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (334, 340), False, 'import sys\n'), ((1205, 1846), 'util.gqa_train.data_reader.DataReader', 'DataReader', (['imdb_file', 'rank', 'gpu', 'num_replicas'], {'shuffle': '(True)', 'max_num': 'max_num', 'batch_size': 'cfg.TRAIN.BATCH_SIZE', '...
from typing import * import re LOWER_ERROR = "Only dictionaries and lists can be modified by this method." def apply_deep(data: Union[Mapping, List], fun: Callable) -> Union[dict, list]: ''' Applies fun to all keys in data. The method is recursive and applies as deep as possible in the dictionary nest. ...
[ "re.sub" ]
[((5452, 5472), 're.sub', 're.sub', (['r', 's', 'string'], {}), '(r, s, string)\n', (5458, 5472), False, 'import re\n')]
import boto3 import requests import unittest import os class HelloWorldTests(unittest.TestCase): stack_outputs = None def get_stack_outputs(self): if self.stack_outputs is None: stack_name = "python-serverless-example-{}".format(os.getenv('SERVERLESS_STAGE', ...
[ "unittest.main", "boto3.session.Session" ]
[((1180, 1195), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1193, 1195), False, 'import unittest\n'), ((416, 462), 'boto3.session.Session', 'boto3.session.Session', ([], {'region_name': '"""us-east-1"""'}), "(region_name='us-east-1')\n", (437, 462), False, 'import boto3\n')]
import unittest from pathlib import Path import mock import requests_mock def check_upload_to_aws_s3(csv_path, s3_bucket): assert isinstance(csv_path, Path) return True class MyTestCase(unittest.TestCase): def setUp(self) -> None: import os os.makedirs("test", exist_ok=True) os...
[ "mock.patch", "main_xml_parser.main", "os.makedirs", "pathlib.Path", "requests_mock.mock", "main_xml_parser.download_file", "main_xml_parser.parse_main_xml", "unittest.main" ]
[((363, 383), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (381, 383), False, 'import requests_mock\n'), ((1487, 1561), 'mock.patch', 'mock.patch', (['"""main_xml_parser.upload_to_aws_s3"""'], {'new': 'check_upload_to_aws_s3'}), "('main_xml_parser.upload_to_aws_s3', new=check_upload_to_aws_s3)\n", (149...
# This file was automatically generated by SWIG (http://www.swig.org). # Version 3.0.7 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info if version_info >= (2, 6, 0): def swig_import_helper(): from os.path imp...
[ "_SimSpace_Occupied_Default.SimSpace_SpaceNetVolume", "_SimSpace_Occupied_Default.SimSpace_SpaceOccupantHeatRateSensible", "_SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigEPD", "_SimSpace_Occupied_Default.SimSpace_SpaceGrossFloorArea", "_SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequenc...
[((3287, 3356), '_SimSpace_Occupied_Default.SimSpace_SpaceZoneAssignments', '_SimSpace_Occupied_Default.SimSpace_SpaceZoneAssignments', (['self', '*args'], {}), '(self, *args)\n', (3343, 3356), False, 'import _SimSpace_Occupied_Default\n'), ((3407, 3467), '_SimSpace_Occupied_Default.SimSpace_SpaceNumber', '_SimSpace_Oc...
from django.contrib import admin from .models import MovieList, Movie class MoviesInline(admin.TabularInline): model = Movie extra = 1 fieldsets = [ ('Movies', {'fields': ['title']}), (None, {'fields': ['movielist']}) ] class MovieListAdmin(admin.ModelAdmin): fieldsets = [ ...
[ "django.contrib.admin.site.register" ]
[((561, 607), 'django.contrib.admin.site.register', 'admin.site.register', (['MovieList', 'MovieListAdmin'], {}), '(MovieList, MovieListAdmin)\n', (580, 607), False, 'from django.contrib import admin\n'), ((608, 634), 'django.contrib.admin.site.register', 'admin.site.register', (['Movie'], {}), '(Movie)\n', (627, 634),...
import torch import matplotlib.pyplot as plt from torchvision import datasets, transforms from random import choice BATCH_SIZE=64 # Load the mnist dataset train_loader = torch.utils.data.DataLoader( datasets.MNIST( "./data", train=True, download=True, transform...
[ "torchvision.transforms.ToTensor", "random.choice", "matplotlib.pyplot.show" ]
[((545, 563), 'random.choice', 'choice', (['train_data'], {}), '(train_data)\n', (551, 563), False, 'from random import choice\n'), ((603, 613), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (611, 613), True, 'import matplotlib.pyplot as plt\n'), ((358, 379), 'torchvision.transforms.ToTensor', 'transforms.ToT...
from sys import platform from distutils.core import setup from distutils.extension import Extension from Cython.Build import cythonize import numpy ext_modules = [ Extension( "src.libs.cutils", ["src/libs/cutils.pyx"], extra_compile_args=['/openmp' if platform == "win32" else '-fopenmp'] ...
[ "Cython.Build.cythonize", "distutils.extension.Extension", "numpy.get_include" ]
[((170, 299), 'distutils.extension.Extension', 'Extension', (['"""src.libs.cutils"""', "['src/libs/cutils.pyx']"], {'extra_compile_args': "['/openmp' if platform == 'win32' else '-fopenmp']"}), "('src.libs.cutils', ['src/libs/cutils.pyx'], extra_compile_args=[\n '/openmp' if platform == 'win32' else '-fopenmp'])\n",...
#!/usr/bin/env python3 import logging import os import sys import time import traceback from collections import namedtuple from pathlib import Path from screenshots import Client, Screenshooter def env(name, default): return os.environ.get(name, default) Spec = namedtuple( "Spec", "commands before aft...
[ "traceback.format_exc", "screenshots.Client", "screenshots.Screenshooter", "pathlib.Path", "os.environ.get", "time.sleep" ]
[((15789, 15797), 'screenshots.Client', 'Client', ([], {}), '()\n', (15795, 15797), False, 'from screenshots import Client, Screenshooter\n'), ((233, 262), 'os.environ.get', 'os.environ.get', (['name', 'default'], {}), '(name, default)\n', (247, 262), False, 'import os\n'), ((16195, 16210), 'time.sleep', 'time.sleep', ...
from pavo_cristatus.tests.doubles.module_fakes.module_fake_class import ModuleFakeClass from trochilidae.interoperable_with_metaclass import interoperable_with_metaclass_future __all__ = ["ModuleFakeClassWithNestedAnnotatedCallables"] class ModuleFakeClassWithNestedAnnotatedCallables(interoperable_with_metaclass_fut...
[ "trochilidae.interoperable_with_metaclass.interoperable_with_metaclass_future" ]
[((288, 340), 'trochilidae.interoperable_with_metaclass.interoperable_with_metaclass_future', 'interoperable_with_metaclass_future', (['ModuleFakeClass'], {}), '(ModuleFakeClass)\n', (323, 340), False, 'from trochilidae.interoperable_with_metaclass import interoperable_with_metaclass_future\n')]
import typing from dataclasses import dataclass from starlette.datastructures import URL, QueryParams @dataclass class PageControl: text: str url: URL = None is_active: bool = False is_disabled: bool = False def inclusive_range(st: int, en: int, cutoff: int) -> typing.List[int]: """ Return ...
[ "starlette.datastructures.QueryParams" ]
[((646, 668), 'starlette.datastructures.QueryParams', 'QueryParams', (['url.query'], {}), '(url.query)\n', (657, 668), False, 'from starlette.datastructures import URL, QueryParams\n')]
import numpy as np from nnfs.layers import Linear from nnfs.optimizers import SGD class Model: def __init__(self, layers, loss, optimizer=SGD(lr=0.01)): self.layers = layers self.loss = loss self.optimizer = optimizer def save_weights(self, filename): weights = [] for ...
[ "numpy.savez", "numpy.load", "nnfs.optimizers.SGD" ]
[((144, 156), 'nnfs.optimizers.SGD', 'SGD', ([], {'lr': '(0.01)'}), '(lr=0.01)\n', (147, 156), False, 'from nnfs.optimizers import SGD\n'), ((443, 471), 'numpy.savez', 'np.savez', (['filename', '*weights'], {}), '(filename, *weights)\n', (451, 471), True, 'import numpy as np\n'), ((529, 546), 'numpy.load', 'np.load', (...
# -*- coding: utf-8 -*- import os, sys, pdb import torch from torch.autograd import Variable import torch.nn.functional as F import torch.utils.data as data from torchvision import datasets, transforms import numpy as np import cv2, copy, time import matplotlib.pyplot as plt from scipy.ndimage import binary_fill_hol...
[ "skimage.morphology.remove_small_objects", "copy.deepcopy", "numpy.ones", "loader.PatchDataset", "scipy.ndimage.binary_fill_holes", "numpy.floor", "cv2.contourArea", "numpy.argsort", "numpy.zeros", "torch.utils.data.DataLoader", "cv2.findContours", "torch.no_grad", "skimage.transform.resize"...
[((661, 686), 'scipy.ndimage.binary_fill_holes', 'binary_fill_holes', (['binary'], {}), '(binary)\n', (678, 686), False, 'from scipy.ndimage import binary_fill_holes, binary_closing, binary_dilation\n'), ((734, 797), 'skimage.morphology.remove_small_objects', 'remove_small_objects', (['binary'], {'min_size': 'min_size'...
#!/usr/bin/env python # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software...
[ "osa_toolkit.manage.remove_inventory_item", "osa_toolkit.manage.remove_ip_addresses", "test_inventory.cleanup", "os.getcwd", "test_inventory.get_inventory", "test_inventory.make_config", "unittest.main", "osa_toolkit.manage.export_host_info", "os.remove" ]
[((660, 671), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (669, 671), False, 'import os\n'), ((758, 786), 'test_inventory.make_config', 'test_inventory.make_config', ([], {}), '()\n', (784, 786), False, 'import test_inventory\n'), ((815, 857), 'os.remove', 'os.remove', (['test_inventory.USER_CONFIG_FILE'], {}), '(test_...
# -*- coding: utf-8 -*- from django.conf import settings from django.utils.translation import gettext_lazy as _ from bluebottle.notifications.messages import TransitionMessage class DonationSuccessActivityManagerMessage(TransitionMessage): subject = _(u"You have a new donation!💰") template = 'messages/donat...
[ "django.utils.translation.gettext_lazy" ]
[((257, 288), 'django.utils.translation.gettext_lazy', '_', (['u"""You have a new donation!💰"""'], {}), "(u'You have a new donation!💰')\n", (258, 288), True, 'from django.utils.translation import gettext_lazy as _\n'), ((517, 547), 'django.utils.translation.gettext_lazy', '_', (['"""Thanks for your donation!"""'], {}...
import os import subprocess from inspect import isclass import configargparse import numpy as np import sqlalchemy import yaml from IPython import embed from angular_solver import solve from database import Config, ConfigHolder, Graph, Task, get_session, DatabaseGraphGenome from genetic_algorithm import (GeneticAlgo...
[ "database.ConfigHolder.fromNamespace", "database.Task", "genetic_algorithm.SaveCallback", "database.DatabaseGraphGenome.generation.desc", "configargparse.Parser", "database.get_session", "genetic_algorithm.GeneticAlgorithm", "IPython.embed", "database.ConfigHolder", "numpy.zeros", "configargpars...
[((5499, 5525), 'configargparse.Namespace', 'configargparse.Namespace', ([], {}), '()\n', (5523, 5525), False, 'import configargparse\n'), ((5562, 5585), 'configargparse.Parser', 'configargparse.Parser', ([], {}), '()\n', (5583, 5585), False, 'import configargparse\n'), ((5846, 5878), 'database.get_session', 'get_sessi...
def make_subject(sector: str, year: str, q: str): return f'[업종: {sector}] {year}년도 {q}분기' def make_strong_tag(value: str): return f'<strong>{value}</strong>' def make_p_tag(value: str): return f'<p>{value}</p>' def make_img_tag(name: str, src: str): return f'<img src="{src}" alt="{name}">' def m...
[ "re.sub" ]
[((706, 737), 're.sub', 're.sub', (['re_key', 'value', 'template'], {}), '(re_key, value, template)\n', (712, 737), False, 'import re\n')]
from __future__ import print_function, division import os import sys import pytest import warnings import numpy from galpy.util import galpyWarning from test_actionAngle import reset_warning_registry _TRAVIS= bool(os.getenv('TRAVIS')) PY2= sys.version < '3' # Print all galpyWarnings always for tests of warnings warning...
[ "galpy.actionAngle.actionAngleTorus", "numpy.array", "galpy.potential.IsochronePotential", "galpy.potential.interpRZPotential", "galpy.potential.PowerSphericalPotential", "test_potential.BurkertPotentialNoC", "galpy.potential.JaffePotential", "galpy.actionAngle.actionAngleStaeckel", "numpy.linspace"...
[((313, 358), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""', 'galpyWarning'], {}), "('always', galpyWarning)\n", (334, 358), False, 'import warnings\n'), ((214, 233), 'os.getenv', 'os.getenv', (['"""TRAVIS"""'], {}), "('TRAVIS')\n", (223, 233), False, 'import os\n'), ((689, 722), 'galpy.actionAngle...
#coding=utf-8 #2017.6.13 #By JerCas # 导入plist文件解析模块 import plistlib def findDuplicates(fileName): """查找重复曲目""" print("Finding duplicate tracks in "+ fileName +" ...") # 读取播放列表 # P-list文件将对象表示为字典,而播放列表文件使用的是一个字典的字典字典(值仍为一个字典);readPlist读入一个P-list文件作为输入,返回一个字典字典 plist = plistlib.readPlist(fileName) ...
[ "plistlib.readPlist" ]
[((290, 318), 'plistlib.readPlist', 'plistlib.readPlist', (['fileName'], {}), '(fileName)\n', (308, 318), False, 'import plistlib\n')]
#!/usr/bin/env python3 # Copyright (C) 2020-2020 <NAME>. All rights reserved. # # This file is subject to the terms and conditions defined in file 'LICENSE', # which is part of this source code package. from typing import Optional import aioserial import asyncio import click import functools from perso import PTE, ...
[ "click.argument", "perso.PersoDataV1", "click.group", "click.option", "functools.wraps", "rtlib.Eui", "aioserial.AioSerial" ]
[((1855, 1868), 'click.group', 'click.group', ([], {}), '()\n', (1866, 1868), False, 'import click\n'), ((1870, 1942), 'click.option', 'click.option', (['"""-p"""', '"""--port"""'], {'default': '"""/dev/ttyACM0"""', 'help': '"""serial port"""'}), "('-p', '--port', default='/dev/ttyACM0', help='serial port')\n", (1882, ...
import numpy as np from scipy import ndimage __all__ = ['gabor_kernel', 'gabor_filter'] def _sigma_prefactor(bandwidth): b = bandwidth # See http://www.cs.rug.nl/~imaging/simplecell.html return 1.0 / np.pi * np.sqrt(np.log(2)/2.0) * (2.0**b + 1) / (2.0**b - 1) def gabor_kernel(frequency, theta=0, band...
[ "numpy.log", "numpy.exp", "numpy.real", "numpy.zeros", "numpy.cos", "numpy.sin", "numpy.imag" ]
[((2221, 2256), 'numpy.zeros', 'np.zeros', (['y.shape'], {'dtype': 'np.complex'}), '(y.shape, dtype=np.complex)\n', (2229, 2256), True, 'import numpy as np\n'), ((2268, 2336), 'numpy.exp', 'np.exp', (['(-0.5 * (rotx ** 2 / sigma_x ** 2 + roty ** 2 / sigma_y ** 2))'], {}), '(-0.5 * (rotx ** 2 / sigma_x ** 2 + roty ** 2 ...
# -*- coding: utf-8 -*- from setuptools import setup setup(name='compatstudy', url='http://github.com/rogalski/python-compat-libs-study', author='<NAME>', author_email='<EMAIL>', license='MIT', packages=['compatstudy'] )
[ "setuptools.setup" ]
[((54, 229), 'setuptools.setup', 'setup', ([], {'name': '"""compatstudy"""', 'url': '"""http://github.com/rogalski/python-compat-libs-study"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['compatstudy']"}), "(name='compatstudy', url=\n 'http://github.com/rogalski...
from reconbf.modules import test_kernel from reconbf.lib.result import Result from reconbf.lib import utils import unittest from mock import patch class PtraceScope(unittest.TestCase): def test_no_yama(self): with patch.object(utils, 'kconfig_option', return_value=None): res = test_kernel.tes...
[ "mock.patch.object", "reconbf.modules.test_kernel.test_ptrace_scope" ]
[((229, 285), 'mock.patch.object', 'patch.object', (['utils', '"""kconfig_option"""'], {'return_value': 'None'}), "(utils, 'kconfig_option', return_value=None)\n", (241, 285), False, 'from mock import patch\n'), ((305, 336), 'reconbf.modules.test_kernel.test_ptrace_scope', 'test_kernel.test_ptrace_scope', ([], {}), '()...
from flask import Flask, render_template from flask_sqlalchemy import SQLAlchemy from flask_login import LoginManager from flask_migrate import Migrate from config import Config db = SQLAlchemy() login_manager = LoginManager() migrate = Migrate() def create_app(): app = Flask(__name__) app.config.from_object...
[ "flask_sqlalchemy.SQLAlchemy", "flask_login.LoginManager", "flask_migrate.Migrate", "flask.Flask" ]
[((185, 197), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (195, 197), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((214, 228), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (226, 228), False, 'from flask_login import LoginManager\n'), ((239, 248), 'flask_migrate.Migrate', 'Mi...
#!/usr/bin/env python from strip import Strip import random import time import signal import logging logger = logging.getLogger(__name__) def init_logging(log_level): logging.basicConfig(level=log_level) # catch signals for tidy exit _exiting = False def signal_handler(signal, frame): global _exiting _...
[ "logging.getLogger", "logging.basicConfig", "signal.signal", "time.sleep", "strip.Strip", "time.time", "random.randint" ]
[((113, 140), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (130, 140), False, 'import logging\n'), ((175, 211), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'log_level'}), '(level=log_level)\n', (194, 211), False, 'import logging\n'), ((427, 471), 'signal.signal', 'signa...
# Copyright 2015 - StackStorm, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agr...
[ "mistral.db.v2.api.get_action_executions", "mistral.db.v2.api.get_workflow_execution", "oslo_config.cfg.CONF.set_default", "mistral.db.v2.api.transaction", "mistral.exceptions.ActionException", "mistral.services.workbooks.create_workbook_v2" ]
[((1015, 1072), 'oslo_config.cfg.CONF.set_default', 'cfg.CONF.set_default', (['"""auth_enable"""', '(False)'], {'group': '"""pecan"""'}), "('auth_enable', False, group='pecan')\n", (1035, 1072), False, 'from oslo_config import cfg\n'), ((2280, 2326), 'mistral.services.workbooks.create_workbook_v2', 'wb_service.create_w...
import glob import pygame VEC = pygame.math.Vector2 class Sprite(pygame.sprite.Sprite): def __init__(self, start_x, start_y, height, width, vel=VEC(0, 0)): #pygame.sprite.Sprite.__init__(self) super().__init__() self.pos = VEC(start_x, start_y) self.rect = pygame.Rect(start_x, sta...
[ "pygame.image.load", "pygame.draw.rect", "pygame.Rect", "glob.glob" ]
[((296, 340), 'pygame.Rect', 'pygame.Rect', (['start_x', 'start_y', 'width', 'height'], {}), '(start_x, start_y, width, height)\n', (307, 340), False, 'import pygame\n'), ((870, 885), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (879, 885), False, 'import glob\n'), ((667, 717), 'pygame.draw.rect', 'pygame.draw...
#!/usr/bin/env python # -*- coding: utf-8 -*- # connectivity.py # definitions of connectivity characters import math import warnings import networkx as nx import numpy as np from tqdm import tqdm __all__ = [ "node_degree", "meshedness", "mean_node_dist", "cds_length", "mean_node_degree", "pro...
[ "networkx.degree", "numpy.mean", "math.sqrt", "networkx.Graph", "networkx.ego_graph", "collections.Counter", "networkx.square_clustering", "networkx.closeness_centrality", "networkx.set_edge_attributes", "functools.partial", "networkx.set_node_attributes", "networkx.betweenness_centrality", ...
[((1223, 1265), 'networkx.set_node_attributes', 'nx.set_node_attributes', (['netx', 'degree', 'name'], {}), '(netx, degree, name)\n', (1245, 1265), True, 'import networkx as nx\n'), ((9377, 9404), 'collections.Counter', 'collections.Counter', (['values'], {}), '(values)\n', (9396, 9404), False, 'import collections\n'),...
import unittest from model import prediction_with_model import pandas as pd import numpy as np class PredictionWithModel(unittest.TestCase): def test_prediction(self): d = pd.read_csv(r"C:\Users\Toan\Documents\GitHub\colossi\static\temp\cc7deed8140745d89f2f42f716f6fd1b\out_imac_atlas_expression_v7.1.tsv", ...
[ "unittest.main", "model.prediction_with_model", "pandas.read_csv" ]
[((465, 480), 'unittest.main', 'unittest.main', ([], {}), '()\n', (478, 480), False, 'import unittest\n'), ((185, 342), 'pandas.read_csv', 'pd.read_csv', (['"""C:\\\\Users\\\\Toan\\\\Documents\\\\GitHub\\\\colossi\\\\static\\\\temp\\\\cc7deed8140745d89f2f42f716f6fd1b\\\\out_imac_atlas_expression_v7.1.tsv"""', '""" """'...
import sys, os import subprocess from git_tools import git_tools import matplotlib.pylab as pylab current_dir = os.getcwd() authors_data = {} def commit_analysis(commit_hash, initial_date): # print("commit: " + commit_hash) author_name_cmd = "git log -1 --pretty=format:'%an' " + commit_hash author_name_...
[ "subprocess.check_output", "matplotlib.pylab.legend", "git_tools.git_tools.get_commit_date", "os.getcwd", "matplotlib.pylab.show" ]
[((113, 124), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (122, 124), False, 'import sys, os\n'), ((329, 381), 'subprocess.check_output', 'subprocess.check_output', (['author_name_cmd'], {'shell': '(True)'}), '(author_name_cmd, shell=True)\n', (352, 381), False, 'import subprocess\n'), ((2233, 2263), 'matplotlib.pylab....
#!/usr/bin/env python2.6 # nodestored.py - block storage access API # renamed from gt-xm-storage.py # refactored from gt-xm-reimage0.py # refactored from original gt-xm-reimage0 shell script # # Copyright (C) 2006-2011 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use t...
[ "tarfile.open", "logging.debug", "os.path.ismount", "time.sleep", "random.getrandbits", "signal.alarm", "sys.exit", "os.fork", "pickle.loads", "os.path.islink", "urllib2.urlopen", "os.readlink", "ConfigParser.RawConfigParser", "subprocess.Popen", "twisted.python.filepath.FilePath", "st...
[((1357, 1420), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': 'LOG_FILENAME', 'level': 'logging.DEBUG'}), '(filename=LOG_FILENAME, level=logging.DEBUG)\n', (1376, 1420), False, 'import logging\n'), ((2041, 2052), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2049, 2052), False, 'import sys\n'), ((3...
import json, os from position import Pos class Loader: def __init__(self, id): file_name = str(id) + ".json" path = os.path.dirname(__file__) path_json = os.path.join(path, "data", file_name) file = open(path_json) dict = json.load(file) players = dict["players"] ...
[ "json.load", "os.path.dirname", "os.path.join", "position.Pos" ]
[((137, 162), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (152, 162), False, 'import json, os\n'), ((183, 220), 'os.path.join', 'os.path.join', (['path', '"""data"""', 'file_name'], {}), "(path, 'data', file_name)\n", (195, 220), False, 'import json, os\n'), ((267, 282), 'json.load', 'json...
import os from collections import OrderedDict import torch import torch.nn as nn from torch.jit.annotations import List, Dict from torchvision.ops.misc import FrozenBatchNorm2d from .feature_pyramid_network import FeaturePyramidNetwork, LastLevelMaxPool class Bottleneck(nn.Module): # conv block 和 identity block ...
[ "os.path.exists", "torch.nn.ReLU", "collections.OrderedDict", "torch.nn.Sequential", "torch.load", "torch.nn.init.kaiming_normal_", "torch.nn.Conv2d", "torch.nn.MaxPool2d", "torch.nn.AdaptiveAvgPool2d", "torch.nn.Linear", "torch.flatten" ]
[((565, 665), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channel', 'out_channels': 'out_channel', 'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(in_channels=in_channel, out_channels=out_channel, kernel_size=1,\n stride=1, bias=False)\n', (574, 665), True, 'import torch.nn as nn\n'), ((82...
# Copyright 2017 Neural Networks and Deep Learning lab, MIPT # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicab...
[ "tensorflow.keras.backend.log", "tensorflow.keras.backend.shape", "tensorflow.keras.activations.get", "tensorflow.keras.backend.greater", "tensorflow.keras.layers.Multiply", "tensorflow.keras.layers.InputSpec", "tensorflow.keras.layers.Lambda", "tensorflow.keras.layers.Dropout", "tensorflow.keras.ba...
[((7912, 7940), 'tensorflow.keras.backend.log', 'K.log', (['(1.0 + position_inputs)'], {}), '(1.0 + position_inputs)\n', (7917, 7940), True, 'import tensorflow.keras.backend as K\n'), ((991, 1027), 'tensorflow.keras.activations.get', 'tf.keras.activations.get', (['activation'], {}), '(activation)\n', (1015, 1027), True...
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by appli...
[ "argparse.ArgumentParser", "paddle.io.DistributedBatchSampler", "paddle.optimizer.LinearLrWarmup", "coco.COCODataset", "os.path.join", "coco_metric.COCOMetric", "paddle.enable_static", "modeling.yolov3_darknet53", "paddle.optimizer.Momentum", "modeling.YoloLoss", "paddle.io.DataLoader", "paddl...
[((1435, 1501), 'paddle.optimizer.PiecewiseLR', 'paddle.optimizer.PiecewiseLR', ([], {'boundaries': 'boundaries', 'values': 'values'}), '(boundaries=boundaries, values=values)\n', (1463, 1501), False, 'import paddle\n'), ((1535, 1667), 'paddle.optimizer.LinearLrWarmup', 'paddle.optimizer.LinearLrWarmup', ([], {'learnin...
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by appli...
[ "paddle.is_compiled_with_xpu", "paddle.is_compiled_with_rocm", "numpy.array", "numpy.split", "paddle.is_compiled_with_cuda", "paddle.is_compiled_with_npu" ]
[((3596, 3626), 'paddle.is_compiled_with_cuda', 'paddle.is_compiled_with_cuda', ([], {}), '()\n', (3624, 3626), False, 'import paddle\n'), ((5717, 5732), 'numpy.array', 'np.array', (['probs'], {}), '(probs)\n', (5725, 5732), True, 'import numpy as np\n'), ((3658, 3687), 'paddle.is_compiled_with_npu', 'paddle.is_compile...
""" Submlime Text Package File Search. Licensed under MIT Copyright (c) 2012 <NAME> <<EMAIL>> """ import sublime import re from os import walk, listdir from os.path import basename, dirname, isdir, join, normpath, splitext, exists from fnmatch import fnmatch import zipfile __all__ = ( "sublime_package_paths", ...
[ "os.path.exists", "os.listdir", "zipfile.ZipFile", "re.compile", "os.walk", "re.match", "os.path.join", "os.path.normpath", "os.path.isdir", "sublime.executable_path", "os.path.basename", "fnmatch.fnmatch", "sublime.platform", "sublime.find_resources", "sublime.packages_path", "sublime...
[((479, 567), 're.compile', 're.compile', (['"""(?:/|^)(?:[^/]*\\\\.(?:pyc|pyo)|\\\\.git|\\\\.svn|\\\\.hg|\\\\.DS_Store)(?=$|/)"""'], {}), "(\n '(?:/|^)(?:[^/]*\\\\.(?:pyc|pyo)|\\\\.git|\\\\.svn|\\\\.hg|\\\\.DS_Store)(?=$|/)')\n", (489, 567), False, 'import re\n'), ((1258, 1268), 'os.path.isdir', 'isdir', (['pth'], ...
""" Helper functions for the LocalizerX module. """ from __future__ import absolute_import, unicode_literals import logging from django.conf import settings from django.core.exceptions import ImproperlyConfigured from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers LOGGER = loggin...
[ "logging.getLogger", "openedx.core.djangoapps.site_configuration.helpers.get_value", "django.conf.settings.FEATURES.get", "django.conf.settings.ENV_TOKENS.get" ]
[((314, 341), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (331, 341), False, 'import logging\n'), ((2897, 2973), 'django.conf.settings.ENV_TOKENS.get', 'settings.ENV_TOKENS.get', (['"""LOCALIZERX_API_URL_PREFIXES"""', 'default_api_prefixes'], {}), "('LOCALIZERX_API_URL_PREFIXES', defau...
import typing import error REQUEST_TABLE = { 'get': [[str]], 'as': [str], 'by': ([str], 'optional'), 'if': ([list], 'optional'), 'except': ([list], 'optional'), 'join': ([ { 'name': str, 'of': [str], } ], 'optional'), 'macro': ...
[ "error.API" ]
[((2569, 2583), 'error.API', 'error.API', (['msg'], {}), '(msg)\n', (2578, 2583), False, 'import error\n')]
################################################################################################################################## # Adarsh , Aadithya # ###################################################...
[ "serial.Serial" ]
[((444, 479), 'serial.Serial', 'serial.Serial', (['"""/dev/rfcomm1"""', '(9600)'], {}), "('/dev/rfcomm1', 9600)\n", (457, 479), False, 'import serial\n')]
""" The Yahoo finance component. https://github.com/iprak/yahoofinance """ from __future__ import annotations from datetime import timedelta import logging from typing import Final, Union from homeassistant.const import CONF_SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.helpers impor...
[ "logging.getLogger", "voluptuous.Required", "custom_components.yahoofinance.coordinator.YahooSymbolUpdateCoordinator", "voluptuous.Any", "homeassistant.helpers.discovery.async_load_platform", "voluptuous.Invalid", "datetime.timedelta", "voluptuous.Optional", "voluptuous.Coerce", "voluptuous.All" ]
[((1134, 1161), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1151, 1161), False, 'import logging\n'), ((1193, 1211), 'datetime.timedelta', 'timedelta', ([], {'hours': '(6)'}), '(hours=6)\n', (1202, 1211), False, 'from datetime import timedelta\n'), ((1243, 1264), 'datetime.timedelta', ...
# Generated by Django 3.2.7 on 2021-09-16 09:50 from django.db import migrations, models import uuid class Migration(migrations.Migration): dependencies = [ ('imports', '0004_importbatch_id'), ] operations = [ migrations.AlterField( model_name='importbatch', name...
[ "django.db.models.CharField" ]
[((346, 437), 'django.db.models.CharField', 'models.CharField', ([], {'default': 'uuid.uuid4', 'max_length': '(200)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, max_length=200, primary_key=True,\n serialize=False)\n', (362, 437), False, 'from django.db import migrations, models\n')]
# Copyright 2013 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by...
[ "magnum.common.exception.Conflict", "magnum.common.utils.is_uuid_like", "magnum.i18n._", "wsme.exc.ClientSideError", "jsonpatch.JsonPatch", "magnum.common.exception.ResourceNotFound" ]
[((2221, 2255), 'magnum.common.utils.is_uuid_like', 'utils.is_uuid_like', (['resource_ident'], {}), '(resource_ident)\n', (2239, 2255), False, 'from magnum.common import utils\n'), ((2899, 2933), 'magnum.common.utils.is_uuid_like', 'utils.is_uuid_like', (['resource_ident'], {}), '(resource_ident)\n', (2917, 2933), Fals...
# Copyright 2022 Netskope Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,...
[ "argparse.ArgumentParser", "google.cloud.pubsublite.types.PublishTime", "google.cloud.pubsublite.AdminClient", "google.cloud.pubsublite.types.CloudRegion", "datetime.datetime.fromisoformat", "google.cloud.pubsublite.types.SubscriptionPath", "google.cloud.pubsublite.types.EventTime" ]
[((2476, 2535), 'google.cloud.pubsublite.types.SubscriptionPath', 'SubscriptionPath', (['project_number', 'location', 'subscription_id'], {}), '(project_number, location, subscription_id)\n', (2492, 2535), False, 'from google.cloud.pubsublite.types import CloudRegion, CloudZone, SubscriptionPath, BacklogLocation, Publi...
import os from tqdm import tqdm from mmdet.apis import init_detector, inference_detector import numpy as np import torch import mmcv import cv2 import json import PIL testset_dir = '/home/xiekaiyu/ocr/dataset/ICDAR2019ArT/test_task13' output_dir = '/home/xiekaiyu/ocr/dataset/ICDAR2019ArT/output/preds' model_name = '...
[ "os.listdir", "PIL.Image.open", "numpy.where", "mmdet.apis.init_detector", "os.path.join", "cv2.findContours", "mmcv.concat_list", "numpy.vstack", "numpy.concatenate", "numpy.random.seed", "json.load", "numpy.full", "mmdet.apis.inference_detector", "torch.cuda.empty_cache", "json.dump" ]
[((518, 578), 'mmdet.apis.init_detector', 'init_detector', (['config_file', 'checkpoint_file'], {'device': '"""cuda:0"""'}), "(config_file, checkpoint_file, device='cuda:0')\n", (531, 578), False, 'from mmdet.apis import init_detector, inference_detector\n'), ((639, 662), 'os.listdir', 'os.listdir', (['testset_dir'], {...
import os import subprocess import config result = {} cwd = os.path.dirname( os.path.realpath( __file__ ) ) def parse_file(f): if not config.omit_xrdb: subprocess.call("xrdb -merge " + f, shell = True) res = os.popen("xrdb -query").read()[:-1].split("\n") for i in res: fields = i.split(":...
[ "os.path.realpath", "os.popen", "subprocess.call" ]
[((78, 104), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n'), ((166, 213), 'subprocess.call', 'subprocess.call', (["('xrdb -merge ' + f)"], {'shell': '(True)'}), "('xrdb -merge ' + f, shell=True)\n", (181, 213), False, 'import subprocess\n'), ((226, 249), 'os...
from __future__ import print_function # if __name__ == '__main__' and __package__ == None: if __name__ == '__main__': # __package__ = 'imagemp' import ctypes # need to define the types for the data in the memory shared between processes import argparse, os # provide interface for calling this ...
[ "imagemp.Scheduler", "imagemp.Consumer", "argparse.ArgumentParser", "imagemp.FrameGrabberRunner", "os.path.splitext", "os.path.join", "time.sleep", "os.path.realpath", "imagemp.SharedFrameList", "imagemp.process_runners.examples.simple_display.SimpleDisplay", "imagemp.SharedSingleFrame", "imag...
[((840, 865), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (863, 865), False, 'import argparse, os\n'), ((3482, 3500), 'imagemp.SharedEvents', 'imp.SharedEvents', ([], {}), '()\n', (3498, 3500), True, 'import imagemp as imp\n'), ((3550, 3572), 'imagemp.Scheduler', 'imp.Scheduler', ([], {'dt':...
#!/usr/bin/python # -*- coding: utf-8 -*- from subprocess import Popen import os output = open("output.txt", "ab") error = open("errors.txt", "ab") #"-u", "{0}/main.py".format(os.getcwd()) Popen(["/usr/bin/python", "-u", "{0}/__main__.py".format(os.getcwd())], stdout = output, stderr = error)
[ "os.getcwd" ]
[((248, 259), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (257, 259), False, 'import os\n')]
from pyb import Pin, Timer def startV(): """ freq 1khz for voltage -V """ p = Pin('A8') tim = Timer(1 , freq=1000) ch = tim.channel(1, Timer.PWM, pin=p) ch.pulse_width_percent(50)
[ "pyb.Pin", "pyb.Timer" ]
[((87, 96), 'pyb.Pin', 'Pin', (['"""A8"""'], {}), "('A8')\n", (90, 96), False, 'from pyb import Pin, Timer\n'), ((107, 126), 'pyb.Timer', 'Timer', (['(1)'], {'freq': '(1000)'}), '(1, freq=1000)\n', (112, 126), False, 'from pyb import Pin, Timer\n')]
#!/usr/bin/env python import rospy import cv2 import numpy as np from std_msgs.msg import String from sensor_msgs.msg import Image, CompressedImage from cv_bridge import CvBridge, CvBridgeError bridge = CvBridge() class ImageAverager: def __init__(self): self.publisher = rospy.Publisher("~topic_out", Imag...
[ "rospy.Subscriber", "rospy.init_node", "numpy.fromstring", "cv_bridge.CvBridge", "cv2.addWeighted", "rospy.spin", "cv2.imdecode", "rospy.Publisher" ]
[((204, 214), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (212, 214), False, 'from cv_bridge import CvBridge, CvBridgeError\n'), ((1372, 1425), 'rospy.init_node', 'rospy.init_node', (['"""image_average_node"""'], {'anonymous': '(True)'}), "('image_average_node', anonymous=True)\n", (1387, 1425), False, 'import ...
from collections import defaultdict import numpy as np class MetricsAccumulator: def __init__(self) -> None: self.accumulator = defaultdict(lambda: []) def update_metric(self, metric_name, metric_value): self.accumulator[metric_name].append(metric_value) def print_average_metric(self): ...
[ "numpy.array", "collections.defaultdict" ]
[((143, 167), 'collections.defaultdict', 'defaultdict', (['(lambda : [])'], {}), '(lambda : [])\n', (154, 167), False, 'from collections import defaultdict\n'), ((390, 401), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (398, 401), True, 'import numpy as np\n')]
#! -*- coding:utf-8 -*- ''' @Author: ZM @Date and Time: 2020/12/15 20:27 @File: ToOneHot.py ''' import numpy as np class ToOneHot: def __init__(self, num_classes): self.num_classes = num_classes def __call__(self, data): data_size = data.size if data_size > 1: ...
[ "numpy.zeros", "numpy.arange" ]
[((335, 391), 'numpy.zeros', 'np.zeros', (['(data_size, self.num_classes)'], {'dtype': '"""float32"""'}), "((data_size, self.num_classes), dtype='float32')\n", (343, 391), True, 'import numpy as np\n'), ((481, 524), 'numpy.zeros', 'np.zeros', (['self.num_classes'], {'dtype': '"""float32"""'}), "(self.num_classes, dtype...
from tkinter import * from tkinter import messagebox class marqueur(Tk): G = 0 N = 0 B = 0 equipedehors = None taille = 220 sizeSet = 100 chainetaille = "-size " + str(taille) sizeStringSet = "-size " + str(sizeSet) def __init__(self, nom, data): Tk.__init__(self) s...
[ "tkinter.messagebox.showerror", "tkinter.messagebox.showinfo", "tkinter.messagebox.askyesno" ]
[((7712, 7787), 'tkinter.messagebox.askyesno', 'messagebox.askyesno', (['"""Sortie à 11"""', "('Est-ce que les %s sortent ?' % couleur)"], {}), "('Sortie à 11', 'Est-ce que les %s sortent ?' % couleur)\n", (7731, 7787), False, 'from tkinter import messagebox\n'), ((8548, 8604), 'tkinter.messagebox.askyesno', 'messagebo...
import contextlib import numbers import typing import numpy as np import pandas as pd from river import base from river import optim from river import utils __all__ = [ 'LinearRegression', 'LogisticRegression', 'Perceptron' ] class GLM: """Generalized Linear Model. This serves as a base class...
[ "numpy.clip", "river.utils.pretty.print_table", "river.optim.schedulers.Constant", "river.optim.SGD", "river.utils.VectorDict", "river.optim.losses.Log", "river.utils.math.clamp", "numpy.argsort", "river.optim.losses.Squared", "numpy.einsum", "river.optim.initializers.Zeros", "pandas.DataFrame...
[((865, 887), 'river.utils.VectorDict', 'utils.VectorDict', (['None'], {}), '(None)\n', (881, 887), False, 'from river import utils\n'), ((3188, 3251), 'numpy.clip', 'np.clip', (['loss_gradient', '(-self.clip_gradient)', 'self.clip_gradient'], {}), '(loss_gradient, -self.clip_gradient, self.clip_gradient)\n', (3195, 32...
# coding: utf-8 from __future__ import absolute_import, division, print_function __author__ = "<NAME>" __copyright__ = "Copyright 2015, <NAME>, Liu lab" __email__ = "<EMAIL>" __license__ = "MIT" import json import re from io import StringIO import pandas as pd from flask import render_template from ..results.comm...
[ "io.StringIO", "pandas.concat", "pandas.read_table", "re.compile" ]
[((2929, 2966), 're.compile', 're.compile', (['"""Filename\\\\t(?P<name>.*)"""'], {}), "('Filename\\\\t(?P<name>.*)')\n", (2939, 2966), False, 'import re\n'), ((3175, 3309), 're.compile', 're.compile', (['"""\\\\n\\\\>\\\\>(?P<name>[\\\\w ]+)\\\\t(pass|fail)\\\\n\\\\#(?P<data>.+?)\\\\n\\\\>\\\\>END_MODULE"""'], {'flags...
import gc import json import logging import os import shutil import xml.etree.ElementTree as ET import zipfile from datetime import datetime from io import BytesIO import requests from prettytable import PrettyTable from pymongo.errors import PyMongoError from src.dataset import Dataset class LegalEntitiesRegister(...
[ "prettytable.PrettyTable", "json.loads", "os.listdir", "xml.etree.ElementTree.parse", "io.BytesIO", "logging.warning", "requests.get", "datetime.datetime.now", "gc.collect", "shutil.rmtree", "logging.info", "logging.error" ]
[((7950, 7962), 'gc.collect', 'gc.collect', ([], {}), '()\n', (7960, 7962), False, 'import gc\n'), ((8480, 8494), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8492, 8494), False, 'from datetime import datetime\n'), ((8524, 8538), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8536, 8538), Fa...
from django.core.mail.backends.base import BaseEmailBackend from mailer.models import Message class DbBackend(BaseEmailBackend): def send_messages(self, email_messages): num_sent = 0 for email in email_messages: msg = Message() msg.email = email msg.save() ...
[ "mailer.models.Message" ]
[((254, 263), 'mailer.models.Message', 'Message', ([], {}), '()\n', (261, 263), False, 'from mailer.models import Message\n')]
# Generated by Django 3.2.9 on 2021-11-14 08:06 from decimal import Decimal from django.db import migrations, models import djmoney.models.fields class Migration(migrations.Migration): dependencies = [ ('accounts', '0016_alter_order_location'), ] operations = [ migrations.AlterField( ...
[ "decimal.Decimal", "django.db.models.IntegerField" ]
[((397, 443), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'editable': '(False)'}), '(default=0, editable=False)\n', (416, 443), False, 'from django.db import migrations, models\n'), ((626, 638), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (633, 638), False, 'from deci...
import pytest from graphene import Field, ID, Int, ObjectType, String from .. import graphql_compatibility from ..extend import extend, external, requires from ..main import build_schema PRODUCT_SCHEMA_2 = """schema { query: Query } type Product { sku: ID size: Int weight: Int shippingEstimate: String } ...
[ "graphene.String", "graphene.Field", "graphene.ID", "graphene.Int", "pytest.raises" ]
[((3601, 3630), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (3614, 3630), False, 'import pytest\n'), ((4265, 4279), 'graphene.Field', 'Field', (['Product'], {}), '(Product)\n', (4270, 4279), False, 'from graphene import Field, ID, Int, ObjectType, String\n'), ((5333, 5347), 'graphe...
# -*- coding: utf-8 -*- import requests as req from bs4 import BeautifulSoup as bs import lxml from pathlib import Path # csv writer import pandas as pd import time from .config import BASE_DIR, BASE_URL # import functions from common.py from .common import (initialize, get_chrome_driver, makedirs, ...
[ "bs4.BeautifulSoup", "pandas.DataFrame", "time.sleep" ]
[((603, 616), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (613, 616), False, 'import time\n'), ((679, 692), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (689, 692), False, 'import time\n'), ((1590, 1611), 'bs4.BeautifulSoup', 'bs', (['r.content', '"""lxml"""'], {}), "(r.content, 'lxml')\n", (1592, 1611),...
from main import keys import jwt import pyrtk def create_jwt(user_id): return jwt.encode({'user_id': user_id}, keys['JWT_KEY'], algorithm="HS256").decode('utf-8') def create_rtk(user_id): return pyrtk.create_token(user_id, keys['RTK_KEY']) def decode_jwt(user_id): pass def decode_rtk(user_id): ...
[ "pyrtk.create_token", "jwt.encode" ]
[((209, 253), 'pyrtk.create_token', 'pyrtk.create_token', (['user_id', "keys['RTK_KEY']"], {}), "(user_id, keys['RTK_KEY'])\n", (227, 253), False, 'import pyrtk\n'), ((86, 154), 'jwt.encode', 'jwt.encode', (["{'user_id': user_id}", "keys['JWT_KEY']"], {'algorithm': '"""HS256"""'}), "({'user_id': user_id}, keys['JWT_KEY...
from pydantic import BaseModel, Field class Answer(BaseModel): key: str = Field(description="Name of the question") question: str = Field(description="The actual question") answer: bool = Field(description="Boolean answer")
[ "pydantic.Field" ]
[((80, 121), 'pydantic.Field', 'Field', ([], {'description': '"""Name of the question"""'}), "(description='Name of the question')\n", (85, 121), False, 'from pydantic import BaseModel, Field\n'), ((142, 182), 'pydantic.Field', 'Field', ([], {'description': '"""The actual question"""'}), "(description='The actual quest...
"""Working doc for Condition data models https://gist.github.com/mpkocher/347f9ae9092c24888e1c702a916276c2 """ from collections import namedtuple class ReseqCondition(namedtuple("ReseqCondition", "cond_id subreadset alignmentset referenceset")): def to_dict(self): return {"condId": self.cond_id, ...
[ "collections.namedtuple" ]
[((171, 247), 'collections.namedtuple', 'namedtuple', (['"""ReseqCondition"""', '"""cond_id subreadset alignmentset referenceset"""'], {}), "('ReseqCondition', 'cond_id subreadset alignmentset referenceset')\n", (181, 247), False, 'from collections import namedtuple\n'), ((720, 763), 'collections.namedtuple', 'namedtup...
from twilio.twiml.voice_response import Pay, VoiceResponse response = VoiceResponse() response.pay() print(response)
[ "twilio.twiml.voice_response.VoiceResponse" ]
[((71, 86), 'twilio.twiml.voice_response.VoiceResponse', 'VoiceResponse', ([], {}), '()\n', (84, 86), False, 'from twilio.twiml.voice_response import Pay, VoiceResponse\n')]
import pygame from pygame.sprite import Sprite class Block(Sprite): """A class representing the snake.""" def __init__(self, screen, settings, color, x, y): """Initialize the Snake.""" super(Block, self).__init__() self.screen = screen self.settings = settings self.ima...
[ "pygame.Surface" ]
[((325, 349), 'pygame.Surface', 'pygame.Surface', (['(20, 20)'], {}), '((20, 20))\n', (339, 349), False, 'import pygame\n')]
import os import shutil import argparse import time import json from datetime import datetime from collections import defaultdict from itertools import islice import pickle import copy import numpy as np import cv2 import torch from torch import nn from torch import autograd import torch.nn.functional as F import tor...
[ "mvn.utils.vis.visualize_batch", "mvn.utils.misc.config_to_str", "CameraCalibration.ArucoCalibrator", "mvn.models.algPose2d.BaselinePose2d", "onnx.load", "argparse.ArgumentParser", "torch.onnx.export", "mvn.utils.img.normalize_image", "mvn.utils.vis.visualize_heatmaps", "torch.transpose", "mvn.u...
[((942, 967), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (965, 967), False, 'import argparse\n'), ((2857, 2899), 'os.path.join', 'os.path.join', (['args.logdir', 'experiment_name'], {}), '(args.logdir, experiment_name)\n', (2869, 2899), False, 'import os\n'), ((2904, 2946), 'os.makedirs', '...
import functools from typing import Optional, Sequence from fvcore.common.registry import Registry as _Registry from tabulate import tabulate class Registry(_Registry): """Extension of fvcore's registry that supports aliases.""" _ALIAS_KEYWORDS = ("_aliases", "_ALIASES") def __init__(self, name: str): ...
[ "tabulate.tabulate", "functools.partial" ]
[((2501, 2558), 'tabulate.tabulate', 'tabulate', (['metadata'], {'headers': '"""keys"""', 'tablefmt': '"""fancy_grid"""'}), "(metadata, headers='keys', tablefmt='fancy_grid')\n", (2509, 2558), False, 'from tabulate import tabulate\n'), ((1398, 1431), 'functools.partial', 'functools.partial', (['deco'], {}), '(deco, **k...
# -*- coding: utf-8 -*- """ @author: <NAME> """ import random import string import uuid import pytest from src.encryption_ops.decryption import Decryption from src.encryption_ops.encrypt import encrypt from src.encryption_ops.encryption import Encryption SALT = uuid.uuid4().bytes PASSWORD = '<PASSWORD>' ENCRYPTED...
[ "random.choice", "random.randrange", "src.encryption_ops.decryption.Decryption", "uuid.uuid4", "pytest.raises", "src.encryption_ops.encryption.Encryption", "src.encryption_ops.encrypt.encrypt" ]
[((332, 355), 'src.encryption_ops.encrypt.encrypt', 'encrypt', (['SALT', 'PASSWORD'], {}), '(SALT, PASSWORD)\n', (339, 355), False, 'from src.encryption_ops.encrypt import encrypt\n'), ((268, 280), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (278, 280), False, 'import uuid\n'), ((375, 410), 'random.choice', 'random.c...
#!/usr/bin/env python # -*- coding: utf-8 -*- from conans import ConanFile, CMake, tools import os class CCTZConan(ConanFile): name = "cctz" version = "2.2" url = "https://github.com/bincrafters/conan-cctz" homepage = "https://github.com/google/cctz" description = "C++ library for translating bet...
[ "conans.tools.download", "os.rename", "conans.CMake", "os.path.join", "conans.tools.collect_libs" ]
[((1114, 1161), 'os.rename', 'os.rename', (['extracted_dir', 'self.source_subfolder'], {}), '(extracted_dir, self.source_subfolder)\n', (1123, 1161), False, 'import os\n'), ((1424, 1477), 'os.path.join', 'os.path.join', (['self.source_subfolder', '"""CMakeLists.txt"""'], {}), "(self.source_subfolder, 'CMakeLists.txt')\...
''' Description: program that looks up kId in the expert_lz_task table Author: jtx Time: 2020-10-10 17:09 ''' import os from logging.handlers import RotatingFileHandler import logging import pymongo logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s') # 获取当...
[ "logging.basicConfig", "logging.getLogger", "logging.Formatter", "os.path.join", "os.path.dirname", "pymongo.MongoClient" ]
[((171, 298), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s')\n", (190, 298), False, '...
# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or ...
[ "logging.getLogger", "logging.StreamHandler", "logging.Formatter", "tensorflow.Session", "numpy.zeros" ]
[((6029, 6056), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (6046, 6056), False, 'import logging\n'), ((6156, 6232), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s %(levelname)s %(filename)s: %(message)s"""'}), "(fmt='%(asctime)s %(levelname)s %(filename)s: %(me...
""" File: 392.py Title: Is Subsequence Difficulty: Easy URL: https://leetcode.com/problems/is-subsequence/ """ import unittest from collections import defaultdict from typing import List class Solution: def isSubsequence(self, s: str, t: str) -> bool: hash_map = defaultdict(list) ...
[ "unittest.main", "collections.defaultdict" ]
[((1374, 1389), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1387, 1389), False, 'import unittest\n'), ((295, 312), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (306, 312), False, 'from collections import defaultdict\n')]
import functools import math from pathlib import Path from typing import Callable, Dict from flwr.server.strategy import Strategy from sources.experiments.experiment_metadata import ExperimentMetadata from sources.experiments.grid_search_metadata_provider import ParameterGridMetadataGenerator from experiments.cifar1...
[ "pathlib.Path", "math.pow", "sources.datasets.cifar10_lda.cifar10_lda_client_dataset_factory.Cifar10LdaClientDatasetFactory", "functools.partial", "sources.models.cifar10_lda.cifar10_lda_model_template.Cifar10LdaKerasModelTemplate", "sources.dataset_creation_utils.create_lda_dataset_utils.get_lda_cifar10_...
[((1479, 1496), 'math.pow', 'math.pow', (['(10)', 'exp'], {}), '(10, exp)\n', (1487, 1496), False, 'import math\n'), ((1705, 1747), 'sources.models.cifar10_lda.cifar10_lda_model_template.Cifar10LdaKerasModelTemplate', 'Cifar10LdaKerasModelTemplate', (['DEFAULT_SEED'], {}), '(DEFAULT_SEED)\n', (1733, 1747), False, 'from...
import os from datetime import datetime def create_res_folder(script): # Build path this_file = os.path.abspath(os.path.dirname(__file__)) directory = os.path.join(this_file, '../' + script + '/res/' + datetime.today().strftime("%Y-%m-%d")) # Check if folder exists if not os.path.exists(director...
[ "datetime.datetime.today", "os.path.dirname", "os.path.exists", "os.makedirs" ]
[((601, 627), 'os.makedirs', 'os.makedirs', (['run_directory'], {}), '(run_directory)\n', (612, 627), False, 'import os\n'), ((123, 148), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (138, 148), False, 'import os\n'), ((297, 322), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(d...
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ Helpers for setting `stat (2)` options on files, directories, etc, which we are creating inside the image. """ impo...
[ "antlir.nspawn_in_subvol.ba_runner.BuildAppliance" ]
[((4359, 4398), 'antlir.nspawn_in_subvol.ba_runner.BuildAppliance', 'BuildAppliance', (['subvol', 'build_appliance'], {}), '(subvol, build_appliance)\n', (4373, 4398), False, 'from antlir.nspawn_in_subvol.ba_runner import BuildAppliance\n')]
"""Prioritization scheme for identifying follow up variants in tumor-only samples. Generalizes the filtering scheme used in VarDict post-processing: https://github.com/AstraZeneca-NGS/VarDict/blob/9ffec9168e91534fac5fb74b3ec7bdd2badd3464/vcf2txt.pl#L190 The goal is to build up a standard set of prioritization filter...
[ "bcbio.pipeline.datadict.get_tools_on", "bcbio.variation.vcfutils.bgzip_and_index", "collections.namedtuple", "bcbio.variation.population.run_vcfanno", "bcbio.utils.file_exists", "re.compile", "bcbio.utils.LazyImport", "locale.getpreferredencoding", "csv.writer", "bcbio.utils.splitext_plus", "bc...
[((965, 996), 'bcbio.utils.LazyImport', 'utils.LazyImport', (['"""geneimpacts"""'], {}), "('geneimpacts')\n", (981, 996), False, 'from bcbio import utils\n'), ((1006, 1032), 'bcbio.utils.LazyImport', 'utils.LazyImport', (['"""cyvcf2"""'], {}), "('cyvcf2')\n", (1022, 1032), False, 'from bcbio import utils\n'), ((2884, 2...
import sys import argparse import os import cv2 import yaml from PIL import Image from importlib.machinery import SourceFileLoader import torch from torch import nn from tqdm import tqdm import numpy as np import matplotlib.pyplot as plt import pandas import numpy __filedir__ = os.path.dirname(os.path.realpath(__file_...
[ "feature_graph.models.dtoid.network.Network", "PIL.Image.fromarray", "os.listdir", "pandas.read_csv", "numpy.where", "torch.load", "tqdm.tqdm", "os.path.join", "os.path.realpath", "numpy.stack", "torch.cat" ]
[((296, 322), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (312, 322), False, 'import os\n'), ((668, 692), 'feature_graph.models.dtoid.network.Network', 'network_module.Network', ([], {}), '()\n', (690, 692), True, 'import feature_graph.models.dtoid.network as network_module\n'), ((860, 9...
from setuptools import setup with open('README.rst', 'r') as f: long_description = f.read() setup( name='stickbugml', version='1.0.4', description='A framework to organize the process of designing supervised machine learning systems', keywords='stick bug, ml, machine learning, ai, artificial intel...
[ "setuptools.setup" ]
[((98, 1019), 'setuptools.setup', 'setup', ([], {'name': '"""stickbugml"""', 'version': '"""1.0.4"""', 'description': '"""A framework to organize the process of designing supervised machine learning systems"""', 'keywords': '"""stick bug, ml, machine learning, ai, artificial intelligence, framework, organization, organ...
import urllib.request,json from .models import Article import requests from newsapi import NewsApiClient api_key = None sources_url = None def configure_request(app): global api_key,sources_url,articles_url api_key = app.config['NEWS_API_KEY'] # sources_url = app.config['NEWS_SOURCES_BASE_URL'] # articles_url ...
[ "requests.get", "newsapi.NewsApiClient" ]
[((365, 395), 'newsapi.NewsApiClient', 'NewsApiClient', ([], {'api_key': '"""<KEY>"""'}), "(api_key='<KEY>')\n", (378, 395), False, 'from newsapi import NewsApiClient\n'), ((540, 565), 'requests.get', 'requests.get', (['sources_url'], {}), '(sources_url)\n', (552, 565), False, 'import requests\n'), ((872, 898), 'reques...
# coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import absolute_import, division, print_function, unicode_literals import http.server import socketserver import threading import time def get_delayed_han...
[ "threading.Thread", "time.sleep" ]
[((651, 696), 'threading.Thread', 'threading.Thread', ([], {'target': 'server.serve_forever'}), '(target=server.serve_forever)\n', (667, 696), False, 'import threading\n'), ((430, 447), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (440, 447), False, 'import time\n')]
from django.shortcuts import render from django.shortcuts import render,redirect from django.contrib.auth.models import User from django.contrib import auth import os import platform def base(request): return render(request,"home.html") def login(request): if request.method=="POST": user=auth.authent...
[ "django.shortcuts.render", "django.contrib.auth.authenticate", "django.contrib.auth.login", "platform.system", "django.shortcuts.redirect", "os.system", "django.contrib.auth.logout" ]
[((214, 242), 'django.shortcuts.render', 'render', (['request', '"""home.html"""'], {}), "(request, 'home.html')\n", (220, 242), False, 'from django.shortcuts import render, redirect\n'), ((698, 718), 'django.contrib.auth.logout', 'auth.logout', (['request'], {}), '(request)\n', (709, 718), False, 'from django.contrib ...
import os def test_stack_functions(stack_data): errMsg = 'Error: \'stack.yml\' file does not contain any functions.' assert 'functions' in stack_data, errMsg def test_git_ignore(stack_dir): path = os.path.join(stack_dir, '.gitignore') errMsg = f'Error: No \'.gitignore\' file found in {stack_dir}.' ...
[ "os.path.realpath", "os.path.join" ]
[((213, 250), 'os.path.join', 'os.path.join', (['stack_dir', '""".gitignore"""'], {}), "(stack_dir, '.gitignore')\n", (225, 250), False, 'import os\n'), ((442, 492), 'os.path.join', 'os.path.join', (['stack_dir', "stack_function['handler']"], {}), "(stack_dir, stack_function['handler'])\n", (454, 492), False, 'import o...
import os import random import numpy as np import tensorflow as tf random.seed(1234) def load_or_initialize_model(sess, saver, model_name, model_path): sess.run(tf.global_variables_initializer()) if os.path.isfile(model_path+model_name+'/'+model_name+'.ckpt.meta'): saver.restore(sess, model_path+mo...
[ "numpy.mean", "random.choice", "random.shuffle", "os.makedirs", "random.seed", "os.path.isfile", "tensorflow.global_variables_initializer", "numpy.array", "os.path.isdir" ]
[((69, 86), 'random.seed', 'random.seed', (['(1234)'], {}), '(1234)\n', (80, 86), False, 'import random\n'), ((212, 285), 'os.path.isfile', 'os.path.isfile', (["(model_path + model_name + '/' + model_name + '.ckpt.meta')"], {}), "(model_path + model_name + '/' + model_name + '.ckpt.meta')\n", (226, 285), False, 'import...
import logging import unittest from unittest.mock import Mock, MagicMock from genie.libs.clean.stages.iosxe.cat9k.stages import TftpBoot from genie.libs.clean.stages.tests.utils import CommonStageTests, create_test_device from pyats.aetest.steps import Steps from pyats.results import Passed, Failed from pyats.aetest...
[ "unittest.mock.Mock", "genie.libs.clean.stages.tests.utils.create_test_device", "pyats.aetest.steps.Steps", "genie.libs.clean.stages.iosxe.cat9k.stages.TftpBoot", "logging.disable" ]
[((485, 518), 'logging.disable', 'logging.disable', (['logging.CRITICAL'], {}), '(logging.CRITICAL)\n', (500, 518), False, 'import logging\n'), ((632, 642), 'genie.libs.clean.stages.iosxe.cat9k.stages.TftpBoot', 'TftpBoot', ([], {}), '()\n', (640, 642), False, 'from genie.libs.clean.stages.iosxe.cat9k.stages import Tft...
import json import pkgutil import textwrap from twitter.aurora.config.schema import base as base_schema from pystachio.config import Config as PystachioConfig class AuroraConfigLoader(PystachioConfig): SCHEMA_MODULES = [] @classmethod def assembled_schema(cls, schema_modules): default_schema = [super(Aur...
[ "json.loads", "pkgutil.iter_modules", "twitter.aurora.config.schema.base.Job.json_load" ]
[((1009, 1047), 'pkgutil.iter_modules', 'pkgutil.iter_modules', (['package.__path__'], {}), '(package.__path__)\n', (1029, 1047), False, 'import pkgutil\n'), ((1648, 1677), 'twitter.aurora.config.schema.base.Job.json_load', 'base_schema.Job.json_load', (['fp'], {}), '(fp)\n', (1673, 1677), True, 'from twitter.aurora.co...
import numpy as np def class_count(data_holder): unique, counts = np.unique(data_holder.target, return_counts=True) return unique, counts
[ "numpy.unique" ]
[((71, 120), 'numpy.unique', 'np.unique', (['data_holder.target'], {'return_counts': '(True)'}), '(data_holder.target, return_counts=True)\n', (80, 120), True, 'import numpy as np\n')]
import json # import logging from .utils import is_invalid_params from .exceptions import ( JSONRPCInvalidParams, JSONRPCInvalidRequest, JSONRPCInvalidRequestException, JSONRPCMethodNotFound, JSONRPCParseError, JSONRPCServerError, JSONRPCDispatchException, ) from .jsonrpc1 import JSONRPC10Re...
[ "json.loads" ]
[((1276, 1299), 'json.loads', 'json.loads', (['request_str'], {}), '(request_str)\n', (1286, 1299), False, 'import json\n')]
"""eval_yolo.py This script is for evaluating mAP (accuracy) of YOLO models. """ import os import sys import json import argparse import cv2 import pycuda.autoinit # This is needed for initializing CUDA driver from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval from progressbar import prog...
[ "os.listdir", "pycocotools.cocoeval.COCOeval", "argparse.ArgumentParser", "json.dumps", "pycocotools.coco.COCO", "os.path.join", "progressbar.progressbar", "os.path.isfile", "utils.yolo_with_plugins.TrtYOLO", "os.path.isdir", "sys.exit" ]
[((678, 719), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (701, 719), False, 'import argparse\n'), ((2193, 2210), 'progressbar.progressbar', 'progressbar', (['jpgs'], {}), '(jpgs)\n', (2204, 2210), False, 'from progressbar import progressbar\n'), ((3490, ...
"""Define tests for the device's onboard sensors.""" import json import aiohttp import pytest from aiolookin import async_get_device from aiolookin.errors import SensorError from .common import TEST_IP_ADDRESS @pytest.mark.asyncio async def test_invalid_sensor(aresponses, device_server, sensor_list): """Test t...
[ "aiohttp.ClientSession", "aiolookin.async_get_device", "pytest.raises", "json.dumps" ]
[((678, 701), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (699, 701), False, 'import aiohttp\n'), ((2262, 2285), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (2283, 2285), False, 'import aiohttp\n'), ((3046, 3069), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {})...