| code (string, 22 to 1.05M chars) | apis (list, 1 to 3.31k items) | extract_api (string, 75 to 3.25M chars) |
|---|---|---|
#!/usr/bin/python
import sys
from os.path import join,exists,dirname
import numpy as np
from numpy.random import randint
from sklearn.datasets import load_svmlight_file
from torch.autograd import Function, Variable
import torch.nn as nn
import torch.optim as optim
import torch
from torch import FloatTensor
from uda_c... | [
"torch.nn.Sigmoid",
"torch.nn.ReLU",
"sklearn.datasets.load_svmlight_file",
"torch.nn.Sequential",
"numpy.exp",
"sys.stderr.write",
"torch.nn.BCELoss",
"torch.cuda.is_available",
"numpy.sum",
"os.path.dirname",
"torch.nn.Linear",
"sys.exit",
"numpy.random.randint",
"torch.nn.LogSoftmax",
... | [((2921, 2946), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2944, 2946), False, 'import torch\n'), ((3248, 3307), 'sys.stderr.write', 'sys.stderr.write', (["('Reading source data from %s\\n' % args[0])"], {}), "('Reading source data from %s\\n' % args[0])\n", (3264, 3307), False, 'import sy... |
import importlib
import logging
import os
from ..utils import elapsed
logger = logging.getLogger(__name__)
#
# Input formats is a dictionary of supported format names and the accepted
# file extensions
#
# The first file will be parsed by the read() function and the addfile will be
# parsed by readadd()
#
# Typically... | [
"logging.getLogger",
"os.path.splitext",
"importlib.import_module"
] | [((81, 108), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (98, 108), False, 'import logging\n'), ((4754, 4801), 'importlib.import_module', 'importlib.import_module', (["('.' + 'dome')", '__name__'], {}), "('.' + 'dome', __name__)\n", (4777, 4801), False, 'import importlib\n'), ((2071, 2... |
import numpy as np
def dist(x, y, norm=2):
# x: N x D
# y: M x D
n = x.shape[0]
m = y.shape[0]
d = x.shape[1]
assert d == y.shape[1]
x = np.expand_dims(x, axis=1) # (n,d)->(n,1,d)
y = np.expand_dims(y, axis=0) # (m,d)->(1,m,d)
# x = np.repeat(x, m, axis=1) # (n,1,d)->(n,m,d)
... | [
"numpy.abs",
"numpy.power",
"numpy.argmax",
"numpy.max",
"numpy.array",
"numpy.zeros",
"numpy.expand_dims",
"numpy.arange"
] | [((167, 192), 'numpy.expand_dims', 'np.expand_dims', (['x'], {'axis': '(1)'}), '(x, axis=1)\n', (181, 192), True, 'import numpy as np\n'), ((219, 244), 'numpy.expand_dims', 'np.expand_dims', (['y'], {'axis': '(0)'}), '(y, axis=0)\n', (233, 244), True, 'import numpy as np\n'), ((1145, 1170), 'numpy.zeros', 'np.zeros', (... |
from models import model
from datetime import datetime
import re
class MainModel(model.Model):
def postQuestion(self, title, body, tagsList, poster):
"""
Inserts question posts into the database
Parameters
----------
title : str
title of question
body : str
body of question
poster: str
usern... | [
"datetime.datetime.now",
"re.compile"
] | [((2771, 2821), 're.compile', 're.compile', (["('.* ' + keyword + ' .*')", 're.IGNORECASE'], {}), "('.* ' + keyword + ' .*', re.IGNORECASE)\n", (2781, 2821), False, 'import re\n'), ((1854, 1868), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1866, 1868), False, 'from datetime import datetime\n')] |
"""
email handler
"""
import os
import json
import pandas as pd
from datetime import datetime
from datacoco_email_tools import Email
class Handler(object):
def __init__(self, config):
self.environment = config["hambot"]["environment"]
self.aws_conf = config["aws"]
self.aws_key = self.aws_... | [
"os.path.exists",
"datacoco_email_tools.Email.send_mail",
"json.dumps",
"datacoco_email_tools.Email.send_email_with_attachment"
] | [((1047, 1093), 'os.path.exists', 'os.path.exists', (['"""diagnostic_query_results.csv"""'], {}), "('diagnostic_query_results.csv')\n", (1061, 1093), False, 'import os\n'), ((1180, 1233), 'json.dumps', 'json.dumps', (['result'], {'indent': '(4)', 'default': 'json_serializer'}), '(result, indent=4, default=json_serializ... |
import csv
import copy
import math
def apply_mask(mask, value):
# format value as bits
valbits = list("{0:036b}".format(int(value)))
for i,v in enumerate(mask):
if v != "X":
valbits[i] = v
valbits = "".join(valbits)
return int(valbits,2)
if __name__ == "__main__":
# l... | [
"csv.reader"
] | [((421, 440), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (431, 440), False, 'import csv\n')] |
import os
from conans import ConanFile, CMake, tools
from conans.errors import ConanInvalidConfiguration
class EnttConan(ConanFile):
name = "entt"
description = "Gaming meets modern C++ - a fast and reliable entity-component system (ECS) and much more"
    topics = ("conan", "entt", "gaming", "entity", "ecs"... | [
"conans.tools.Version",
"os.rename",
"conans.CMake",
"conans.errors.ConanInvalidConfiguration",
"os.path.join",
"conans.tools.check_min_cppstd",
"conans.tools.get"
] | [((1452, 1497), 'conans.tools.Version', 'tools.Version', (['self.settings.compiler.version'], {}), '(self.settings.compiler.version)\n', (1465, 1497), False, 'from conans import ConanFile, CMake, tools\n'), ((1729, 1782), 'conans.tools.get', 'tools.get', ([], {}), "(**self.conan_data['sources'][self.version])\n", (1738... |
# -*- coding: utf-8 -*-
from .Qt import QtCore, QtGui
from .Vector import Vector
from .SRTTransform import SRTTransform
import pyqtgraph as pg
import numpy as np
import scipy.linalg
class SRTTransform3D(pg.Transform3D):
"""4x4 Transform matrix that can always be represented as a combination of 3 matrices: scale * ... | [
"numpy.abs",
"pyqtgraph.Transform3D.__init__",
"GraphicsView.GraphicsView",
"numpy.cross",
"pyqtgraph.Transform3D.translate",
"pyqtgraph.Transform3D.setToIdentity",
"pyqtgraph.Vector",
"widgets.TestROI",
"pyqtgraph.Transform3D.scale",
"numpy.dot",
"pyqtgraph.Transform3D.rotate",
"numpy.arctan2... | [((8374, 8401), 'GraphicsView.GraphicsView', 'GraphicsView.GraphicsView', ([], {}), '()\n', (8399, 8401), False, 'import GraphicsView\n'), ((10192, 10244), 'widgets.TestROI', 'widgets.TestROI', (['(19, 19)', '(22, 22)'], {'invertible': '(True)'}), '((19, 19), (22, 22), invertible=True)\n', (10207, 10244), False, 'impor... |
import pytest
from wemake_python_styleguide.violations.complexity import (
TooDeepAccessViolation,
)
from wemake_python_styleguide.visitors.ast.complexity.access import (
AccessVisitor,
)
# boundary expressions
subscript_access = 'my_matrix[0][0][0][0]'
attribute_access = 'self.attr.inner.wrapper.value'
mixed... | [
"pytest.mark.parametrize",
"wemake_python_styleguide.visitors.ast.complexity.access.AccessVisitor"
] | [((619, 743), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""code"""', '[subscript_access, attribute_access, mixed_access, mixed_with_calls_access,\n call_chain]'], {}), "('code', [subscript_access, attribute_access,\n mixed_access, mixed_with_calls_access, call_chain])\n", (642, 743), False, 'import... |
#@title Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under... | [
"tensorflow.keras.datasets.mnist.load_data",
"tensorflow.keras.layers.Dropout",
"os.path.dirname",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.models.load_model",
"tensorflow.losses.SparseCategoricalCrossentropy",
"tensorflow.keras.callbacks.ModelCheckpoint",
"tensorflow.train.latest_checkpoint... | [((1876, 1911), 'tensorflow.keras.datasets.mnist.load_data', 'tf.keras.datasets.mnist.load_data', ([], {}), '()\n', (1909, 1911), True, 'import tensorflow as tf\n'), ((2696, 2728), 'os.path.dirname', 'os.path.dirname', (['checkpoint_path'], {}), '(checkpoint_path)\n', (2711, 2728), False, 'import os\n'), ((2795, 2894),... |
from .utils import DefaultModelVocabResizer
from .model_structure import ModelStructure
import torch
from torch import nn
class T5VocabResizer(DefaultModelVocabResizer):
model_name : str = 't5'
@classmethod
def set_embeddings(cls, model, token_ids):
def _prun(old_weight, token_ids):
p... | [
"torch.LongTensor",
"torch.nn.Embedding"
] | [((1709, 1755), 'torch.nn.Embedding', 'nn.Embedding', (['pruned_num_tokens', 'embedding_dim'], {}), '(pruned_num_tokens, embedding_dim)\n', (1721, 1755), False, 'from torch import nn\n'), ((1951, 1997), 'torch.nn.Embedding', 'nn.Embedding', (['pruned_num_tokens', 'embedding_dim'], {}), '(pruned_num_tokens, embedding_di... |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 12 12:22:38 2020
@author: emc1977
"""
import math as math
def function ( x ):
import numpy as np
    #Note that here I have negated all terms, converting the minimiser into a maximiser.
    #The function value is returned as a negative, though in reality it ... | [
"time.ctime",
"numpy.sqrt",
"math.sin",
"time.time",
"platform.python_version"
] | [((3104, 3119), 'numpy.sqrt', 'np.sqrt', (['machep'], {}), '(machep)\n', (3111, 3119), True, 'import numpy as np\n'), ((3130, 3145), 'numpy.sqrt', 'np.sqrt', (['machep'], {}), '(machep)\n', (3137, 3145), True, 'import numpy as np\n'), ((3881, 3892), 'time.time', 'time.time', ([], {}), '()\n', (3890, 3892), False, 'impo... |
from my_code import legs
def test_inc():
assert 20 == legs(2,2,2)
assert 34 == legs(5,2,4)
assert 22 == legs(1,3,2)
| [
"my_code.legs"
] | [((60, 73), 'my_code.legs', 'legs', (['(2)', '(2)', '(2)'], {}), '(2, 2, 2)\n', (64, 73), False, 'from my_code import legs\n'), ((89, 102), 'my_code.legs', 'legs', (['(5)', '(2)', '(4)'], {}), '(5, 2, 4)\n', (93, 102), False, 'from my_code import legs\n'), ((118, 131), 'my_code.legs', 'legs', (['(1)', '(3)', '(2)'], {}... |
import json
import numpy
import time
import pyspark
from azureml.core.model import Model
from pyspark.ml import PipelineModel
from azureml.monitoring import ModelDataCollector
from mmlspark import LightGBMRegressor
from mmlspark import LightGBMRegressionModel
def init():
try:
# One-time initialization of ... | [
"json.loads",
"pyspark.ml.PipelineModel.load",
"azureml.monitoring.ModelDataCollector",
"json.dumps",
"time.strftime",
"azureml.core.model.Model.get_model_path",
"numpy.array",
"pyspark.sql.SparkSession.builder.appName"
] | [((2187, 2217), 'json.dumps', 'json.dumps', (["{'result': result}"], {}), "({'result': result})\n", (2197, 2217), False, 'import json\n'), ((512, 603), 'azureml.monitoring.ModelDataCollector', 'ModelDataCollector', (['model_name'], {'identifier': '"""inputs"""', 'feature_names': "['json_input_data']"}), "(model_name, i... |
from typing import Dict, Union, Callable, Iterable
from abc import ABC
from textwrap import dedent
from datetime import datetime
from functools import partial
import swimport
from swimport.model import FileSource
import swimport.swim as swim_module
class Pool(ABC):
"""a class for pools or pools with partial ar... | [
"datetime.datetime.now",
"functools.partial",
"swimport.model.FileSource",
"textwrap.dedent"
] | [((1319, 1368), 'functools.partial', 'partial', (['self.__func__', '*self.args'], {}), '(self.__func__, *self.args, **self.kwargs)\n', (1326, 1368), False, 'from functools import partial\n'), ((3560, 3582), 'functools.partial', 'partial', (['ret'], {}), '(ret, **kwargs)\n', (3567, 3582), False, 'from functools import p... |
import yaml
import os
from ..directories import flying
class InfoManager:
all_files = dict()
def __init__(self, from_file_name1: str, to_file_name2: str):
self.file_name = to_file_name2
self.from_file_name1 = from_file_name1
if set([self.file_name]) & set(InfoManager.all_files.keys()... | [
"yaml.safe_load",
"yaml.dump"
] | [((581, 598), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (595, 598), False, 'import yaml\n'), ((689, 728), 'yaml.dump', 'yaml.dump', (['self._data_loaded', 'yaml_file'], {}), '(self._data_loaded, yaml_file)\n', (698, 728), False, 'import yaml\n')] |
import os
from xml.etree import ElementTree
from twin_sister import dependency
from .status import ERROR, FAIL, PASS
def locate_suite(root):
return root if "testsuite" == root.tag else root.find(".//testsuite")
def extract_status(filename):
with dependency(open)(filename, "r") as f:
xml = f.read()... | [
"xml.etree.ElementTree.fromstring",
"twin_sister.dependency",
"os.path.join"
] | [((340, 367), 'xml.etree.ElementTree.fromstring', 'ElementTree.fromstring', (['xml'], {}), '(xml)\n', (362, 367), False, 'from xml.etree import ElementTree\n'), ((680, 699), 'twin_sister.dependency', 'dependency', (['os.walk'], {}), '(os.walk)\n', (690, 699), False, 'from twin_sister import dependency\n'), ((260, 276),... |
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from distutils.version import LooseVersion
import numpy as np
import pytest
from common.layer_test_class import check_ir_version
from common.tf_layer_test_class import CommonTFLayerTest
from common.utils.tf_utils import permute_nchw_to_... | [
"tensorflow.compat.v1.placeholder",
"numpy.copy",
"common.layer_test_class.check_ir_version",
"distutils.version.LooseVersion",
"pytest.mark.skip",
"tensorflow.nn.log_softmax",
"pytest.mark.parametrize",
"openvino.tools.mo.front.common.partial_infer.utils.int64_array",
"unit_tests.utils.graph.build_... | [((5612, 5666), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""params"""', 'test_data_precommit'], {}), "('params', test_data_precommit)\n", (5635, 5666), False, 'import pytest\n'), ((6395, 6439), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""params"""', 'test_data'], {}), "('params', test_da... |
# small helper script to check results
import fnmatch
import os
import shutil
import time
import json
import sys
liste = []
CHECKFOLDER = sys.argv[1]
"""
# we do not need the pickle files here
for root, dirnames, filenames in os.walk(CHECKFOLDER):
for filename in fnmatch.filter(filenames, 'scenario.pickle'):
... | [
"os.path.exists",
"os.makedirs",
"os.path.join",
"shutil.copyfile",
"fnmatch.filter",
"os.walk"
] | [((590, 610), 'os.walk', 'os.walk', (['CHECKFOLDER'], {}), '(CHECKFOLDER)\n', (597, 610), False, 'import os\n'), ((873, 908), 'os.path.join', 'os.path.join', (['CHECKFOLDER', '"""errors"""'], {}), "(CHECKFOLDER, 'errors')\n", (885, 908), False, 'import os\n'), ((916, 944), 'os.path.exists', 'os.path.exists', (['error_f... |
import sqlite3
from pathlib import Path
from typing import List, NamedTuple, Union
from dtsdb import sqlite_util
class ScannedFile(NamedTuple):
path: Path
sdid: str
class ScannerState(object):
def __init__(self, conn: sqlite3.Connection, inbox_path: Union[str, Path]) -> None:
# TODO(fyhuang): w... | [
"dtsdb.sqlite_util.ensure_table_matches",
"pathlib.Path"
] | [((422, 438), 'pathlib.Path', 'Path', (['inbox_path'], {}), '(inbox_path)\n', (426, 438), False, 'from pathlib import Path\n'), ((747, 798), 'dtsdb.sqlite_util.ensure_table_matches', 'sqlite_util.ensure_table_matches', (['self.conn', 'schema'], {}), '(self.conn, schema)\n', (779, 798), False, 'from dtsdb import sqlite_... |
#! /usr/bin/env python
# Author: <NAME> (srinivas . zinka [at] gmail . com)
# Copyright (c) 2014 <NAME>
# License: New BSD License.
import numpy as np
# from mayavi import mlab
from scipy import integrate
from scipy.special import sph_harm
# adjusting "matplotlib" label fonts
from matplotlib import rc
rc('text', u... | [
"numpy.reshape",
"scipy.integrate.quad",
"numpy.array",
"numpy.zeros",
"matplotlib.rc",
"numpy.cos",
"numpy.sin"
] | [((308, 331), 'matplotlib.rc', 'rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (310, 331), False, 'from matplotlib import rc\n'), ((619, 652), 'numpy.zeros', 'np.zeros', (['(N, 1)'], {'dtype': '"""complex"""'}), "((N, 1), dtype='complex')\n", (627, 652), True, 'import numpy as np\n'), ((983, 10... |
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
# Packages
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet
from ryu.lib.packe... | [
"ryu.lib.packet.packet.Packet",
"os.getenv",
"ryu.topology.api.get_switch",
"ryu.base.app_manager.require_app",
"ryu.lib.packet.ipv4.ipv4",
"pickle.load",
"os.path.isfile",
"networkx.MultiGraph",
"networkx.dijkstra_path",
"ryu.lib.packet.udp.udp",
"ryu.lib.packet.arp.arp",
"os.popen",
"ryu.c... | [((882, 899), 'os.getenv', 'os.getenv', (['"""HOME"""'], {}), "('HOME')\n", (891, 899), False, 'import os\n'), ((24825, 24870), 'ryu.base.app_manager.require_app', 'app_manager.require_app', (['"""ryu.app.ofctl_rest"""'], {}), "('ryu.app.ofctl_rest')\n", (24848, 24870), False, 'from ryu.base import app_manager\n'), ((2... |
import argparse
import sys
import fmf
from fmfexporter.fmf_adapter import FMFAdapter
from fmfexporter.adapters import *
"""
Common arguments for the fmfexporter tool.
"""
class FMFExporterArgParser(object):
"""
Common argument parser for fmfexporter tool.
The arguments defined here must be provided, no ... | [
"fmfexporter.fmf_adapter.FMFAdapter.get_available_adapters",
"fmfexporter.fmf_adapter.FMFAdapter.get_adapter_class",
"argparse.ArgumentParser",
"fmfexporter.fmf_adapter.FMFAdapter.get_adapter",
"sys.exit"
] | [((437, 480), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""fmfexporter"""'}), "(prog='fmfexporter')\n", (460, 480), False, 'import argparse\n'), ((528, 563), 'fmfexporter.fmf_adapter.FMFAdapter.get_available_adapters', 'FMFAdapter.get_available_adapters', ([], {}), '()\n', (561, 563), False, ... |
import gym
from gym import core, spaces
from .gol import utils
import argparse
import itertools
import cv2
import numpy as np
import torch
from torch import ByteTensor, Tensor
from torch.nn import Conv2d, Parameter
from torch.nn.init import zeros_
from .world import World
class GameOfLifeEnv(core.Env):
def ... | [
"numpy.ones",
"gym.spaces.Discrete",
"numpy.zeros",
"torch.cuda.is_available",
"cv2.destroyAllWindows",
"cv2.waitKey",
"cv2.namedWindow"
] | [((3262, 3285), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3283, 3285), False, 'import cv2\n'), ((636, 681), 'gym.spaces.Discrete', 'spaces.Discrete', (['(self.num_tools * size * size)'], {}), '(self.num_tools * size * size)\n', (651, 681), False, 'from gym import core, spaces\n'), ((1779, 182... |
# -*- coding: utf-8 -*-
# Copyright 2018 NTT Communications
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by app... | [
"Common.send",
"os.path.getsize",
"os.path.exists",
"Common.get_item_config_path",
"os.makedirs",
"robot.libraries.DateTime.convert_time",
"re.match",
"robot.libraries.BuiltIn.BuiltIn",
"Common.get_result_path",
"shutil.rmtree"
] | [((2834, 2870), 'Common.send', 'Common.send', (['avaproxy', '"""ava::logout"""'], {}), "(avaproxy, 'ava::logout')\n", (2845, 2870), False, 'import Common\n'), ((3365, 3430), 'Common.send', 'Common.send', (['avaproxy', "('ava::send_file/%d/config.spf' % file_size)"], {}), "(avaproxy, 'ava::send_file/%d/config.spf' % fil... |
# %%
import pandas as pd
from fdevices.hantek.configs import *
from fdevices.hantek.constants import *
from fdevices.hantek.helpers import *
# MDSplus PyDevice Description
from fdevices.hantek.scopes import HT6000SCOPE
## Client script to Hantek Scope ##
"""
This code below forms a basic test client to connect to a... | [
"fdevices.hantek.scopes.HT6000SCOPE",
"pandas.read_csv"
] | [((3850, 3873), 'fdevices.hantek.scopes.HT6000SCOPE', 'HT6000SCOPE', ([], {}), '(**defaults)\n', (3861, 3873), False, 'from fdevices.hantek.scopes import HT6000SCOPE\n'), ((4981, 5022), 'pandas.read_csv', 'pd.read_csv', (['"""test_data.csv"""'], {'header': 'None'}), "('test_data.csv', header=None)\n", (4992, 5022), Tru... |
from tempfile import TemporaryDirectory
import os
from docutils.parsers.rst import Directive, directives
from docutils.nodes import raw
from pygments.lexers import get_lexer_by_name, guess_lexer
from pygments.styles import get_all_styles, get_style_by_name
from pygments.formatters import HtmlFormatter
from pygments.u... | [
"tempfile.TemporaryDirectory",
"pygments.highlight",
"os.path.join",
"pygments.formatters.HtmlFormatter",
"pygments.styles.get_style_by_name",
"docutils.nodes.raw",
"os.path.dirname",
"pygments.lexers.guess_lexer",
"pygments.styles.get_all_styles",
"pygments.lexers.get_lexer_by_name"
] | [((3581, 3615), 'pygments.styles.get_style_by_name', 'get_style_by_name', (['self.theme_name'], {}), '(self.theme_name)\n', (3598, 3615), False, 'from pygments.styles import get_all_styles, get_style_by_name\n'), ((3962, 3988), 'pygments.formatters.HtmlFormatter', 'HtmlFormatter', ([], {'style': 'Style'}), '(style=Styl... |
from unittest import mock
import pytest
from atlas.modules.transformer.base.models import ResourceFieldMap
from atlas.modules.transformer.artillery.models import Task, constants
from atlas.modules.transformer import interface
class TestTask:
@pytest.fixture(scope='class')
def open_api(self):
return... | [
"unittest.mock.MagicMock",
"unittest.mock.call",
"atlas.modules.transformer.interface.OpenAPITaskInterface",
"atlas.modules.transformer.base.models.ResourceFieldMap",
"unittest.mock.PropertyMock",
"pytest.fixture",
"atlas.modules.transformer.artillery.models.Task"
] | [((252, 281), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""class"""'}), "(scope='class')\n", (266, 281), False, 'import pytest\n'), ((321, 353), 'atlas.modules.transformer.interface.OpenAPITaskInterface', 'interface.OpenAPITaskInterface', ([], {}), '()\n', (351, 353), False, 'from atlas.modules.transformer im... |
from typing import Dict, Optional, List
import requests
from requests.exceptions import ConnectionError as requestConnectionError
from dataset_tools import QuestionCase
from entity_linking.base_entitity_linking_system import EntityLinkingSystem, EntityLinkingDict
from mapping.mapper import MapEntitiesDBpediaToWikidat... | [
"mapping.mapper.MapEntitiesDBpediaToWikidata"
] | [((1052, 1103), 'mapping.mapper.MapEntitiesDBpediaToWikidata', 'MapEntitiesDBpediaToWikidata', (['WIKIDATA_ENDPOINT_URL'], {}), '(WIKIDATA_ENDPOINT_URL)\n', (1080, 1103), False, 'from mapping.mapper import MapEntitiesDBpediaToWikidata\n')] |
"""Modules for find which files should be linted."""
import os
import sys
from typing import Tuple, List, Dict, Callable
from git import Repo
import structlog
# Get relative imports to work when the package is not installed on the PYTHONPATH.
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.p... | [
"structlog.get_logger",
"os.path.exists",
"buildscripts.linter.git.get_module_paths",
"buildscripts.patch_builds.change_data.find_changed_files_in_repos",
"os.environ.get",
"os.path.realpath",
"git.Repo"
] | [((642, 672), 'structlog.get_logger', 'structlog.get_logger', (['__name__'], {}), '(__name__)\n', (662, 672), False, 'import structlog\n'), ((911, 933), 'buildscripts.linter.git.get_module_paths', 'git.get_module_paths', ([], {}), '()\n', (931, 933), False, 'from buildscripts.linter import git\n'), ((2032, 2080), 'buil... |
# -*- coding: utf-8 -*-
import os
import configparser
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DATA_PATH = os.path.join(BASE_PATH, "data")
LOG_PATH = os.path.join(BASE_PATH, "log")
CONFIG_PATH = os.path.join(BASE_PATH, "config", "config.ini")
IMAGE_PATH = os.path.join(BASE_PATH, "image")... | [
"os.path.abspath",
"os.path.exists",
"os.path.join",
"configparser.ConfigParser"
] | [((139, 170), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""data"""'], {}), "(BASE_PATH, 'data')\n", (151, 170), False, 'import os\n'), ((182, 212), 'os.path.join', 'os.path.join', (['BASE_PATH', '"""log"""'], {}), "(BASE_PATH, 'log')\n", (194, 212), False, 'import os\n'), ((227, 274), 'os.path.join', 'os.path.joi... |
# Copyright (c) 2019 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
import os
import pickle
import pytest
from tempfile import TemporaryDirectory
from signac.core.h5store import H5StoreManager
try:
import h5py # noqa
H5PY = True... | [
"tempfile.TemporaryDirectory",
"pickle.dumps",
"os.path.join",
"signac.core.h5store.H5StoreManager",
"pytest.raises",
"pytest.mark.skipif",
"pytest.fixture"
] | [((361, 430), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not H5PY)'], {'reason': '"""test requires the h5py package"""'}), "(not H5PY, reason='test requires the h5py package')\n", (379, 430), False, 'import pytest\n'), ((465, 493), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n'... |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not... | [
"numpy.mean",
"plano.error",
"plano.stop_process",
"shlex.split",
"plano.unique_id",
"time.sleep",
"resource.getpagesize",
"numpy.array",
"plano.call",
"plano.file_size",
"os.sysconf",
"plano.start_process",
"numpy.percentile",
"time.time",
"json.dump"
] | [((16552, 16575), 'resource.getpagesize', '_resource.getpagesize', ([], {}), '()\n', (16573, 16575), True, 'import resource as _resource\n'), ((16487, 16531), 'os.sysconf', '_os.sysconf', (["_os.sysconf_names['SC_CLK_TCK']"], {}), "(_os.sysconf_names['SC_CLK_TCK'])\n", (16498, 16531), True, 'import os as _os\n'), ((481... |
from ninja.ninja_syntax import Writer
import StringIO
from sys import stdout
from os.path import join, splitext, relpath, split
import os
from clyde2.common import is_c, is_cpp
from clyde2.common import pprint_color, dict_contains
from clyde2.rtems import *
# Tools to walk over the tree collecting includes
import fun... | [
"StringIO.StringIO",
"ninja.ninja_syntax.Writer",
"os.path.join",
"os.path.splitext",
"clyde2.common.is_c",
"os.getcwd",
"clyde2.common.is_cpp",
"functools.partial",
"os.path.relpath"
] | [((2570, 2589), 'os.path.relpath', 'relpath', (['path', 'root'], {}), '(path, root)\n', (2577, 2589), False, 'from os.path import join, splitext, relpath, split\n'), ((2786, 2817), 'os.path.join', 'join', (['"""prefix"""', '"""include"""', 'name'], {}), "('prefix', 'include', name)\n", (2790, 2817), False, 'from os.pat... |
import os
import gevent.monkey
gevent.monkey.patch_all()
# import logging
import multiprocessing
#debug = True
dirs="./logs"
if not os.path.exists(dirs):
os.makedirs(dirs)
# loglevel = 'info'
bind = '0.0.0.0:5000'
pidfile = r'./logs/gunicorn.pid'
accesslog = r"./logs/micro_access.log"
errorlog = r"./... | [
"os.path.exists",
"multiprocessing.cpu_count",
"os.makedirs"
] | [((141, 161), 'os.path.exists', 'os.path.exists', (['dirs'], {}), '(dirs)\n', (155, 161), False, 'import os\n'), ((168, 185), 'os.makedirs', 'os.makedirs', (['dirs'], {}), '(dirs)\n', (179, 185), False, 'import os\n'), ((358, 385), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (383, 385), ... |
import logging
import numpy as np
from numpy.linalg import norm
from scipy.stats import moment
from scipy.special import cbrt
def common_usr(molecule, ctd=None, cst=None, fct=None, ftf=None, atoms_type=None):
"""Function used in USR and USRCAT function
Parameters
----------
molecule : oddt.toolkit.M... | [
"numpy.hstack",
"numpy.column_stack",
"numpy.array",
"numpy.linalg.norm",
"numpy.mean",
"numpy.cross",
"numpy.abs",
"numpy.amin",
"numpy.nan_to_num",
"scipy.stats.moment",
"logging.warning",
"numpy.isnan",
"numpy.std",
"numpy.fabs",
"numpy.append",
"numpy.sum",
"numpy.zeros",
"nump... | [((1859, 1884), 'numpy.linalg.norm', 'norm', (['(atoms - ctd)'], {'axis': '(1)'}), '(atoms - ctd, axis=1)\n', (1863, 1884), False, 'from numpy.linalg import norm\n'), ((1970, 1995), 'numpy.linalg.norm', 'norm', (['(atoms - cst)'], {'axis': '(1)'}), '(atoms - cst, axis=1)\n', (1974, 1995), False, 'from numpy.linalg impo... |
#!/usr/bin/env python
#
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
"""A tiny web server.
This is intended to be used for testing.
"""
import BaseHTTPServer
import logging
import os
import... | [
"logging.getLogger",
"os.path.exists",
"shutil.copyfileobj",
"os.path.join",
"SimpleHTTPServer.SimpleHTTPRequestHandler.end_headers",
"os.getcwd",
"os.chdir",
"os.path.isdir",
"SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET",
"sys.exit",
"urlparse.urlsplit",
"logging.info",
"logging.error"... | [((979, 1013), 'os.path.join', 'os.path.join', (['*SAFE_DIR_COMPONENTS'], {}), '(*SAFE_DIR_COMPONENTS)\n', (991, 1013), False, 'import os\n'), ((1100, 1173), 'logging.error', 'logging.error', (['"""httpd.py should only be run from the %s"""', 'SAFE_DIR_SUFFIX'], {}), "('httpd.py should only be run from the %s', SAFE_DI... |
from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
class UCSCInstaller(ClusterSetup):
def run(self, nodes, master, user, user_shell, volumes):
for node in nodes:
log.info("Installing UCSC-Tools 287 on %s" % (node.alias))
node.ssh.execute('mkdir -p /opt/software/ucsc/287')
... | [
"starcluster.logger.log.info"
] | [((203, 259), 'starcluster.logger.log.info', 'log.info', (["('Installing UCSC-Tools 287 on %s' % node.alias)"], {}), "('Installing UCSC-Tools 287 on %s' % node.alias)\n", (211, 259), False, 'from starcluster.logger import log\n')] |
# ===============================================================================
# Copyright 2020 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICE... | [
"traits.api.Enum",
"traits.api.Str",
"traits.api.on_trait_change",
"csv.DictReader",
"traitsui.editors.EnumEditor",
"traitsui.api.View",
"traits.api.Dict",
"traitsui.api.Item",
"traits.api.Button",
"traits.api.Range",
"os.path.basename",
"csv.Sniffer",
"traits.api.Bool",
"traitsui.api.UIte... | [((2378, 2391), 'traits.api.Enum', 'Enum', (['MARKERS'], {}), '(MARKERS)\n', (2382, 2391), False, 'from traits.api import Str, Enum, Dict, File, Float, Range, List, HasTraits, Button, Int, Color, Bool, on_trait_change\n'), ((2428, 2444), 'traits.api.Range', 'Range', (['(0)', '(360)', '(0)'], {}), '(0, 360, 0)\n', (2433... |
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 10 13:24:45 2021
@author: admin
"""
import numpy as np
import struct
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.nn.functional as F
import os
# Test set image file
test_images_file = 'MNIST/t10k-images.idx3-ubyte'
# Test set label file
test_labels_file = 'MNIS... | [
"struct.calcsize",
"numpy.eye",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.load",
"torch.max",
"torch.from_numpy",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"numpy.empty",
"torch.nn.Linear",
"struct.unpack_from"
] | [((727, 775), 'struct.unpack_from', 'struct.unpack_from', (['fmt_header', 'bin_data', 'offset'], {}), '(fmt_header, bin_data, offset)\n', (745, 775), False, 'import struct\n'), ((932, 959), 'struct.calcsize', 'struct.calcsize', (['fmt_header'], {}), '(fmt_header)\n', (947, 959), False, 'import struct\n'), ((1253, 1295)... |
from src.managers.core.logging_manager import logging_manager
from src.utils.common_routines import quit
import pygame
class event_manager:
def __init__(self):
self.callbacks = {}
logging_manager().log.debug("Event manager initialized.")
def get_events(self):
events = pygame.event.ge... | [
"src.managers.core.logging_manager.logging_manager",
"src.utils.common_routines.quit",
"pygame.event.get"
] | [((305, 323), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (321, 323), False, 'import pygame\n'), ((447, 453), 'src.utils.common_routines.quit', 'quit', ([], {}), '()\n', (451, 453), False, 'from src.utils.common_routines import quit\n'), ((203, 220), 'src.managers.core.logging_manager.logging_manager', 'l... |
import json
import csv
# Reference the JSON file that is created using the API
filename = 'trending.json'
f = open(filename)
tiktok_data = json.load(f)
# Writing into CSV
with open('tiktok-trending.csv', 'w', newline='') as file:
writer = csv.writer(file)
writer.writerow(["TikTok ID", "User Verified", "Music... | [
"json.load",
"csv.writer"
] | [((141, 153), 'json.load', 'json.load', (['f'], {}), '(f)\n', (150, 153), False, 'import json\n'), ((246, 262), 'csv.writer', 'csv.writer', (['file'], {}), '(file)\n', (256, 262), False, 'import csv\n')] |
# coding: utf-8
"""
Finnhub API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
fro... | [
"six.iteritems",
"finnhub.configuration.Configuration"
] | [((4233, 4266), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (4246, 4266), False, 'import six\n'), ((1324, 1339), 'finnhub.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1337, 1339), False, 'from finnhub.configuration import Configuration\n')] |
import dryscrape
from bs4 import BeautifulSoup
session = dryscrape.Session()
def getSoup(url):
"""Returns BeatifulSoup object of the given URL.
Arguments:
url = URL of the corresponding webpage
Returns:
    BeautifulSoup object of the given URL
"""
try:
session.visit(url)
... | [
"bs4.BeautifulSoup",
"dryscrape.Session"
] | [((58, 77), 'dryscrape.Session', 'dryscrape.Session', ([], {}), '()\n', (75, 77), False, 'import dryscrape\n'), ((387, 418), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response', '"""lxml"""'], {}), "(response, 'lxml')\n", (400, 418), False, 'from bs4 import BeautifulSoup\n')] |
import math
import hydrostats as hs
import hydrostats.data as hd
import numpy as np
import pandas as pd
def solve_gumbel1(std, xbar, rp):
"""
Solves the Gumbel Type I pdf = exp(-exp(-b))
where b is the covariate
"""
# xbar = statistics.mean(year_max_flow_list)
# std = statistics.stdev(year_ma... | [
"numpy.nanpercentile",
"pandas.merge",
"math.log",
"hydrostats.make_table",
"hydrostats.data.merge_data",
"pandas.DataFrame",
"numpy.transpose"
] | [((595, 643), 'hydrostats.data.merge_data', 'hd.merge_data', ([], {'sim_df': 'simulated', 'obs_df': 'observed'}), '(sim_df=simulated, obs_df=observed)\n', (608, 643), True, 'import hydrostats.data as hd\n'), ((665, 713), 'hydrostats.data.merge_data', 'hd.merge_data', ([], {'sim_df': 'corrected', 'obs_df': 'observed'}),... |
# This file is part of the Indico plugins.
# Copyright (C) 2017 - 2021 <NAME>, <NAME>, CERN
#
# The Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License;
# see the LICENSE file for more details.
from indico.util.i18n import make_bound_gettext
_ = make_bo... | [
"indico.util.i18n.make_bound_gettext"
] | [((313, 349), 'indico.util.i18n.make_bound_gettext', 'make_bound_gettext', (['"""payment_sixpay"""'], {}), "('payment_sixpay')\n", (331, 349), False, 'from indico.util.i18n import make_bound_gettext\n')] |
from crummycm.validation.types.values.element.numeric import Numeric
A_EX_TEMP = {"my_num": Numeric(default_value=int(0), required=False, is_type=int)}
# type
A_required_EX_TEMP = {"my_num": Numeric(required=True, is_type=float)}
# # type
A_int_EX_TEMP = {"my_num": Numeric(is_type=int)}
A_float_EX_TEMP = {"my_num": Nu... | [
"crummycm.validation.types.values.element.numeric.Numeric"
] | [((192, 229), 'crummycm.validation.types.values.element.numeric.Numeric', 'Numeric', ([], {'required': '(True)', 'is_type': 'float'}), '(required=True, is_type=float)\n', (199, 229), False, 'from crummycm.validation.types.values.element.numeric import Numeric\n'), ((267, 287), 'crummycm.validation.types.values.element.... |
""" Layer service """
from geetiles.errors import LayerNotFound
from geetiles.utils.request import request_to_eacw_api
class LayerService(object):
@staticmethod
def execute(config):
response = request_to_eacw_api(config)
if not response or response.get('errors'):
raise LayerNotFo... | [
"geetiles.errors.LayerNotFound",
"geetiles.utils.request.request_to_eacw_api"
] | [((213, 240), 'geetiles.utils.request.request_to_eacw_api', 'request_to_eacw_api', (['config'], {}), '(config)\n', (232, 240), False, 'from geetiles.utils.request import request_to_eacw_api\n'), ((310, 350), 'geetiles.errors.LayerNotFound', 'LayerNotFound', ([], {'message': '"""Layer not found"""'}), "(message='Layer n... |
import ui, console
import os
import math
def save_action(sender):
with open('image_file.png', 'wb') as fp:
fp.write(img.to_png())
console.hud_alert('image saved in the file image_file.png')
def showimage_action(sender):
img.show()
def make_polygon(num_sides, x=0, y=0, radius=100, phase=0... | [
"console.hud_alert",
"ui.Image",
"ui.Path.rect",
"ui.ImageContext",
"ui.View",
"ui.Path",
"math.cos",
"ui.ImageView",
"ui.ButtonItem",
"ui.set_color",
"math.sin"
] | [((1164, 1195), 'ui.View', 'ui.View', ([], {'frame': '(0, 0, 500, 500)'}), '(frame=(0, 0, 500, 500))\n', (1171, 1195), False, 'import ui, console\n'), ((1203, 1239), 'ui.ImageView', 'ui.ImageView', ([], {'frame': '(0, 0, 500, 500)'}), '(frame=(0, 0, 500, 500))\n', (1215, 1239), False, 'import ui, console\n'), ((1302, 1... |
#!/usr/bin/env python3
"""
This script demonstrates how to use the MAML implementation of L2L.
Each task i consists of learning the parameters of a Normal distribution N(mu_i, sigma_i).
The parameters mu_i, sigma_i are themselves sampled from a distribution N(mu, sigma).
"""
import torch as th
from torch import nn, ... | [
"learn2learn.algorithms.MAML",
"torch.distributions.Normal",
"torch.zeros",
"torch.randn",
"torch.ones"
] | [((796, 831), 'learn2learn.algorithms.MAML', 'l2l.algorithms.MAML', (['model'], {'lr': '(0.01)'}), '(model, lr=0.01)\n', (815, 831), True, 'import learn2learn as l2l\n'), ((653, 685), 'torch.distributions.Normal', 'dist.Normal', (['self.mu', 'self.sigma'], {}), '(self.mu, self.sigma)\n', (664, 685), True, 'from torch i... |
import os
from distutils.util import strtobool
from dotenv import load_dotenv
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
dotenv_file = os.path.join(os.path.dirname(BASE_DIR), ".env")
if os.path.isfile(dotenv_file):
load_dotenv(dotenv_file)
SECRET_KEY = os.environ.get(
'SECRET_KEY'... | [
"os.getenv",
"os.environ.get",
"os.path.join",
"dotenv.load_dotenv",
"os.path.isfile",
"os.path.dirname",
"os.path.abspath"
] | [((217, 244), 'os.path.isfile', 'os.path.isfile', (['dotenv_file'], {}), '(dotenv_file)\n', (231, 244), False, 'import os\n'), ((288, 360), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {'default': '"""SUPffw-HeoKL3-K3Y-F0R-MY-PR0J3CT"""'}), "('SECRET_KEY', default='SUPffw-HeoKL3-K3Y-F0R-MY-PR0J3CT')\n", (... |
import numpy as np
from typing import Callable
from ..problem import Problem
def rescale(points, lb: np.ndarray, ub: np.ndarray) -> np.ndarray:
"""
Rescale points from [0, 1] to [lb, ub].
Parameters
----------
points: ndarray, shape=(n_starts, dim)
Points in bounds [lb, ub]
lb, ub: n... | [
"numpy.argsort",
"numpy.zeros",
"numpy.zeros_like",
"numpy.empty"
] | [((1425, 1450), 'numpy.zeros', 'np.zeros', (['(n_starts, dim)'], {}), '((n_starts, dim))\n', (1433, 1450), True, 'import numpy as np\n'), ((2119, 2145), 'numpy.zeros_like', 'np.zeros_like', (['startpoints'], {}), '(startpoints)\n', (2132, 2145), True, 'import numpy as np\n'), ((2243, 2264), 'numpy.empty', 'np.empty', (... |
from datetime import date
atual = date.today().year
nome = str(input('Enter your name: ')).strip().title().split()
ano_Nasc = int(input(f'{nome[0]}, enter your year of birth with 4 digits (0000): '))
if ano_Nasc < 100: # in case the user enters the year with only two digits (if you know an easier way to... | [
"datetime.date.today"
] | [((34, 46), 'datetime.date.today', 'date.today', ([], {}), '()\n', (44, 46), False, 'from datetime import date\n')] |
from math import log, ceil
minRange = 273025
maxRange = 767253
def hasRepeat(number):
lastDigit = None
for digit in str(number):
if (lastDigit is not None and digit == lastDigit):
return True
lastDigit = digit
return False
def alwaysIncreasing(number):
lastDigit = Non... | [
"math.log"
] | [((519, 534), 'math.log', 'log', (['number', '(10)'], {}), '(number, 10)\n', (522, 534), False, 'from math import log, ceil\n')] |
import os
from . import PyScoreDraft
ScoreDraftPath_old= os.path.dirname(__file__)
ScoreDraftPath=""
#\\escaping fix
for ch in ScoreDraftPath_old:
if ch=="\\":
ScoreDraftPath+="/"
else:
ScoreDraftPath+=ch
if os.name == 'nt':
os.environ["PATH"]+=";"+ScoreDraftPath
elif os.name == "posix":
os.environ["PATH"]+="... | [
"os.path.isfile",
"os.path.dirname",
"os.path.isdir",
"os.listdir"
] | [((58, 83), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (73, 83), False, 'import os\n'), ((1811, 1842), 'os.path.isdir', 'os.path.isdir', (['PERC_SAMPLE_ROOT'], {}), '(PERC_SAMPLE_ROOT)\n', (1824, 1842), False, 'import os\n'), ((2395, 2427), 'os.path.isdir', 'os.path.isdir', (['INSTR_SAMPL... |
import unittest
from aws_ec2 import EC2Wrapper
from aws_ec2 import SubnetNotFoundError
from aws_ec2 import ImageNotFoundError
class TestEC2Wrapper(unittest.TestCase):
def setUp(self):
self.wrapper = EC2Wrapper()
def tearDown(self):
super().tearDown()
def test_find_by_name(self):
... | [
"unittest.main",
"aws_ec2.EC2Wrapper"
] | [((4023, 4038), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4036, 4038), False, 'import unittest\n'), ((215, 227), 'aws_ec2.EC2Wrapper', 'EC2Wrapper', ([], {}), '()\n', (225, 227), False, 'from aws_ec2 import EC2Wrapper\n')] |
import logging
import numpy as np
from luigi.util import requires
from netCDF4 import Dataset, Group, Variable
from iasi.file import CopyNetcdfFile, MoveVariables
from iasi.quadrant import Quadrant
from iasi.util import root_group_of
logger = logging.getLogger(__name__)
class CompositionException(Exception):
... | [
"logging.getLogger",
"numpy.ma.is_masked",
"iasi.quadrant.Quadrant.for_disassembly",
"iasi.util.root_group_of"
] | [((247, 274), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (264, 274), False, 'import logging\n'), ((964, 989), 'iasi.util.root_group_of', 'root_group_of', (['self.group'], {}), '(self.group)\n', (977, 989), False, 'from iasi.util import root_group_of\n'), ((1727, 1790), 'iasi.quadrant.... |
import os
import shutil
import subprocess
import pandas as pd
from oemof.tools.logger import define_logging
from pandas.testing import assert_frame_equal
import yaml
def setup_logging(log_path):
define_logging(logpath=log_path, logfile="oemoflex.log")
def load_yaml(file_path):
with open(file_path, "r") as ... | [
"os.listdir",
"pandas.read_csv",
"subprocess.run",
"os.path.join",
"os.path.split",
"oemof.tools.logger.define_logging",
"yaml.safe_load",
"os.path.commonpath",
"shutil.rmtree",
"pandas.DataFrame",
"pandas.testing.assert_frame_equal",
"pandas.concat",
"os.walk",
"os.path.relpath"
] | [((202, 258), 'oemof.tools.logger.define_logging', 'define_logging', ([], {'logpath': 'log_path', 'logfile': '"""oemoflex.log"""'}), "(logpath=log_path, logfile='oemoflex.log')\n", (216, 258), False, 'from oemof.tools.logger import define_logging\n'), ((690, 803), 'pandas.read_csv', 'pd.read_csv', (['filepath'], {'head... |
import torch
from .. import utils
MODULE = torch
FP16_FUNCS = [
# Low level functions wrapped by torch.nn layers.
# The wrapper layers contain the weights which are then passed in as a parameter
# to these functions.
'conv1d',
'conv2d',
'conv3d',
'conv_transpose1d',
'conv_transpose2d'... | [
"torch.__version__.split"
] | [((861, 889), 'torch.__version__.split', 'torch.__version__.split', (['"""."""'], {}), "('.')\n", (884, 889), False, 'import torch\n')] |
import copy
from reportlab.platypus import Table, TableStyle, SimpleDocTemplate, Paragraph, Spacer, Image,ListFlowable, ListItem, PageBreak
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import cm
from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY
fr... | [
"copy.copy",
"reportlab.lib.styles.getSampleStyleSheet",
"reportlab.lib.styles.ParagraphStyle"
] | [((908, 929), 'reportlab.lib.styles.getSampleStyleSheet', 'getSampleStyleSheet', ([], {}), '()\n', (927, 929), False, 'from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle\n'), ((2956, 2985), 'reportlab.lib.styles.ParagraphStyle', 'ParagraphStyle', (['"""Sub-Heading"""'], {}), "('Sub-Heading')\n", (2970... |
"""
Works for weeks 21 to 29 of the 2020 reports
"""
from .BaseClasses import ProcessFileWF
from openpyxl import load_workbook
from openpyxl.styles import PatternFill
import re
from openpyxl.styles.colors import Color
from copy import copy
from pathlib import Path
class main(ProcessFileWF):
def __ini... | [
"openpyxl.load_workbook",
"copy.copy",
"re.fullmatch",
"pathlib.Path"
] | [((4872, 4926), 'openpyxl.load_workbook', 'load_workbook', ([], {'filename': 'self.input_file', 'keep_vba': '(True)'}), '(filename=self.input_file, keep_vba=True)\n', (4885, 4926), False, 'from openpyxl import load_workbook\n'), ((5045, 5100), 'openpyxl.load_workbook', 'load_workbook', ([], {'filename': 'self.output_fi... |
# Author: <NAME> <<EMAIL>>
# License: BSD 3 clause
import warnings
import libsvmdata
def fetch_libsvm(dataset, replace=False, normalize=True, min_nnz=3):
"""
    This function is deprecated; we now rely on the libsvmdata package.
Parameters
----------
dataset: string
Name of the dataset.
... | [
"warnings.simplefilter",
"warnings.warn",
"libsvmdata.fetch_libsvm"
] | [((556, 602), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""', 'FutureWarning'], {}), "('always', FutureWarning)\n", (577, 602), False, 'import warnings\n'), ((607, 775), 'warnings.warn', 'warnings.warn', (['"""celer.datasets.fetch_libsvm is deprecated and will be removed in version 0.6. Use the ligh... |
# write a python program to multiply three numbers
num1 = 1.5
num2 = 6.3
num3 = -2.3
product = num1 * num2 * num3
print(f'Product: {product}')
# write a python function that when given two numbers, would divide the first number by second number and return the quotient and remainder
def divide_first_number_by_second(n... | [
"math.sin"
] | [((15314, 15329), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (15322, 15329), False, 'import math\n')] |
"""
@author: <NAME>, University of Washington, Seattle, July 2019
@email: dflemin3 (at) uw (dot) edu
This script produces initial conditions for a synthetic population of ultracool
dwarfs to examine LXUV/Lbol as a function of time. All initial conditions are
sampled from the prior distributions used to constrain the ... | [
"pandas.DataFrame",
"os.path.join",
"pandas.read_csv"
] | [((1064, 1125), 'pandas.read_csv', 'pd.read_csv', (['"""mcInitialConditions.csv"""'], {'index_col': '(0)', 'header': '(0)'}), "('mcInitialConditions.csv', index_col=0, header=0)\n", (1075, 1125), True, 'import pandas as pd\n'), ((1209, 1346), 'pandas.DataFrame', 'pd.DataFrame', (["{'dLbolAge': lum, 'dLXUVAge': lumXUV, ... |
import os
import cv2
import sys
import math
import pyprind
import torch
import numpy as np
import torch.tensor as Tensor
# import torch.nn.functional as F
import torchvision.transforms as transforms
import dl_modules.dataset as ds
epsilon = 0.008
def enhance_images(folder: str, denoise_strength: int,
... | [
"cv2.calcHist",
"pyprind.ProgBar",
"torch.abs",
"os.makedirs",
"numpy.ones",
"numpy.random.rand",
"torch.stack",
"cv2.filter2D",
"cv2.addWeighted",
"os.path.isdir",
"dl_modules.dataset.Dataset",
"cv2.cvtColor",
"torch.utils.data.DataLoader",
"torch.no_grad",
"torchvision.transforms.ToTen... | [((624, 703), 'dl_modules.dataset.Dataset', 'ds.Dataset', (['folder'], {'scale': 'ds.scale', 'normalization': 'transform', 'downscaling': '"""none"""'}), "(folder, scale=ds.scale, normalization=transform, downscaling='none')\n", (634, 703), True, 'import dl_modules.dataset as ds\n'), ((742, 844), 'torch.utils.data.Data... |
# pylint: disable=redefined-outer-name,protected-access
# pylint: disable=missing-function-docstring,missing-module-docstring,missing-class-docstring
import pytest
from awesome_panel_extensions.site import Site
@pytest.fixture
def site():
return Site(name="awesome-panel.org")
def test_site(site, au... | [
"awesome_panel_extensions.site.Site"
] | [((262, 292), 'awesome_panel_extensions.site.Site', 'Site', ([], {'name': '"""awesome-panel.org"""'}), "(name='awesome-panel.org')\n", (266, 292), False, 'from awesome_panel_extensions.site import Site\n')] |
from typing import Tuple, List
import argparse
import random
from pathlib import Path
from itertools import chain
from functools import reduce
import cv2
import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras.utils import Sequence
from tensorflow.keras import optimizers as op... | [
"numpy.random.rand",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.layers.average",
"tensorflow.keras.layers.Input",
"tensorflow.keras.layers.Conv2D",
"argparse.ArgumentParser",
"tensorflow.keras.datasets.cifar10.load_data",
"numpy.empty",
"numpy.random.seed",
"tensorflow.keras.m... | [((477, 494), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (488, 494), False, 'import random\n'), ((499, 519), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (513, 519), True, 'import numpy as np\n'), ((8822, 8885), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'descriptio... |
import cv2
import json
import numpy as np
from rich import print
from PIL import ImageFile
import torch
from torchvision import transforms
from torch.utils.data import Dataset, DataLoader
from constants import NORM_MEAN, NORM_STD, DPAC_AGE_LABEL_TO_IDX, DPAC_GENDER_LABEL_TO_IDX, \
DPAC_EMOTION_LABEL_TO_IDX, IMG_H... | [
"torch.manual_seed",
"numpy.reshape",
"torchvision.transforms.ToPILImage",
"torchvision.transforms.RandomHorizontalFlip",
"torch.tensor",
"torchvision.transforms.ColorJitter",
"torchvision.transforms.Normalize",
"torch.utils.data.DataLoader",
"json.load",
"cv2.resize",
"torchvision.transforms.To... | [((447, 467), 'torch.manual_seed', 'torch.manual_seed', (['(0)'], {}), '(0)\n', (464, 467), False, 'import torch\n'), ((6901, 7003), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'pin_memory': '(True)', 'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': 'NUM_WORKERS'}), '(dataset, pin_memory=... |
import sqlite3
from prettytable import PrettyTable
def show_category():
conn = sqlite3.connect("task.db")
cur = conn.cursor()
slct_data = "select distinct category from todo where 1 order by category asc"
cur.execute(slct_data)
records = cur.fetchall()
x = PrettyTable()
x.field_names = ["category"]
for row... | [
"prettytable.PrettyTable",
"sqlite3.connect"
] | [((81, 107), 'sqlite3.connect', 'sqlite3.connect', (['"""task.db"""'], {}), "('task.db')\n", (96, 107), False, 'import sqlite3\n'), ((266, 279), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (277, 279), False, 'from prettytable import PrettyTable\n')] |
#!/usr/bin/env python
from __future__ import division, absolute_import, print_function
__author__ = '<NAME>'
from setuptools import setup, find_packages,Extension
from setuptools.command.install import install
#from distutils.extension import Extension
import distutils.command.install as orig
from distutils.comm... | [
"os.getenv",
"setuptools.command.install.install.run",
"setuptools.setup",
"setuptools.Extension",
"shutil.rmtree",
"json.load",
"distutils.command.build.build.run",
"distutils.sysconfig.get_python_lib",
"glob.glob",
"os.remove"
] | [((2718, 2832), 'setuptools.Extension', 'Extension', (['"""jetkernel/_jetkernel"""'], {'sources': 'src_files', 'swig_opts': "['-v']", 'include_dirs': "['jetkernel_src/include']"}), "('jetkernel/_jetkernel', sources=src_files, swig_opts=['-v'],\n include_dirs=['jetkernel_src/include'])\n", (2727, 2832), False, 'from ... |
################################################################################
# @file pyMeshVtk.py
# @author <NAME>
# @brief
# @version 1.0.0
# @date 2022-02-22
# @copyright Copyright (c) 2022 by <NAME>.
# This work is licensed under terms of the MIT license (<LICENSE>).
############################################... | [
"logging.getLogger",
"logging.StreamHandler",
"argparse.ArgumentParser",
"logging.Formatter",
"h5py.File",
"pymeshfv3d.LessThanFilter",
"pymeshfv3d.set_vtk_celldata",
"pymeshfv3d.write_vtk_grid",
"pymeshfv3d.generate_vtk_grid"
] | [((628, 668), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stderr'}), '(stream=sys.stderr)\n', (649, 668), False, 'import logging\n'), ((851, 891), 'logging.StreamHandler', 'logging.StreamHandler', ([], {'stream': 'sys.stdout'}), '(stream=sys.stdout)\n', (872, 891), False, 'import logging\n'),... |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Encoder_Control_GUI_ONLY.ui'
#
# Created by: PyQt5 UI code generator 5.15.2
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 im... | [
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QApplication",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QTextEdit",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtWidgets.QGroupBox",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.Q... | [((11979, 12011), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (12001, 12011), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12025, 12048), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (12046, 12048), False, 'from PyQt5 import QtC... |
import re
import uuid
from subprocess import run
from tempfile import NamedTemporaryFile
from typing import List, Optional
import conda_pack
import yaml
from ...utils import logger
from ..constants import MLServerEnvDeps, MLServerRuntimeEnvDeps
from ..metadata import ModelFramework
def _get_env(conda_env_file_path:... | [
"re.split",
"yaml.safe_dump",
"re.compile",
"subprocess.run",
"uuid.uuid4",
"yaml.safe_load",
"tempfile.NamedTemporaryFile",
"conda_pack.pack"
] | [((1651, 1704), 'subprocess.run', 'run', (['cmd'], {'shell': '(True)', 'check': '(True)', 'capture_output': '(True)'}), '(cmd, shell=True, check=True, capture_output=True)\n', (1654, 1704), False, 'from subprocess import run\n'), ((1716, 1743), 'yaml.safe_load', 'yaml.safe_load', (['proc.stdout'], {}), '(proc.stdout)\n... |
import torch
from copy import deepcopy
import numpy as np
from .torch_triggered_dataset import TorchTriggeredDataset
from .dataset_preprocessor import datasetPreprocessor
class SCDatasetPreprocessor(datasetPreprocessor):
def __init__(self, dataset, trigger, trigger_models, tokenizer):
super().__init__(dat... | [
"numpy.unique",
"torch.stack",
"torch.exp",
"numpy.isin",
"numpy.array",
"torch.tensor",
"numpy.argwhere",
"torch.sum",
"copy.deepcopy",
"torch.no_grad",
"torch.zeros_like",
"torch.cat"
] | [((5558, 5573), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5571, 5573), False, 'import torch\n'), ((5656, 5671), 'copy.deepcopy', 'deepcopy', (['batch'], {}), '(batch)\n', (5664, 5671), False, 'from copy import deepcopy\n'), ((6169, 6195), 'torch.stack', 'torch.stack', (['probabilities'], {}), '(probabilities... |
import logging
from farm.modeling.tokenization import Tokenizer, tokenize_with_metadata, truncate_sequences
from transformers import BertTokenizer, RobertaTokenizer, XLNetTokenizer
import re
def test_basic_loading(caplog):
tokenizer = Tokenizer.load(
pretrained_model_name_or_path="bert-base-cased",
... | [
"farm.modeling.tokenization.truncate_sequences",
"farm.modeling.tokenization.Tokenizer.load",
"farm.modeling.tokenization.tokenize_with_metadata",
"re.sub"
] | [((242, 329), 'farm.modeling.tokenization.Tokenizer.load', 'Tokenizer.load', ([], {'pretrained_model_name_or_path': '"""bert-base-cased"""', 'do_lower_case': '(True)'}), "(pretrained_model_name_or_path='bert-base-cased',\n do_lower_case=True)\n", (256, 329), False, 'from farm.modeling.tokenization import Tokenizer, ... |
import cv2
import time
import sys
import numpy as np
class ObjectDetection():
def __init__(self):
self.INPUT_WIDTH = 640
self.INPUT_HEIGHT = 640
self.SCORE_THRESHOLD = 0.2
self.NMS_THRESHOLD = 0.4
self.CONFIDENCE_THRESHOLD = 0.4
self.class_list = self.load_classes()... | [
"cv2.dnn.blobFromImage",
"cv2.rectangle",
"cv2.putText",
"time.time_ns",
"cv2.minMaxLoc",
"numpy.zeros",
"numpy.array",
"cv2.VideoCapture",
"cv2.dnn.NMSBoxes",
"cv2.dnn.readNet"
] | [((462, 476), 'time.time_ns', 'time.time_ns', ([], {}), '()\n', (474, 476), False, 'import time\n'), ((606, 654), 'cv2.VideoCapture', 'cv2.VideoCapture', (['self.cap_device', 'cv2.CAP_DSHOW'], {}), '(self.cap_device, cv2.CAP_DSHOW)\n', (622, 654), False, 'import cv2\n'), ((829, 947), 'cv2.dnn.readNet', 'cv2.dnn.readNet... |
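The cv2.dnn calls listed above form the standard YOLO-style inference loop. A hedged sketch of that flow — the ONNX file name and the placeholder frame are hypothetical, while the 640x640 input size and the score/NMS thresholds come from the constants visible in the snippet:

import cv2
import numpy as np

net = cv2.dnn.readNet("yolov5s.onnx")                  # hypothetical model file
image = np.zeros((640, 640, 3), dtype=np.uint8)         # placeholder frame

# Preprocess, run the network, then suppress overlapping boxes.
blob = cv2.dnn.blobFromImage(image, 1 / 255.0, (640, 640), swapRB=True, crop=False)
net.setInput(blob)
outputs = net.forward()

boxes = [[10, 10, 50, 50]]                             # placeholder detections
confidences = [0.9]
keep = cv2.dnn.NMSBoxes(boxes, confidences, 0.2, 0.4)     # thresholds from the snippet
print(keep)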
from menpodetect.pico import load_pico_frontal_face_detector
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
pico_detector = load_pico_frontal_face_detector()
pcs = pico_detector(takeo_copy)
assert len(pcs) == 1
asser... | [
"menpodetect.pico.load_pico_frontal_face_detector",
"menpo.io.import_builtin_asset.takeo_ppm"
] | [((93, 129), 'menpo.io.import_builtin_asset.takeo_ppm', 'mio.import_builtin_asset.takeo_ppm', ([], {}), '()\n', (127, 129), True, 'import menpo.io as mio\n'), ((216, 249), 'menpodetect.pico.load_pico_frontal_face_detector', 'load_pico_frontal_face_detector', ([], {}), '()\n', (247, 249), False, 'from menpodetect.pico i... |
from os import listdir, environ
from os.path import isfile, join
from discord.ext import commands
from logging import StreamHandler
from dotenv import dotenv_values
import discord.ext.commands.view
import discord
import sys
import traceback
import logging
# load config from dot env file
config = None
if "PYCHARM_HOST... | [
"logging.getLogger",
"logging.basicConfig",
"logging.StreamHandler",
"os.listdir",
"discord.ext.commands.Bot",
"os.path.join",
"traceback.print_exc",
"dotenv.dotenv_values"
] | [((1002, 1029), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1019, 1029), False, 'import logging\n'), ((1516, 1601), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': "config['prefix']", 'description': "config['description']"}), "(command_prefix=config['prefix'], desc... |
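The discord.py and dotenv calls above are the usual bot bootstrap. A hedged sketch — the .env path, prefix/description/token keys and the ping command are hypothetical; the extracted Bot(...) call omits intents, which newer discord.py releases require, so they are added here:

import discord
from discord.ext import commands
from dotenv import dotenv_values

config = dotenv_values(".env")                # hypothetical .env with prefix/description/token

intents = discord.Intents.default()          # required by discord.py 2.x
bot = commands.Bot(command_prefix=config.get("prefix", "!"),
             description=config.get("description", ""),
             intents=intents)

@bot.command()
async def ping(ctx):
    """Hypothetical command confirming the bot is alive."""
    await ctx.send("pong")

bot.run(config["token"])                         # hypothetical token key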
"""Реализация разделов сайта для работы с пользователями."""
import os
from flask import Blueprint, flash, redirect, render_template, request, url_for
from flask_login import current_user, login_required, login_user, logout_user
from werkzeug.urls import url_parse
from webapp.account.models import Account
from webapp... | [
"flask.render_template",
"flask.request.args.get",
"flask.flash",
"webapp.user.forms.RegistrationForm",
"werkzeug.urls.url_parse",
"flask_login.login_user",
"flask_login.logout_user",
"os.path.join",
"webapp.user.models.User",
"flask.url_for",
"flask.redirect",
"webapp.db.db.session.commit",
... | [((595, 643), 'flask.Blueprint', 'Blueprint', (['"""user"""', '__name__'], {'url_prefix': '"""/users"""'}), "('user', __name__, url_prefix='/users')\n", (604, 643), False, 'from flask import Blueprint, flash, redirect, render_template, request, url_for\n'), ((938, 949), 'webapp.user.forms.LoginForm', 'LoginForm', ([], ... |
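The Flask and flask-login calls extracted above add up to a conventional login view. A hedged sketch of that flow; the endpoint name, template path and form fields are illustrative, while Blueprint('user', ...), LoginForm, login_user and the webapp modules mirror the extraction:

from flask import Blueprint, flash, redirect, render_template, url_for
from flask_login import current_user, login_user
from webapp.user.forms import LoginForm      # module named in the extraction
from webapp.user.models import User

blueprint = Blueprint("user", __name__, url_prefix="/users")

@blueprint.route("/login", methods=["GET", "POST"])
def login():
    if current_user.is_authenticated:
        return redirect(url_for("index"))                  # hypothetical endpoint
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(username=form.username.data).first()  # illustrative fields
        if user is not None and user.check_password(form.password.data):
            login_user(user)
            return redirect(url_for("index"))
        flash("Invalid username or password")
    return render_template("user/login.html", form=form)   # hypothetical template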
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may c... | [
"oci.util.formatted_flat_dict",
"oci.util.value_allowed_none_or_none_sentinel"
] | [((12115, 12140), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (12134, 12140), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n'), ((10817, 10885), 'oci.util.value_allowed_none_or_none_sentinel', 'value_allowed_none_or_none_sent... |
from django.db import models
from PIL import Image
class Animal(models.Model):
category = models.CharField(max_length=30)
gender = models.CharField(max_length=20, blank=True)
picture = models.ImageField(upload_to='animals/photos/')
adopted = models.BooleanField(default=False)
def __str__(self):
... | [
"django.db.models.ImageField",
"PIL.Image.open",
"django.db.models.CharField",
"django.db.models.BooleanField"
] | [((96, 127), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (112, 127), False, 'from django.db import models\n'), ((141, 184), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(True)'}), '(max_length=20, blank=True)\n', (157, 1... |
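The model above imports PIL.Image alongside an ImageField, which usually means the uploaded picture is post-processed on save. A hedged sketch of that pattern — the 300px bound and the save override are assumptions, not taken from the truncated snippet:

from django.db import models
from PIL import Image

class Animal(models.Model):
    picture = models.ImageField(upload_to="animals/photos/")

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        # Shrink oversized uploads in place (300px bound is hypothetical).
        img = Image.open(self.picture.path)
        if img.height > 300 or img.width > 300:
            img.thumbnail((300, 300))
            img.save(self.picture.path)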
# ##### BEGIN MIT LICENSE BLOCK #####
#
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rig... | [
"bpy.ops.export_jma.export"
] | [((2338, 2365), 'bpy.ops.export_jma.export', 'bpy.ops.export_jma.export', ([], {}), '()\n', (2363, 2365), False, 'import bpy\n')] |
from __future__ import print_function
from builtins import zip
from builtins import str
from builtins import range
from past.builtins import basestring
from builtins import object
import os
import collections
import numpy as np
import pandas as pd
import datetime
from abc import ABCMeta
from osgeo import gdal, ogr, osr... | [
"future.utils.with_metaclass",
"osgeo.ogr.CreateGeometryFromWkb",
"builtins.str",
"osgeo.gdal.AllRegister",
"builtins.range",
"osgeo.ogr.UseExceptions",
"osgeo.ogr.GetDriverCount",
"girs.feat.geom.is_topology_2d",
"osgeo.osr.CoordinateTransformation",
"os.path.exists",
"osgeo.ogr.CreateGeometryF... | [((475, 494), 'osgeo.ogr.UseExceptions', 'ogr.UseExceptions', ([], {}), '()\n', (492, 494), False, 'from osgeo import gdal, ogr, osr\n'), ((17133, 17164), 'future.utils.with_metaclass', 'with_metaclass', (['ABCMeta', 'object'], {}), '(ABCMeta, object)\n', (17147, 17164), False, 'from future.utils import with_metaclass\... |
from unittesting import DeferrableTestCase
from GitSavvy.tests.parameterized import parameterized as p
from GitSavvy.core.commands.log_graph import describe_graph_line
examples = [
(
"|",
{},
None
),
(
"● a3062b2 (HEAD -> optimize-graph-render, origin/optimize-graph-render... | [
"GitSavvy.tests.parameterized.parameterized.expand",
"GitSavvy.core.commands.log_graph.describe_graph_line"
] | [((2030, 2048), 'GitSavvy.tests.parameterized.parameterized.expand', 'p.expand', (['examples'], {}), '(examples)\n', (2038, 2048), True, 'from GitSavvy.tests.parameterized import parameterized as p\n'), ((2133, 2173), 'GitSavvy.core.commands.log_graph.describe_graph_line', 'describe_graph_line', (['input_line', 'remote... |
"""
This module implements features related to parsing the actual SAML response data
and pulling specific pieces of information from the contents of the response document.
In large part, the functionality builds on the python3-saml package produced by OneLogin.
"""
import re
from onelogin.saml2.utils import OneLogin... | [
"saml_reader.saml.errors.IsASamlRequest",
"saml_reader.saml.oli.OLISamlParser",
"saml_reader.saml.errors.DataTypeInvalid",
"re.compile",
"onelogin.saml2.utils.OneLogin_Saml2_Utils.b64decode",
"saml_reader.saml.errors.SamlResponseEncryptedError",
"lxml.etree.tostring",
"re.findall",
"urllib.parse.unq... | [((1098, 1121), 'saml_reader.saml.oli.OLISamlParser', 'OLISamlParser', (['response'], {}), '(response)\n', (1111, 1121), False, 'from saml_reader.saml.oli import OLISamlParser\n'), ((4959, 4993), 're.findall', 're.findall', (['"""(?i)sha(1|256)$"""', 'uri'], {}), "('(?i)sha(1|256)$', uri)\n", (4969, 4993), False, 'impo... |
#!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applic... | [
"soc.views.helper.decorators.view",
"soc.modules.ghop.views.helper.access.GHOPChecker",
"soc.logic.dicts.merge"
] | [((3321, 3355), 'soc.views.helper.decorators.view', 'decorators.view', (['view.acceptInvite'], {}), '(view.acceptInvite)\n', (3336, 3355), False, 'from soc.views.helper import decorators\n'), ((3364, 3391), 'soc.views.helper.decorators.view', 'decorators.view', (['view.admin'], {}), '(view.admin)\n', (3379, 3391), Fals... |
# <NAME> 2014-2020
# mlxtend Machine Learning Library Extensions
# Author: <NAME> <<EMAIL>>
#
# License: BSD 3 clause
import pytest
import numpy as np
from mlxtend.externals.estimator_checks import NotFittedError
from mlxtend.utils import assert_raises
from mlxtend.regressor import StackingRegressor
from sklearn.linea... | [
"sklearn.model_selection.GridSearchCV",
"numpy.random.rand",
"sklearn.linear_model.Lasso",
"numpy.array",
"mlxtend.utils.assert_raises",
"numpy.sin",
"sklearn.ensemble.RandomForestRegressor",
"numpy.random.random",
"numpy.testing.assert_almost_equal",
"numpy.random.seed",
"scipy.sparse.csr_matri... | [((745, 762), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (759, 762), True, 'import numpy as np\n'), ((927, 937), 'numpy.sin', 'np.sin', (['X2'], {}), '(X2)\n', (933, 937), True, 'import numpy as np\n'), ((942, 962), 'numpy.random.random', 'np.random.random', (['(40)'], {}), '(40)\n', (958, 962), Tru... |
"""
Provides utilities for working with image files.
"""
import logging, imghdr
try:
import Image as PIL
except ImportError:
try:
from PIL import Image as PIL
except:
PIL = None
log = logging.getLogger(__name__)
def image_type( filename, image=None ):
format = ''
if PIL is not Non... | [
"logging.getLogger",
"PIL.Image.open",
"imghdr.what"
] | [((214, 241), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (231, 241), False, 'import logging, imghdr\n'), ((598, 619), 'imghdr.what', 'imghdr.what', (['filename'], {}), '(filename)\n', (609, 619), False, 'import logging, imghdr\n'), ((439, 457), 'PIL.Image.open', 'PIL.open', (['filenam... |
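The helper above resolves an image's format with imghdr first and falls back to PIL. A hedged sketch of that two-step probe — the function name and file path are hypothetical:

import imghdr
try:
    from PIL import Image as PIL
except ImportError:
    PIL = None

def probe_image_type(filename):
    # imghdr inspects the file header without decoding the image.
    kind = imghdr.what(filename)
    if kind:
        return kind
    # Fall back to PIL, which recognises more container formats.
    if PIL is not None:
        with PIL.open(filename) as img:
            return (img.format or "").lower()
    return ""

print(probe_image_type("photo.jpg"))    # hypothetical path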
""" Features selection
- select_features (func) : features selection following method
"""
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
import pandas as pd
import numpy as np
class FeatSelector(object):
"""features selection following method
- pca : use pca to redu... | [
"sklearn.decomposition.PCA",
"numpy.cumsum",
"sklearn.preprocessing.StandardScaler"
] | [((6398, 6403), 'sklearn.decomposition.PCA', 'PCA', ([], {}), '()\n', (6401, 6403), False, 'from sklearn.decomposition import PCA\n'), ((2112, 2117), 'sklearn.decomposition.PCA', 'PCA', ([], {}), '()\n', (2115, 2117), False, 'from sklearn.decomposition import PCA\n'), ((6251, 6267), 'sklearn.preprocessing.StandardScale... |
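The PCA/StandardScaler/np.cumsum trio above is the standard way to pick how many components explain a target share of the variance. A hedged sketch — the 95% threshold and the random data are illustrative:

import numpy as np
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler

X = np.random.rand(100, 10)                      # illustrative data

X_std = StandardScaler().fit_transform(X)          # PCA assumes centred/scaled features
pca = PCA().fit(X_std)

cumulative = np.cumsum(pca.explained_variance_ratio_)
n_components = int(np.searchsorted(cumulative, 0.95) + 1)   # smallest k reaching 95%
print(n_components, cumulative[:n_components])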
import fastapi
import sql
import schemas
import datetime
import uuid
from fastapi.security import OAuth2PasswordBearer
import asyncio
import logging
import ignition
loop = asyncio.get_event_loop()
server = ignition.Server(10, ignition.get_logger(__name__, logging.INFO, stdout=True), loop=loop)
router = fastapi.APIRo... | [
"fastapi.security.OAuth2PasswordBearer",
"fastapi.HTTPException",
"ignition.get_logger",
"schemas.process.ProcessResponse",
"sql.crud.Token",
"fastapi.APIRouter",
"sql.database.Session",
"datetime.datetime.now",
"sql.crud.Snippet",
"asyncio.get_event_loop",
"fastapi.Depends"
] | [((174, 198), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (196, 198), False, 'import asyncio\n'), ((307, 343), 'fastapi.APIRouter', 'fastapi.APIRouter', ([], {'tags': "['Snippets']"}), "(tags=['Snippets'])\n", (324, 343), False, 'import fastapi\n'), ((351, 381), 'fastapi.security.OAuth2Passwor... |
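The FastAPI pieces listed above (APIRouter, OAuth2PasswordBearer, Depends) combine into the usual token-protected route. A hedged sketch — the token URL and route path are hypothetical and no real token validation is shown; the router tag mirrors the extraction:

import fastapi
from fastapi.security import OAuth2PasswordBearer

router = fastapi.APIRouter(tags=["Snippets"])             # tag from the extraction
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")    # hypothetical token endpoint

@router.get("/snippets")                                 # hypothetical path
async def list_snippets(token: str = fastapi.Depends(oauth2_scheme)):
    # A real handler would resolve the token to a user before querying.
    return {"token_prefix": token[:8]}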
import psycopg2
import pytest
import sys
from schema_migrations import MigrationController, STATUS_OK, STATUS_PENDING
@pytest.fixture(scope='session')
def databases(request):
db_base = 'schema_migrations_{0}{1}{2}'.format(*sys.version_info)
database_names = [
'{0}_{1}'.format(db_base, i) for i in rang... | [
"pytest.fixture",
"schema_migrations.MigrationController",
"psycopg2.connect"
] | [((121, 152), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (135, 152), False, 'import pytest\n'), ((364, 397), 'psycopg2.connect', 'psycopg2.connect', ([], {'user': '"""postgres"""'}), "(user='postgres')\n", (380, 397), False, 'import psycopg2\n'), ((637, 670), 'psycopg2.... |
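The session-scoped fixture above creates throwaway databases with psycopg2 before the migration tests run. A hedged sketch of that setup/teardown shape — the database names and superuser credentials are illustrative:

import psycopg2
import pytest

@pytest.fixture(scope="session")
def databases(request):
    names = ["schema_migrations_test_0", "schema_migrations_test_1"]   # illustrative names
    conn = psycopg2.connect(user="postgres")
    conn.autocommit = True                 # CREATE DATABASE cannot run inside a transaction
    with conn.cursor() as cur:
        for name in names:
            cur.execute("CREATE DATABASE {0}".format(name))

    def drop():
        with conn.cursor() as cur:
            for name in names:
                cur.execute("DROP DATABASE {0}".format(name))
        conn.close()

    request.addfinalizer(drop)             # tear down after the whole session
    return names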
from collections import defaultdict
import time
import gevent.monkey
gevent.monkey.patch_all()
from gevent.queue import Queue
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from wavefront.controller import App as WfController
import logging
#from antelope import brttpkt
#from wa... | [
"logging.getLogger",
"sqlalchemy.engine_from_config",
"pyramid.config.Configurator",
"sys.exit",
"time.time",
"wavefront.controller.App"
] | [((376, 410), 'logging.getLogger', 'logging.getLogger', (['"""wavefront-web"""'], {}), "('wavefront-web')\n", (393, 410), False, 'import logging\n'), ((681, 695), 'wavefront.controller.App', 'WfController', ([], {}), '()\n', (693, 695), True, 'from wavefront.controller import App as WfController\n'), ((547, 558), 'sys.... |
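The pyramid and sqlalchemy calls above are the standard WSGI bootstrap. A hedged sketch — the settings values and route are illustrative, and the wavefront controller itself is left out:

from pyramid.config import Configurator
from sqlalchemy import engine_from_config

settings = {"sqlalchemy.url": "sqlite:///wavefront.db"}    # illustrative settings

# Build an engine from prefixed settings, then assemble the WSGI app.
engine = engine_from_config(settings, prefix="sqlalchemy.")
config = Configurator(settings=settings)
config.add_route("home", "/")                             # hypothetical route
app = config.make_wsgi_app()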
#!/usr/bin/env python3
# Copyright (C) 2017-2022 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except... | [
"btclib.base58._b58encode",
"btclib.base58._b58decode_to_int",
"btclib.base58._b58encode_from_int",
"btclib.base58.b58encode",
"pytest.raises",
"btclib.base58.b58decode",
"btclib.base58._b58decode"
] | [((1620, 1645), 'btclib.base58.b58encode', 'b58encode', (["b'hello world'"], {}), "(b'hello world')\n", (1629, 1645), False, 'from btclib.base58 import _b58decode, _b58decode_to_int, _b58encode, _b58encode_from_int, b58decode, b58encode\n'), ((1650, 1672), 'btclib.base58.b58decode', 'b58decode', (['encoded', '(11)'], {... |
#!/usr/bin/env python
#
# Copyright 2016-present <NAME>.
#
# Licensed under the MIT License.
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://opensource.org/licenses/mit-license.html
#
# Unless required by applicable law or agreed to in writing, sof... | [
"numpy.abs",
"numpy.multiply",
"numpy.sqrt",
"json.dumps",
"util.validation.OneOfType",
"numpy.tanh",
"util.validation.MShape",
"numpy.square",
"numpy.logaddexp",
"numpy.log",
"numpy.exp",
"numpy.cosh",
"copy.deepcopy",
"warnings.warn",
"util.validation.MType",
"numpy.vectorize",
"nu... | [((1900, 1940), 'util.validation.MType', 'MType', ([], {'size': 'int', 'name': 'str', 'metric': '(str,)'}), '(size=int, name=str, metric=(str,))\n', (1905, 1940), False, 'from util.validation import MShape, MType, OneOfType\n'), ((4842, 4881), 'util.validation.MType', 'MType', ([], {'as_json': 'bool', 'beautify_json': ... |
from __future__ import annotations
from typing import TYPE_CHECKING
import random
from enum import Enum
from configuration import config
from src.genotype.mutagen.option import Option
from src.genotype.neat.gene import Gene
if TYPE_CHECKING:
pass
class NodeType(Enum):
INPUT = 0
HIDDEN = 1
OUTPUT =... | [
"random.choices",
"random.choice"
] | [((1003, 1031), 'random.choice', 'random.choice', (['[False, True]'], {}), '([False, True])\n', (1016, 1031), False, 'import random\n'), ((634, 724), 'random.choices', 'random.choices', (['[False, True]'], {'weights': '[1 - config.lossy_chance, config.lossy_chance]'}), '([False, True], weights=[1 - config.lossy_chance,... |
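The weighted random.choices call visible in the extraction implements the lossy-connection coin flip against config.lossy_chance. A hedged, self-contained sketch of the same draw — the 5% chance stands in for the real config value:

import random

lossy_chance = 0.05    # stands in for config.lossy_chance
# Weighted draw: True with probability lossy_chance, otherwise False.
lossy = random.choices([False, True], weights=[1 - lossy_chance, lossy_chance])[0]
# Unweighted variant also used in the snippet.
enabled = random.choice([False, True])
print(lossy, enabled)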
import yaml
# TODO: add function should be changed
class HParams(object):
# Hyperparameter class using yaml
def __init__(self, **kwargs):
self.__dict__ = kwargs
def add(self, **kwargs):
# change is needed - if key is existed, do not update.
self.__dict__.update(kwargs)
def up... | [
"yaml.load",
"yaml.dump"
] | [((473, 500), 'yaml.dump', 'yaml.dump', (['self.__dict__', 'f'], {}), '(self.__dict__, f)\n', (482, 500), False, 'import yaml\n'), ((758, 770), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (767, 770), False, 'import yaml\n')] |
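The HParams class above round-trips its __dict__ through yaml.dump and yaml.load. A hedged sketch of that save/load cycle — using safe_dump/safe_load, since bare yaml.load requires an explicit Loader in current PyYAML; the file name and values are illustrative:

import yaml

params = {"batch_size": 32, "lr": 0.001}        # illustrative hyperparameters

with open("hparams.yaml", "w") as f:            # hypothetical file name
    yaml.safe_dump(params, f)

with open("hparams.yaml") as f:
    restored = yaml.safe_load(f)

assert restored == params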
from pybeans import AppTool
import os, sys, shutil, time
APP = AppTool('githook', os.getcwd())
def now():
return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) | [
"time.localtime",
"os.getcwd"
] | [((84, 95), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (93, 95), False, 'import os, sys, shutil, time\n'), ((155, 171), 'time.localtime', 'time.localtime', ([], {}), '()\n', (169, 171), False, 'import os, sys, shutil, time\n')] |
import json
import colors
from colors import color
class Profile(object):
'''
This class is template for storing Profile of WhatsApp user
'''
def __init__(self, username, contact, about=None, common_groups=None):
self.username = username
self.contact = contact
self.about = a... | [
"colors.color"
] | [((918, 972), 'colors.color', 'color', (['"""sender:"""'], {'fg': '"""green"""', 'bg': '"""black"""', 'style': '"""bold"""'}), "('sender:', fg='green', bg='black', style='bold')\n", (923, 972), False, 'from colors import color\n'), ((1011, 1063), 'colors.color', 'color', (['"""date:"""'], {'fg': '"""green"""', 'bg': '"... |
import time
import math
from typing import Any, Dict, Sequence
from coba.utilities import PackageChecker
from coba.simulations import Context, Action
from coba.learners.core import Learner, Key
class RegCBLearner(Learner):
"""A learner using the RegCB algorithm by Foster et al.
and the online bin search ... | [
"coba.utilities.PackageChecker.sklearn",
"sklearn.preprocessing.PolynomialFeatures",
"scipy.sparse.issparse",
"math.log",
"numpy.array",
"numpy.dot",
"numpy.outer",
"numpy.char.array",
"time.time",
"sklearn.feature_extraction.FeatureHasher"
] | [((1651, 1689), 'coba.utilities.PackageChecker.sklearn', 'PackageChecker.sklearn', (['"""RegCBLearner"""'], {}), "('RegCBLearner')\n", (1673, 1689), False, 'from coba.utilities import PackageChecker\n'), ((2615, 2701), 'sklearn.preprocessing.PolynomialFeatures', 'PolynomialFeatures', ([], {'degree': 'max_x_term', 'incl... |
import random
import pygame
from . import map_generator, traps
from .pathfinder import Pathfinder
from .tile import Tile
TILE_SIZE = 16
BARRIER_SIZE = 10
def grid_walk(start, end):
start = list(start)
dx = end[0] - start[0]
dy = end[1] - start[1]
nx = abs(dx)
ny = abs(dy)
sign_x = 1 if dx >... | [
"pygame.Rect"
] | [((1189, 1212), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', '(2)', '(2)'], {}), '(0, 0, 2, 2)\n', (1200, 1212), False, 'import pygame\n'), ((3842, 3940), 'pygame.Rect', 'pygame.Rect', (['(loc[0] * self.tile_size)', '(loc[1] * self.tile_size)', 'self.tile_size', 'self.tile_size'], {}), '(loc[0] * self.tile_size, loc[1... |
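The pygame.Rect calls extracted above convert grid coordinates into pixel rectangles. A hedged sketch of that mapping using the 16px tile size visible in the snippet; the helper name is hypothetical:

import pygame

TILE_SIZE = 16

def tile_rect(loc, tile_size=TILE_SIZE):
    # Grid location (column, row) -> pixel-space rectangle.
    return pygame.Rect(loc[0] * tile_size, loc[1] * tile_size, tile_size, tile_size)

print(tile_rect((3, 5)))    # -> <rect(48, 80, 16, 16)>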