| code (string, lengths 22–1.05M) | apis (list, lengths 1–3.31k) | extract_api (string, lengths 75–3.25M) |
|---|---|---|
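The preview rows below are easiest to work with once the `apis` and `extract_api` cells are turned back into Python objects. The following is a minimal, hypothetical sketch of doing that with `ast.literal_eval`; it assumes each row is available as a dict of strings and that the per-call tuple layout visible in the preview (character span, then fully qualified API name, then further call details) holds in the full dataset. The helper name `parse_row` and the trimmed example row are illustrative only, not part of the dataset.

```python
# Minimal sketch, not part of the dataset: parse one row's string cells
# back into Python objects. Assumes the cells are valid Python literals,
# as in the preview rows below; the field order inside each extract_api
# tuple is inferred from the preview and may differ in the full data.
import ast

def parse_row(row: dict) -> dict:
    apis = ast.literal_eval(row["apis"])          # e.g. ['collections.Counter']
    calls = ast.literal_eval(row["extract_api"])  # list of per-call tuples
    parsed = []
    for call in calls:
        # First two fields seen in every preview row: the (start, end)
        # character span of the call in `code`, then the qualified API name.
        span, qualified_name = call[0], call[1]
        parsed.append({"span": span, "api": qualified_name})
    return {"code": row["code"], "apis": apis, "calls": parsed}

# Illustrative row, trimmed from the 'collections.Counter' sample below.
example = {
    "code": "import collections\n...",
    "apis": "['collections.Counter']",
    "extract_api": "[((180, 202), 'collections.Counter', 'collections.Counter',"
                   " (['s'], {}), '(s)', (199, 202), False, 'import collections')]",
}
print(parse_row(example)["calls"])  # [{'span': (180, 202), 'api': 'collections.Counter'}]
```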
"""
Centrographic measures for point patterns
TODO
- testing
- documentation
"""
__author__ = "<NAME> <EMAIL>"
__all__ = ['mbr', 'hull', 'mean_center', 'weighted_mean_center',
'manhattan_median', 'std_distance', 'euclidean_median', 'ellipse',
'skyum', 'dtot',"_circle"]
import sys
import nump... | [
"numpy.median",
"numpy.sqrt",
"pysal.lib.cg.Ray",
"scipy.optimize.minimize",
"numpy.asarray",
"scipy.spatial.ConvexHull",
"numpy.lexsort",
"numpy.dot",
"warnings.warn",
"numpy.cos",
"copy.deepcopy",
"numpy.sin",
"pysal.lib.cg.is_clockwise",
"numpy.arctan"
] | [((1284, 1302), 'numpy.asarray', 'np.asarray', (['points'], {}), '(points)\n', (1294, 1302), True, 'import numpy as np\n'), ((1935, 1953), 'numpy.asarray', 'np.asarray', (['points'], {}), '(points)\n', (1945, 1953), True, 'import numpy as np\n'), ((1962, 1980), 'scipy.spatial.ConvexHull', 'ConvexHull', (['points'], {})... |
import os
defaults = {
"SPYTEST_LOGS_TIME_FMT_ELAPSED": "0",
"SPYTEST_LOGS_MODULE_ONLY_SUPPORT" : "0",
"SPYTEST_NO_CONSOLE_LOG": "0",
"SPYTEST_PROMPTS_FILENAME": None,
"SPYTEST_TEXTFSM_INDEX_FILENAME": "index",
"SPYTEST_UI_POSITIVE_CASES_ONLY": "0",
"SPYTEST_REPEAT_MODULE_SUPPORT": "0",
... | [
"os.path.exists",
"os.path.isabs",
"os.getenv",
"os.makedirs",
"os.path.join",
"os.getcwd"
] | [((4880, 4921), 'os.getenv', 'os.getenv', (['"""SPYTEST_LOGS_PATH"""', 'user_root'], {}), "('SPYTEST_LOGS_PATH', user_root)\n", (4889, 4921), False, 'import os\n'), ((5263, 5287), 'os.getenv', 'os.getenv', (['name', 'cur_def'], {}), '(name, cur_def)\n', (5272, 5287), False, 'import os\n'), ((4851, 4862), 'os.getcwd', '... |
# pylint: disable=no-value-for-parameter,invalid-name,unexpected-keyword-arg
"""Negative Multinomial Log Likelihood."""
import tensorflow as tf
from deepr.layers import base
class MultiLogLikelihood(base.Layer):
"""Negative Multinomial Log Likelihood."""
def __init__(self, **kwargs):
super().__init... | [
"tensorflow.cast",
"tensorflow.nn.log_softmax"
] | [((847, 872), 'tensorflow.nn.log_softmax', 'tf.nn.log_softmax', (['logits'], {}), '(logits)\n', (864, 872), True, 'import tensorflow as tf\n'), ((932, 960), 'tensorflow.cast', 'tf.cast', (['classes', 'tf.float32'], {}), '(classes, tf.float32)\n', (939, 960), True, 'import tensorflow as tf\n')] |
import subprocess
import os
import argparse
import time
from operator import add
DATA_LOC='/home/boubin/Images/'
CUBE_LOC='/home/boubin/SoftwarePilot/DistributedRL/Data/'
def consoleLog(string):
print("#################################################")
print("##############DRL Controller:")
print(string)
print("... | [
"time.sleep",
"os.getcwd",
"os.chdir",
"os.popen",
"os.mkdir",
"subprocess.call"
] | [((463, 474), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (472, 474), False, 'import os\n'), ((476, 504), 'os.chdir', 'os.chdir', (['"""../docker-hadoop"""'], {}), "('../docker-hadoop')\n", (484, 504), False, 'import os\n'), ((506, 553), 'subprocess.call', 'subprocess.call', (["['docker-compose', 'up', '-d']"], {}), "(... |
'''
Author: <NAME>
Created: 13. April 2019
242. Valid Anagram
'''
import collections
class Solution:
def isAnagram(self, s: str, t: str) -> bool:
counter1 = collections.Counter(s)
counter2 = collections.Counter(t)
if counter1 == counter2:
return True
return... | [
"collections.Counter"
] | [((180, 202), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (199, 202), False, 'import collections\n'), ((223, 245), 'collections.Counter', 'collections.Counter', (['t'], {}), '(t)\n', (242, 245), False, 'import collections\n')] |
# ======================================================================================================================
# * Weighted Holistic Atom Localization and Entity Shape (WHALES) descriptors *
# v. 1, May 2018
# --------------------------------------------------------------------------------------------------... | [
"mol_properties.get_coordinates_and_prop",
"numpy.where",
"numpy.delete",
"rdkit.Chem.SanitizeMol",
"numpy.concatenate",
"numpy.full",
"lcm.lmahal",
"numpy.round"
] | [((2986, 3007), 'lcm.lmahal', 'lcm.lmahal', (['coords', 'w'], {}), '(coords, w)\n', (2996, 3007), False, 'import lcm\n'), ((3663, 3678), 'numpy.where', 'np.where', (['(w < 0)'], {}), '(w < 0)\n', (3671, 3678), True, 'import numpy as np\n'), ((3797, 3817), 'numpy.delete', 'np.delete', (['res', 'a', '(0)'], {}), '(res, a... |
"""Plot energy dispersion example."""
import matplotlib.pyplot as plt
import astropy.units as u
import numpy as np
from gammapy.irf import EnergyDispersion
ebounds = np.logspace(-1, 2, 101) * u.TeV
energy_dispersion = EnergyDispersion.from_gauss(e_true=ebounds,
e_reco=eb... | [
"gammapy.irf.EnergyDispersion.from_gauss",
"numpy.logspace",
"matplotlib.pyplot.show"
] | [((219, 289), 'gammapy.irf.EnergyDispersion.from_gauss', 'EnergyDispersion.from_gauss', ([], {'e_true': 'ebounds', 'e_reco': 'ebounds', 'sigma': '(0.3)'}), '(e_true=ebounds, e_reco=ebounds, sigma=0.3)\n', (246, 289), False, 'from gammapy.irf import EnergyDispersion\n'), ((419, 429), 'matplotlib.pyplot.show', 'plt.show'... |
# Copyright (C) 2019 GreenWaves Technologies
# All rights reserved.
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE file for details.
from collections import OrderedDict
from graph.types import Conv2DParameters, FcParameters
from utils.node_id import NodeId
from... | [
"utils.stats_funcs.calculate_qsnrs",
"utils.node_id.NodeId",
"collections.OrderedDict",
"utils.stats_funcs.astats"
] | [((771, 791), 'utils.stats_funcs.astats', 'astats', (['node.weights'], {}), '(node.weights)\n', (777, 791), False, 'from utils.stats_funcs import astats, calculate_qsnrs\n'), ((816, 882), 'utils.stats_funcs.calculate_qsnrs', 'calculate_qsnrs', (['node.weights', "weights['ibits']"], {'force_ideal': '(False)'}), "(node.w... |
from django.contrib import admin
from .models import LoggedCommit
from mentor.models import Mentor
def approve_status(modeladmin, request, queryset):
queryset.update(status = "APPROVED")
approve_status.short_description = "Mark selected Commits as approved"
class LoggedCommitAdmin(admin.ModelAdmin):
list_filt... | [
"django.contrib.admin.site.register",
"mentor.models.Mentor.objects.get"
] | [((818, 870), 'django.contrib.admin.site.register', 'admin.site.register', (['LoggedCommit', 'LoggedCommitAdmin'], {}), '(LoggedCommit, LoggedCommitAdmin)\n', (837, 870), False, 'from django.contrib import admin\n'), ((662, 701), 'mentor.models.Mentor.objects.get', 'Mentor.objects.get', ([], {'handle': 'request.user'})... |
import re
from functools import reduce
from typing import Pattern, List, Iterable
patterns: List[Pattern[str]] = [
re.compile(pattern=r'ObjectId\((.*)\)'),
re.compile(pattern=r'ISODate\((.*)\)'),
re.compile(pattern=r'NumberLong\((.*)\)'),
re.compile(pattern=r'NumberInt\((.*)\)'),
re.compile(pattern... | [
"re.compile"
] | [((120, 160), 're.compile', 're.compile', ([], {'pattern': '"""ObjectId\\\\((.*)\\\\)"""'}), "(pattern='ObjectId\\\\((.*)\\\\)')\n", (130, 160), False, 'import re\n'), ((165, 204), 're.compile', 're.compile', ([], {'pattern': '"""ISODate\\\\((.*)\\\\)"""'}), "(pattern='ISODate\\\\((.*)\\\\)')\n", (175, 204), False, 'im... |
from django.conf.urls import *
from django.conf import settings
from django.contrib.auth.decorators import login_required, permission_required
from django.conf.urls.static import static
from django.views.generic import TemplateView
#These are needed for the urls below
import signbank.pages.views
import signbank.dictio... | [
"django.views.generic.TemplateView.as_view",
"signbank.dictionary.adminviews.HandshapeListView.as_view",
"signbank.dictionary.adminviews.GlossSheetView.as_view",
"signbank.dictionary.adminviews.GlossListView.as_view",
"signbank.dictionary.adminviews.DatasetManagerView.as_view",
"signbank.dictionary.adminv... | [((1013, 1033), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (1031, 1033), False, 'from django.contrib import admin\n'), ((1136, 1202), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""numbersigns/numbersigns.html"""'}), "(template_name='num... |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import... | [
"pulumi.getter",
"pulumi.set",
"pulumi.get"
] | [((1546, 1578), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dnsRecords"""'}), "(name='dnsRecords')\n", (1559, 1578), False, 'import pulumi\n'), ((2029, 2062), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""namespaceId"""'}), "(name='namespaceId')\n", (2042, 2062), False, 'import pulumi\n'), ((2394, 2429), ... |
__author__ = '<EMAIL>'
from setuptools import setup
setup(
name="sqlitecookiejar",
description='FileCookieJar using SQLite files for persistence',
py_modules=['sqlitecookiejar'],
version='1.0.2',
long_description=__doc__,
zip_safe=False,
author_email='<EMAIL>',
url='https://github.com/... | [
"setuptools.setup"
] | [((54, 685), 'setuptools.setup', 'setup', ([], {'name': '"""sqlitecookiejar"""', 'description': '"""FileCookieJar using SQLite files for persistence"""', 'py_modules': "['sqlitecookiejar']", 'version': '"""1.0.2"""', 'long_description': '__doc__', 'zip_safe': '(False)', 'author_email': '"""<EMAIL>"""', 'url': '"""https... |
#!/usr/bin/env/python
from typing import Tuple, List, Any, Sequence
import tensorflow as tf
import time
import os
import json
import numpy as np
import pickle
import random
import utils
from utils import MLP, dataset_info, ThreadedIterator, graph_to_adj_mat, SMALL_NUMBER, LARGE_NUMBER, graph_to_adj_mat
import csv
cla... | [
"tensorflow.local_variables_initializer",
"tensorflow.transpose",
"tensorflow.set_random_seed",
"tensorflow.variables_initializer",
"tensorflow.Graph",
"tensorflow.Session",
"tensorflow.placeholder",
"json.dumps",
"numpy.random.seed",
"os.getpid",
"tensorflow.ConfigProto",
"tensorflow.train.Ad... | [((2081, 2145), 'os.path.join', 'os.path.join', (['log_dir', "('%s_log_%s.json' % (self.run_id, dataset))"], {}), "(log_dir, '%s_log_%s.json' % (self.run_id, dataset))\n", (2093, 2145), False, 'import os\n'), ((2177, 2231), 'os.path.join', 'os.path.join', (['log_dir', "('%s_model.pickle' % self.run_id)"], {}), "(log_di... |
import json
from traceback import format_exc
from functools import partial
from flask import request, current_app, jsonify
from werkzeug.exceptions import default_exceptions
from devtools.web import merge_fields
def http_exception_handler(error, debug):
code = getattr(error, 'code', 500)
description = json.du... | [
"devtools.web.merge_fields",
"functools.partial",
"traceback.format_exc",
"flask.current_app.logger.error"
] | [((825, 869), 'flask.current_app.logger.error', 'current_app.logger.error', (['req'], {'exc_info': '(True)'}), '(req, exc_info=True)\n', (849, 869), False, 'from flask import request, current_app, jsonify\n'), ((324, 345), 'devtools.web.merge_fields', 'merge_fields', (['request'], {}), '(request)\n', (336, 345), False,... |
import sys
import _jpype
import jpype
from jpype.types import *
from jpype import JPackage, java
import common
import pytest
try:
import numpy as np
except ImportError:
pass
class ZZZTestCase(common.JPypeTestCase):
def setUp(self):
common.JPypeTestCase.setUp(self)
def testShutdown(self):
... | [
"jpype._core._JTerminate",
"common.JPypeTestCase.setUp",
"jpype.shutdownJVM"
] | [((255, 287), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (281, 287), False, 'import common\n'), ((513, 532), 'jpype.shutdownJVM', 'jpype.shutdownJVM', ([], {}), '()\n', (530, 532), False, 'import jpype\n'), ((587, 612), 'jpype._core._JTerminate', 'jpype._core._JTerminate', (... |
import logging
import os
from typing import Optional, List
import pandas as pd
from .sketch import sketch_fasta, sketch_fastqs
from .parser import mash_dist_output_to_dataframe
from ..utils import run_command
from ..const import MASH_REFSEQ_MSH
def mash_dist_refseq(sketch_path: str, mash_bin: str = "mash") -> str:
... | [
"os.path.exists",
"logging.info",
"os.remove"
] | [((596, 623), 'os.path.exists', 'os.path.exists', (['sketch_path'], {}), '(sketch_path)\n', (610, 623), False, 'import os\n'), ((2247, 2343), 'logging.info', 'logging.info', (['"""Parsed Mash dist output into Pandas DataFrame with %s rows"""', 'df_mash.shape[0]'], {}), "('Parsed Mash dist output into Pandas DataFrame w... |
from Screens import PluginBrowser as PBBase
from Screens.InfoBarGenerics import InfoBarNotifications
OriginalPluginBrowser = PBBase.PluginBrowser
if not issubclass(OriginalPluginBrowser, InfoBarNotifications):
class PluginBrowser(OriginalPluginBrowser, InfoBarNotifications):
def __init__(self, *args, **kwargs):
... | [
"Screens.InfoBarGenerics.InfoBarNotifications.__init__"
] | [((492, 527), 'Screens.InfoBarGenerics.InfoBarNotifications.__init__', 'InfoBarNotifications.__init__', (['self'], {}), '(self)\n', (521, 527), False, 'from Screens.InfoBarGenerics import InfoBarNotifications\n')] |
from matplotlib import pyplot as plt
plt.xkcd()
ages_x = [18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55]
py_dev_y = [20046, 17100, 20000, 24744, 30500, 37732, 41247, 45372, 48876, 53850, 57287, 63016, ... | [
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.xkcd",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((39, 49), 'matplotlib.pyplot.xkcd', 'plt.xkcd', ([], {}), '()\n', (47, 49), True, 'from matplotlib import pyplot as plt\n'), ((524, 566), 'matplotlib.pyplot.plot', 'plt.plot', (['ages_x', 'py_dev_y'], {'label': '"""Python"""'}), "(ages_x, py_dev_y, label='Python')\n", (532, 566), True, 'from matplotlib import pyplot ... |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pandas as pd
from pandas.api.types import is_string_dtype, is_numeric_dtype
import logging
import os
import os.path as osp
import numpy as np
import json
from ray.tune.util import flatten_dict
logger =... | [
"logging.getLogger",
"pandas.api.types.is_numeric_dtype",
"pandas.api.types.is_string_dtype",
"os.path.join",
"json.load",
"numpy.argwhere",
"pandas.DataFrame",
"os.walk"
] | [((321, 348), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (338, 348), False, 'import logging\n'), ((1563, 1581), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (1575, 1581), True, 'import pandas as pd\n'), ((1703, 1727), 'pandas.api.types.is_numeric_dtype', 'is_numeric... |
import socket
from socket import *
import os, sys
if len(sys.argv) > 1:
try:
serverPort = int(sys.argv[1])
except:
print("Error: " + sys.argv[1] + " is not a valid port number")
exit()
if serverPort < 0 or serverPort > 65535:
print("Error: " + str(serverPort) + " is not a va... | [
"socket"
] | [((396, 424), 'socket', 'socket', (['AF_INET', 'SOCK_STREAM'], {}), '(AF_INET, SOCK_STREAM)\n', (402, 424), False, 'import socket\n')] |
import json
import os
import pytest
import requests
from tests.acceptance.helpers import ENDPOINT_ACTIVATE
from tests.acceptance.helpers import ENDPOINT_CONFIG
from tests.acceptance.helpers import create_and_validate_request_and_response
from tests.acceptance.helpers import sort_response
expected_activate_ab = """[
... | [
"json.loads",
"os.getenv",
"pytest.mark.parametrize",
"pytest.raises",
"tests.acceptance.helpers.create_and_validate_request_and_response"
] | [((1075, 1406), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""experiment_key, expected_response, expected_status_code"""', "[('ab_test1', expected_activate_ab, 200), ('',\n expected_activate_ab_empty_experimentKey, 200), ('invalid exper key',\n expected_activate_ab_invalid_experimentKey, 200)]"], {'... |
import gmsh
import math
import time
import threading
# This example shows how to implement a custom user interface running
# computationally expensive calculations in separate threads. The threads can
# update the user interface in real-time.
gmsh.initialize()
# hide the standard Gmsh modules
gmsh.option.setNumber("... | [
"gmsh.onelab.setString",
"gmsh.option.setNumber",
"gmsh.onelab.getNumber",
"gmsh.fltk.wait",
"gmsh.onelab.setNumber",
"gmsh.onelab.getString",
"gmsh.fltk.update",
"gmsh.onelab.set",
"gmsh.fltk.unlock",
"gmsh.initialize",
"gmsh.fltk.awake",
"math.cos",
"gmsh.fltk.lock",
"gmsh.finalize",
"... | [((245, 262), 'gmsh.initialize', 'gmsh.initialize', ([], {}), '()\n', (260, 262), False, 'import gmsh\n'), ((297, 347), 'gmsh.option.setNumber', 'gmsh.option.setNumber', (['"""General.ShowModuleMenu"""', '(0)'], {}), "('General.ShowModuleMenu', 0)\n", (318, 347), False, 'import gmsh\n'), ((1073, 1100), 'gmsh.onelab.set... |
import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.load("FWCore.Framework.test.cmsExceptionsFatal_cff")
process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")
process.source = cms.Source("EmptySource")
process.generator = cms.EDFilter("Pythia8GeneratorFilter",
maxEventsToPrint = cms... | [
"FWCore.ParameterSet.Config.Schedule",
"FWCore.ParameterSet.Config.untracked.double",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.double",
"FWCore.ParameterSet.Config.EndPath",
"FWCore.ParameterSet.Config.Source",
"FWCore.ParameterSe... | [((52, 71), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""TEST"""'], {}), "('TEST')\n", (63, 71), True, 'import FWCore.ParameterSet.Config as cms\n'), ((208, 233), 'FWCore.ParameterSet.Config.Source', 'cms.Source', (['"""EmptySource"""'], {}), "('EmptySource')\n", (218, 233), True, 'import FWCore.Parameter... |
# This script will build the zipped version of the mod that is ready for release on the mod portal
# It will also first bump versions and the changelog, and commit and push the changes to Github
# This needs to run in the directory that contains your Factorio installation as well as the mod project folder
# Takes the m... | [
"pathlib.Path",
"pathlib.Path.cwd",
"datetime.datetime.today",
"platform.system",
"git.Repo",
"json.load",
"re.sub",
"json.dump"
] | [((727, 744), 'platform.system', 'platform.system', ([], {}), '()\n', (742, 744), False, 'import platform\n'), ((751, 761), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (759, 761), False, 'from pathlib import Path\n'), ((769, 792), 'git.Repo', 'git.Repo', (['(cwd / MODNAME)'], {}), '(cwd / MODNAME)\n', (777, 792),... |
from lib.bot import bot
VERSION = "0.0.27" # Part 31
bot.run(VERSION) | [
"lib.bot.bot.run"
] | [((55, 71), 'lib.bot.bot.run', 'bot.run', (['VERSION'], {}), '(VERSION)\n', (62, 71), False, 'from lib.bot import bot\n')] |
# -*- coding: UTF-8 -*-
from app.controller.base_controller import BaseController
from app.validator.journal_entry_validator import JournalEntryValidator
from app.service.journal_entry_service import JournalEntryService
from app.entity.journal_entry_entity import JournalEntryEntity
'''
Journal Entry Controller Contro... | [
"app.service.journal_entry_service.JournalEntryService",
"app.validator.journal_entry_validator.JournalEntryValidator",
"app.entity.journal_entry_entity.JournalEntryEntity"
] | [((576, 597), 'app.service.journal_entry_service.JournalEntryService', 'JournalEntryService', ([], {}), '()\n', (595, 597), False, 'from app.service.journal_entry_service import JournalEntryService\n'), ((625, 648), 'app.validator.journal_entry_validator.JournalEntryValidator', 'JournalEntryValidator', ([], {}), '()\n'... |
from unittest import mock
import os
import pytest
from . import signature
@pytest.fixture
def client():
return signature.SignedClientMixin()
def test_nbg_certificate(client):
"""
Ensure that the signed client always returns the appropriate NBG
certificate.
"""
current_dir = os.path.dirname... | [
"os.path.dirname",
"os.path.join",
"unittest.mock.patch"
] | [((305, 330), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (320, 330), False, 'import os\n'), ((347, 381), 'os.path.join', 'os.path.join', (['current_dir', '"""certs"""'], {}), "(current_dir, 'certs')\n", (359, 381), False, 'import os\n'), ((2079, 2151), 'unittest.mock.patch', 'mock.patch',... |
from typing import Any
from hypothesis import given
from lz.functional import (curry,
identity)
from tests import strategies
@given(strategies.scalars)
def test_currying(object_: Any) -> None:
curried = curry(identity)
result = curried(object_)
assert result is object_
| [
"hypothesis.given",
"lz.functional.curry"
] | [((157, 182), 'hypothesis.given', 'given', (['strategies.scalars'], {}), '(strategies.scalars)\n', (162, 182), False, 'from hypothesis import given\n'), ((238, 253), 'lz.functional.curry', 'curry', (['identity'], {}), '(identity)\n', (243, 253), False, 'from lz.functional import curry, identity\n')] |
"""
Functions related to STAAR
"""
import numpy as np
from scipy.stats import cauchy
c = cauchy()
def cct(pvals, weights=None):
"""
Python port of the CCT function as defined in the STAAR R-package (https://github.com/xihaoli/STAAR/blob/2f67fafec591a45e81a54eca24564b09ce90e252/R/CCT.R)
An analytical... | [
"scipy.stats.cauchy",
"numpy.isnan",
"numpy.ones_like",
"numpy.tan"
] | [((94, 102), 'scipy.stats.cauchy', 'cauchy', ([], {}), '()\n', (100, 102), False, 'from scipy.stats import cauchy\n'), ((1630, 1649), 'numpy.ones_like', 'np.ones_like', (['pvals'], {}), '(pvals)\n', (1642, 1649), True, 'import numpy as np\n'), ((1025, 1040), 'numpy.isnan', 'np.isnan', (['pvals'], {}), '(pvals)\n', (103... |
import os
import cv2
import typer
from tqdm import tqdm
import importlib.util
from tqdm.contrib.concurrent import process_map
from typing import Callable, Optional, Any, Sequence, Tuple
from distortme.base_types import Image
from distortme.main_utils import print_delimiter
from distortme.files_utils import images, cre... | [
"cv2.imwrite",
"tqdm.contrib.concurrent.process_map",
"distortme.main_utils.print_delimiter",
"distortme.files_utils.create_folders",
"tqdm.tqdm",
"os.path.join",
"typer.echo",
"os.path.basename",
"typer.Exit",
"os.path.abspath",
"distortme.files_utils.images",
"cv2.imread"
] | [((1596, 1641), 'distortme.main_utils.print_delimiter', 'print_delimiter', (['"""Apply custom preprosessing"""'], {}), "('Apply custom preprosessing')\n", (1611, 1641), False, 'from distortme.main_utils import print_delimiter\n'), ((789, 821), 'os.path.basename', 'os.path.basename', (['path_to_module'], {}), '(path_to_... |
import matlab.engine
import argparse
import torch
from torch.autograd import Variable
import numpy as np
import time, math, glob
import scipy.io as sio
import cv2
parser = argparse.ArgumentParser(description="PyTorch EDSR Eval")
parser.add_argument("--cuda", action="store_true", help="use cuda?")
parser.add... | [
"numpy.clip",
"numpy.mean",
"argparse.ArgumentParser",
"torch.load",
"scipy.io.loadmat",
"torch.from_numpy",
"numpy.array",
"torch.cuda.is_available",
"math.log10",
"time.time",
"glob.glob"
] | [((182, 238), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch EDSR Eval"""'}), "(description='PyTorch EDSR Eval')\n", (205, 238), False, 'import argparse\n'), ((1233, 1264), 'glob.glob', 'glob.glob', (["(opt.dataset + '/*.*')"], {}), "(opt.dataset + '/*.*')\n", (1242, 1264), False... |
from trading_ig import IGService
from trading_ig.config import config
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# if you need to cache to DB your requests
from datetime import timedelta
import requests_cache
from predefined_functions.initialisation import Initialisation
class... | [
"logging.getLogger",
"predefined_functions.initialisation.Initialisation",
"logging.basicConfig"
] | [((95, 122), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (112, 122), False, 'import logging\n'), ((373, 412), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (392, 412), False, 'import logging\n'), ((432, 459), 'logging.getLogger... |
import setpath
import functions
import json
registered=True
def convert(data):
if isinstance(data, basestring):
return str(data)
elif isinstance(data, collections.Mapping):
return dict(map(convert, data.iteritems()))
elif isinstance(data, collections.Iterable):
return type(data)(map... | [
"functions.vtable.vtbase.VTGenerator",
"sys.setdefaultencoding",
"json.loads"
] | [((2036, 2084), 'functions.vtable.vtbase.VTGenerator', 'functions.vtable.vtbase.VTGenerator', (['dummycoding'], {}), '(dummycoding)\n', (2071, 2084), False, 'import functions\n'), ((816, 848), 'json.loads', 'json.loads', (["dictargs['metadata']"], {}), "(dictargs['metadata'])\n", (826, 848), False, 'import json\n'), ((... |
from flask import url_for
from app.api.serializer import get_alias_infos_with_pagination_v3
from app.config import PAGE_LIMIT
from app.extensions import db
from app.models import User, ApiKey, Alias, Contact, EmailLog, Mailbox
def test_get_alias_infos_with_pagination_v3(flask_client):
user = User.create(
... | [
"app.models.Mailbox.create",
"app.extensions.db.session.commit",
"app.models.User.create",
"app.api.serializer.get_alias_infos_with_pagination_v3",
"app.models.Alias.query.first"
] | [((300, 399), 'app.models.User.create', 'User.create', ([], {'email': '"""a@b.c"""', 'password': '"""password"""', 'name': '"""Test User"""', 'activated': '(True)', 'commit': '(True)'}), "(email='a@b.c', password='password', name='Test User', activated\n =True, commit=True)\n", (311, 399), False, 'from app.models im... |
from unittest.mock import sentinel
from preacher.core.scheduling.listener import Listener
def test_listener():
listener = Listener()
listener.on_end(sentinel.status)
listener.on_scenario(sentinel.scenario)
| [
"preacher.core.scheduling.listener.Listener"
] | [((129, 139), 'preacher.core.scheduling.listener.Listener', 'Listener', ([], {}), '()\n', (137, 139), False, 'from preacher.core.scheduling.listener import Listener\n')] |
import unittest
import ray
#import ray.test.test_functions as test_functions
#import ray.test.test_utils
import time
@ray.remote
def f():
time.sleep(1)
return 1
if __name__ == "__main__":
ray.init()
results = ray.get([f.remote() for i in range(4)])
print("results: ", results)
| [
"ray.init",
"time.sleep"
] | [((143, 156), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (153, 156), False, 'import time\n'), ((202, 212), 'ray.init', 'ray.init', ([], {}), '()\n', (210, 212), False, 'import ray\n')] |
import os, warnings
warnings.filterwarnings("ignore")
def init_():
"""creates directories needed to run repo"""
data_path = "data"
raw_path = "data/raw"
raw_c_path = "data/raw/train_c"
raw_r_path = "data/raw/train_r"
if not os.path.isdir(data_path):
os.mkdir(data_path)
... | [
"os.path.join",
"os.path.isdir",
"os.mkdir",
"warnings.filterwarnings"
] | [((21, 54), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (44, 54), False, 'import os, warnings\n'), ((253, 277), 'os.path.isdir', 'os.path.isdir', (['data_path'], {}), '(data_path)\n', (266, 277), False, 'import os, warnings\n'), ((287, 306), 'os.mkdir', 'os.mkdir', (['d... |
import json
import logging
from random import choice
from time import sleep
import requests
from proxy_pool import IpPool
import threading
class Data5UProxy(IpPool):
def __init__(self, api_url):
super().__init__(api_url)
self.refresh_thread = GetIpThread(self.api_url, self.ip_pool, self.cond)
... | [
"json.loads",
"requests.Session",
"logging.debug",
"requests.adapters.HTTPAdapter",
"time.sleep",
"logging.info"
] | [((410, 432), 'logging.info', 'logging.info', (['"""请求新的ip"""'], {}), "('请求新的ip')\n", (422, 432), False, 'import logging\n'), ((506, 521), 'json.loads', 'json.loads', (['res'], {}), '(res)\n', (516, 521), False, 'import json\n'), ((971, 1011), 'logging.debug', 'logging.debug', (['f"""remove {ip} from pool!"""'], {}), "... |
from pathlib import Path
from urllib.parse import urlparse
import requests
from django.core.files.base import ContentFile
from django.core.management.base import BaseCommand
from places.models import Image
from places.models import Place
class Command(BaseCommand):
help = 'Upload place from Json'
def add_a... | [
"django.core.files.base.ContentFile",
"urllib.parse.urlparse",
"requests.get",
"places.models.Image",
"places.models.Place.objects.get_or_create"
] | [((505, 545), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(False)'}), '(url, allow_redirects=False)\n', (517, 545), False, 'import requests\n'), ((653, 934), 'places.models.Place.objects.get_or_create', 'Place.objects.get_or_create', ([], {'title': "place_detail['title']", 'defaults': "{'description_... |
# coding: utf-8
import pprint
import re
import six
class AddDevice:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json ... | [
"six.iteritems"
] | [((9663, 9696), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (9676, 9696), False, 'import six\n')] |
import subprocess
import numpy as np
from matplotlib import pyplot as plt
import os
cmd = f'go run main.go'.replace('\\', '/')
print(cmd)
subprocess.check_output(cmd, shell=True)
data = np.genfromtxt('out.csv', delimiter=",")
print(data)
plt.plot(data[:, 0], data[:, 1], label="Track")
plt.plot(data[:, 2], data[:, ... | [
"subprocess.check_output",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"os.remove",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.title",
"numpy.genfromtxt",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((140, 180), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (163, 180), False, 'import subprocess\n'), ((189, 228), 'numpy.genfromtxt', 'np.genfromtxt', (['"""out.csv"""'], {'delimiter': '""","""'}), "('out.csv', delimiter=',')\n", (202, 228), True, 'import... |
# Authors: <NAME> <<EMAIL>>
# License: BSD
import glob
import os.path as op
import numpy as np
import pytest
from mne import what, create_info
from mne.datasets import testing
from mne.io import RawArray
from mne.preprocessing import ICA
from mne.utils import requires_sklearn
data_path = testing.data_path(download=... | [
"mne.datasets.testing.data_path",
"mne.create_info",
"os.path.join",
"os.path.splitext",
"mne.what",
"mne.preprocessing.ICA",
"numpy.random.RandomState",
"pytest.warns"
] | [((293, 326), 'mne.datasets.testing.data_path', 'testing.data_path', ([], {'download': '(False)'}), '(download=False)\n', (310, 326), False, 'from mne.datasets import testing\n'), ((485, 500), 'mne.preprocessing.ICA', 'ICA', ([], {'max_iter': '(1)'}), '(max_iter=1)\n', (488, 500), False, 'from mne.preprocessing import ... |
import pandas as pd
import pickle
import numpy as np
from keras import backend as K
import tensorflow as tf
import os
from tune_hyperparameters import TuneNeuralNet
# Load training data and stopwords
train_data = pd.read_pickle('../../../data/train_data.pkl')
with open('../../../data/stopwords.pkl', 'rb') as f:
st... | [
"pandas.read_pickle",
"tensorflow.compat.v1.ConfigProto",
"tune_hyperparameters.TuneNeuralNet",
"keras.backend.set_session",
"pickle.load",
"numpy.linspace",
"tensorflow.compat.v1.Session"
] | [((214, 260), 'pandas.read_pickle', 'pd.read_pickle', (['"""../../../data/train_data.pkl"""'], {}), "('../../../data/train_data.pkl')\n", (228, 260), True, 'import pandas as pd\n'), ((423, 616), 'tensorflow.compat.v1.ConfigProto', 'tf.compat.v1.ConfigProto', ([], {'intra_op_parallelism_threads': 'NUM_PARALLEL_EXEC_UNIT... |
from tkinter import Tk, Frame, BOTH, Button
class Menu:
def __init__(self):
self.window = Tk()
self.state = "inMenu"
self.main()
def on_closing(self):
self.window.destroy()
del self
def __del__(self):
pass
def playAsPlayer(self):
self.state = "... | [
"tkinter.Tk",
"tkinter.Frame",
"tkinter.Button"
] | [((103, 107), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (105, 107), False, 'from tkinter import Tk, Frame, BOTH, Button\n'), ((607, 625), 'tkinter.Frame', 'Frame', (['self.window'], {}), '(self.window)\n', (612, 625), False, 'from tkinter import Tk, Frame, BOTH, Button\n'), ((1437, 1500), 'tkinter.Button', 'Button', (['fra... |
"""
test_plugin.py - end-to-end testing of the EXCALIBUR plugin in an ODIN server instance
<NAME>, STFC Application Engineering Group
"""
from nose.tools import *
import requests
import json
from odin.testing.utils import OdinTestServer
from excalibur.fem import ExcaliburFem
from excalibur.adapter import ExcaliburA... | [
"json.dumps"
] | [((1552, 1578), 'json.dumps', 'json.dumps', (['connect_params'], {}), '(connect_params)\n', (1562, 1578), False, 'import json\n')] |
from typing import Any, List, Optional
import numpy as np
from rdkit.Chem import rdchem, rdmolfiles, rdmolops, rdDistGeom, rdPartialCharges
class MolFeatureExtractionError(Exception):
pass
def one_hot(x: Any, allowable_set: List[Any]) -> List[int]:
"""One hot encode labels.
If label `x` is not include... | [
"numpy.eye",
"rdkit.Chem.rdPartialCharges.ComputeGasteigerCharges",
"numpy.power",
"rdkit.Chem.rdmolops.AssignStereochemistry",
"rdkit.Chem.rdmolops.RemoveHs",
"rdkit.Chem.rdmolops.GetAdjacencyMatrix",
"numpy.diag",
"numpy.array",
"numpy.zeros",
"rdkit.Chem.rdchem.HybridizationType.names.values",
... | [((2724, 2769), 'rdkit.Chem.rdPartialCharges.ComputeGasteigerCharges', 'rdPartialCharges.ComputeGasteigerCharges', (['mol'], {}), '(mol)\n', (2764, 2769), False, 'from rdkit.Chem import rdchem, rdmolfiles, rdmolops, rdDistGeom, rdPartialCharges\n'), ((2806, 2841), 'rdkit.Chem.rdmolops.AssignStereochemistry', 'rdmolops.... |
from discord.ext import commands
import discord
import pymongo
from json import load as json_load
from os import remove
from math import ceil
import json
import asyncio
from cogs.utils import Defaults
with open('config.json', 'r', encoding='utf8') as f:
config = json_load(f)
prefix = config['pr... | [
"cogs.utils.Defaults.error_warning_send",
"discord.Colour",
"discord.ext.commands.guild_only",
"discord.File",
"cogs.utils.Defaults.set_footer",
"discord.ext.commands.bot_has_permissions",
"discord.ext.commands.group",
"json.load",
"pymongo.MongoClient",
"cogs.utils.Defaults.error_fatal_send",
"... | [((379, 411), 'pymongo.MongoClient', 'pymongo.MongoClient', (['mongodb_url'], {}), '(mongodb_url)\n', (398, 411), False, 'import pymongo\n'), ((283, 295), 'json.load', 'json_load', (['f'], {}), '(f)\n', (292, 295), True, 'from json import load as json_load\n'), ((1342, 1363), 'discord.ext.commands.guild_only', 'command... |
# A Cuboctahedron, brought to you by PharaohCola13
import mpl_toolkits.mplot3d.axes3d as p3
import matplotlib.pyplot as plt
from matplotlib import *
from numpy import *
from mpl_toolkits.mplot3d.art3d import *
from matplotlib.animation import *
name = "Cuboctahedron"
def shape(fig, alpha, color, edge_c, edge_w, grid... | [
"mpl_toolkits.mplot3d.axes3d.Axes3D",
"matplotlib.pyplot.axis"
] | [((954, 968), 'mpl_toolkits.mplot3d.axes3d.Axes3D', 'p3.Axes3D', (['fig'], {}), '(fig)\n', (963, 968), True, 'import mpl_toolkits.mplot3d.axes3d as p3\n'), ((998, 1012), 'matplotlib.pyplot.axis', 'plt.axis', (['grid'], {}), '(grid)\n', (1006, 1012), True, 'import matplotlib.pyplot as plt\n'), ((1014, 1031), 'matplotlib... |
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 02 16:32:18 2017
@author: <NAME> <EMAIL>
"""
import json
#import cPickle as pickle
import twit_token
import unicodedata as uniD
import os
import nltk
import re
from pymongo import MongoClient
#MongoDB credentials and collections
#DBname = 'test-tree'
#DBname = 'test_r... | [
"twit_token.ize",
"os.walk",
"pymongo.MongoClient",
"json.load",
"re.sub",
"json.dump"
] | [((427, 440), 'pymongo.MongoClient', 'MongoClient', ([], {}), '()\n', (438, 440), False, 'from pymongo import MongoClient\n'), ((450, 477), 'pymongo.MongoClient', 'MongoClient', (['DBhost', 'DBport'], {}), '(DBhost, DBport)\n', (461, 477), False, 'from pymongo import MongoClient\n'), ((1201, 1218), 'os.walk', 'os.walk'... |
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import json
import os
import random
import re
import warnings
from collections import defaultdict
from con... | [
"indico.modules.events.Event.get",
"sqlalchemy.orm.load_only",
"indico.util.fs.chmod_umask",
"zipfile.ZipFile",
"indico.util.i18n._",
"indico.modules.events.contributions.models.contributions.Contribution.query.with_parent",
"flask.session.setdefault",
"indico.util.user.principal_from_identifier",
"... | [((3476, 3521), 'indico.modules.events.Event.get', 'Event.get', (["args['event_id']"], {'is_deleted': '(False)'}), "(args['event_id'], is_deleted=False)\n", (3485, 3521), False, 'from indico.modules.events import Event\n'), ((17240, 17331), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)',... |
from bluepy.btle import Scanner, DefaultDelegate, Peripheral, BTLEManagementError, BTLEDisconnectError
import struct
import time
import threading
# thread to read weight from the scale
class ReadWeightThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.name = READ_TH... | [
"threading.Thread.__init__",
"bluepy.btle.DefaultDelegate.__init__",
"bluepy.btle.Scanner",
"threading.enumerate",
"bluepy.btle.Peripheral",
"struct.pack",
"time.sleep",
"time.time"
] | [((4178, 4202), 'struct.pack', 'struct.pack', (['"""<BB"""', '(1)', '(0)'], {}), "('<BB', 1, 0)\n", (4189, 4202), False, 'import struct\n'), ((4410, 4469), 'struct.pack', 'struct.pack', (['"""<BBBBBBBB"""', '(32)', '(8)', '(21)', '(218)', '(185)', '(232)', '(37)', '(221)'], {}), "('<BBBBBBBB', 32, 8, 21, 218, 185, 232,... |
import tensorflow as tf
from sparkflow.pipeline_util import PysparkReaderWriter
import numpy as np
from pyspark.ml.param import Param, Params, TypeConverters
from pyspark.ml.param.shared import HasInputCol, HasPredictionCol, HasLabelCol
from pyspark.ml.base import Estimator
from pyspark.ml import Model
from pyspark.ml... | [
"json.loads",
"pyspark.SparkContext._active_spark_context.getConf",
"pyspark.ml.param.Params._dummy",
"numpy.asarray",
"sparkflow.ml_util.convert_weights_to_json",
"sparkflow.ml_util.predict_func"
] | [((1728, 1753), 'numpy.asarray', 'np.asarray', (['data[inp_col]'], {}), '(data[inp_col])\n', (1738, 1753), True, 'import numpy as np\n'), ((1765, 1790), 'numpy.asarray', 'np.asarray', (['data[inp_col]'], {}), '(data[inp_col])\n', (1775, 1790), True, 'import numpy as np\n'), ((1955, 1970), 'pyspark.ml.param.Params._dumm... |
import os
import json
import sys
from tqdm import tqdm
img_folder = '/opt/tiger/minist/datasets/groot_voc/JPEGImages'
exist_img = os.listdir(img_folder)
print('exist_img nums {}'.format(len(exist_img)))
exist_img = [img[:-4] for img in exist_img]
# print(exist_img[:5])
new_img_path = '/opt/tiger/minist/data_process/... | [
"os.listdir",
"tqdm.tqdm"
] | [((132, 154), 'os.listdir', 'os.listdir', (['img_folder'], {}), '(img_folder)\n', (142, 154), False, 'import os\n'), ((554, 561), 'tqdm.tqdm', 'tqdm', (['f'], {}), '(f)\n', (558, 561), False, 'from tqdm import tqdm\n')] |
import json
import logging
import os
import sys
from argparse import ArgumentParser
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import confusion_matrix, accuracy_score, precision_score, recall_score, f1_score
from transformers import AutoTokenizer
from src.data.bitext import WMT14Transform... | [
"logging.getLogger",
"logging.StreamHandler",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_score",
"transformers.AutoTokenizer.from_pretrained",
"logging.info",
"os.path.exists",
"argparse.ArgumentParser",
"matplotlib.pyplot.xlabel",
"numpy.ndenumerate",
"matplotlib.pyplot.yticks",... | [((399, 415), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (413, 415), False, 'from argparse import ArgumentParser\n'), ((2195, 2214), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (2212, 2214), False, 'import logging\n'), ((2746, 2805), 'transformers.AutoTokenizer.from_pretrained', 'Aut... |
"""
Copyright (c) Facebook, Inc. and its affiliates.
"""
import sys
import os
import unittest
from locobot.agent.loco_memory import LocoAgentMemory
from locobot.agent.loco_memory_nodes import DetectedObjectNode, HumanPoseNode
from locobot.test.utils import get_fake_detection, get_fake_humanpose
class MemoryTests(unit... | [
"locobot.agent.loco_memory_nodes.HumanPoseNode.create",
"locobot.agent.loco_memory.LocoAgentMemory",
"locobot.agent.loco_memory_nodes.DetectedObjectNode.create",
"unittest.main",
"locobot.test.utils.get_fake_humanpose",
"locobot.test.utils.get_fake_detection"
] | [((1368, 1383), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1381, 1383), False, 'import unittest\n'), ((458, 475), 'locobot.agent.loco_memory.LocoAgentMemory', 'LocoAgentMemory', ([], {}), '()\n', (473, 475), False, 'from locobot.agent.loco_memory import LocoAgentMemory\n'), ((539, 636), 'locobot.test.utils.ge... |
from records_mover.url.s3.awscli import aws_cli
from mock import patch, call
import unittest
class TestAwsCli(unittest.TestCase):
@patch("records_mover.url.s3.awscli.dict")
@patch("records_mover.url.s3.awscli.os")
@patch("records_mover.url.s3.awscli.create_clidriver")
def test_aws_cli(self,
... | [
"mock.call",
"mock.patch",
"records_mover.url.s3.awscli.aws_cli"
] | [((137, 178), 'mock.patch', 'patch', (['"""records_mover.url.s3.awscli.dict"""'], {}), "('records_mover.url.s3.awscli.dict')\n", (142, 178), False, 'from mock import patch, call\n'), ((184, 223), 'mock.patch', 'patch', (['"""records_mover.url.s3.awscli.os"""'], {}), "('records_mover.url.s3.awscli.os')\n", (189, 223), F... |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
# See the LICENSE file in the project root for more information.
from dataclasses import dataclass
from operator import floordiv
from pathlib import Path
from re import search
from tex... | [
"winreg.QueryValueEx",
"winreg.OpenKey",
"pathlib.Path",
"dataclasses.dataclass",
"winreg.ConnectRegistry",
"re.search"
] | [((1161, 1183), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1170, 1183), False, 'from dataclasses import dataclass\n'), ((1462, 1484), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1471, 1484), False, 'from dataclasses import dataclass\n'... |
"""
Copyright BOOSTRY Co., Ltd.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distr... | [
"app.errors.NotSupportedError",
"app.log.get_logger"
] | [((725, 741), 'app.log.get_logger', 'log.get_logger', ([], {}), '()\n', (739, 741), False, 'from app import log, config\n'), ((1058, 1103), 'app.errors.NotSupportedError', 'NotSupportedError', ([], {'method': '"""GET"""', 'url': 'req.path'}), "(method='GET', url=req.path)\n", (1075, 1103), False, 'from app.errors impor... |
from __future__ import absolute_import
import numpy as np
from matplotlib import pyplot as plt
import param
from ...core.options import Store
from ...interface.pandas import DFrame, DataFrameView, pd
from .element import ElementPlot
from .plot import mpl_rc_context
class DFrameViewPlot(ElementPlot):
"""
DF... | [
"param.Number",
"param.ObjectSelector",
"matplotlib.pyplot.gcf",
"param.Boolean",
"param.Parameter"
] | [((651, 801), 'param.Parameter', 'param.Parameter', ([], {'default': '"""square"""', 'doc': '"""\n Aspect ratio defaults to square, \'equal\' or numeric values\n are also supported."""'}), '(default=\'square\', doc=\n """\n Aspect ratio defaults to square, \'equal\' or numeric values\n ar... |
import logging
import os
import torch
from dataset import MonoDataset
from torch.utils.data import DataLoader
from tqdm import tqdm
from transformers import T5ForConditionalGeneration
from transformers import T5Tokenizer
from utils import calculate_bleu_score
from utils import load_config
from finetune_t5 import EXPE... | [
"transformers.T5Tokenizer.from_pretrained",
"dataset.MonoDataset",
"os.listdir",
"utils.calculate_bleu_score",
"tqdm.tqdm",
"os.path.join",
"logging.warning",
"logging.root.setLevel",
"torch.cuda.is_available",
"torch.no_grad",
"logging.info",
"transformers.T5ForConditionalGeneration.from_pret... | [((340, 377), 'logging.root.setLevel', 'logging.root.setLevel', (['logging.NOTSET'], {}), '(logging.NOTSET)\n', (361, 377), False, 'import logging\n'), ((2330, 2362), 'logging.info', 'logging.info', (['"""Loading model..."""'], {}), "('Loading model...')\n", (2342, 2362), False, 'import logging\n'), ((2716, 2791), 'tra... |
import psychopy.core
import psychopy.event
import psychopy.visual
import pandas as pd
import numpy as np
import psychopy.gui
import psychopy.sound
import os
import yaml
import json
from pathlib import Path
import random
from Stimulus import Stimulus
class SBDM_Data:
def __init__(self, df):
self.df = df
... | [
"Stimulus.Stimulus"
] | [((587, 628), 'Stimulus.Stimulus', 'Stimulus', ([], {'name': 'stim', 'show': 'show', 'cued': 'cued'}), '(name=stim, show=show, cued=cued)\n', (595, 628), False, 'from Stimulus import Stimulus\n')] |
import json
import logging
import arcade
from typing import List
logger = logging.getLogger(__name__)
class LevelDataHandler:
DEFAULT_LEVEL_SPEED = -1
def __init__(self):
with open("level_settings.json") as f:
self.level_settings = json.load(f)
def get_level_speed(self, level_number... | [
"logging.getLogger",
"json.load"
] | [((75, 102), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (92, 102), False, 'import logging\n'), ((264, 276), 'json.load', 'json.load', (['f'], {}), '(f)\n', (273, 276), False, 'import json\n')] |
import genologics
import requests_cache
from genologics.lims_utils import lims
import logging
logging.basicConfig(level=logging.INFO)
# NOTE: You must remove the cache file to get fresh data!
requests_cache.install_cache()
samples = lims.get_samples(projectname="Covid19")
limit = None
all_udfs = set()
reported = di... | [
"logging.basicConfig",
"genologics.lims_utils.lims.get_samples",
"requests_cache.install_cache"
] | [((95, 134), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (114, 134), False, 'import logging\n'), ((195, 225), 'requests_cache.install_cache', 'requests_cache.install_cache', ([], {}), '()\n', (223, 225), False, 'import requests_cache\n'), ((237, 276), 'genolo... |
import argparse
from .shared import glob_paths, print_utf8, has_magic
import glob
import os
import sys
from . import chunks
import math
from itertools import count
def tabulate(lens, rows, columns):
data = []
sizes = []
for chunk in chunks(lens, rows):
size = max(chunk) + 1
si... | [
"os.get_terminal_size",
"math.ceil",
"argparse.ArgumentParser",
"sys.stdout.isatty",
"glob.glob",
"os.walk"
] | [((1415, 1469), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""lists directory"""'}), "(description='lists directory')\n", (1438, 1469), False, 'import argparse\n'), ((1654, 1673), 'sys.stdout.isatty', 'sys.stdout.isatty', ([], {}), '()\n', (1671, 1673), False, 'import sys\n'), ((655, 67... |
import torch.nn as nn
import torch.nn.functional as F
import params as P
import utils
class Net(nn.Module):
# Layer names
FLAT = 'flat'
FC5 = 'fc5'
RELU5 = 'relu5'
BN5 = 'bn5'
FC6 = 'fc6'
CLASS_SCORES = FC6 # Symbolic name of the layer providing the class scores as output
def __init__(self, input_shape=P.IN... | [
"torch.nn.functional.dropout",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"torch.nn.functional.relu",
"utils.shape2size"
] | [((478, 512), 'utils.shape2size', 'utils.shape2size', (['self.input_shape'], {}), '(self.input_shape)\n', (494, 512), False, 'import utils\n'), ((591, 622), 'torch.nn.Linear', 'nn.Linear', (['self.input_size', '(300)'], {}), '(self.input_size, 300)\n', (600, 622), True, 'import torch.nn as nn\n'), ((697, 716), 'torch.n... |
# Copyright (c) 2020 NVIDIA Corporation
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, d... | [
"trimesh.transformations.inverse_matrix",
"trimesh.transformations.rotation_matrix",
"numpy.eye",
"trimesh.transformations.compose_matrix",
"trimesh.collision.fcl.Box",
"trimesh.points.PointCloud",
"os.path.join",
"trimesh.load",
"numpy.linspace",
"numpy.array",
"trimesh.util.concatenate",
"tr... | [((1526, 1535), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (1532, 1535), True, 'import numpy as np\n'), ((2319, 2344), 'trimesh.transformations.inverse_matrix', 'tra.inverse_matrix', (['value'], {}), '(value)\n', (2337, 2344), True, 'import trimesh.transformations as tra\n'), ((3131, 3140), 'numpy.eye', 'np.eye', (... |
from datapackage_pipelines_mojp.common.processors.sync import CommonSyncProcessor
from .clearmash.test_convert import get_clearmash_convert_resource_data
from .common import (assert_processor, get_mock_settings, assert_dict, given_empty_elasticsearch_instance,
es_doc)
from tests.clearmash.test_conv... | [
"tests.clearmash.test_convert.get_downloaded_docs",
"tests.clearmash.test_convert.image"
] | [((9276, 9317), 'tests.clearmash.test_convert.get_downloaded_docs', 'get_clearmash_downloaded_docs', (['entity_ids'], {}), '(entity_ids)\n', (9305, 9317), True, 'from tests.clearmash.test_convert import get_downloaded_docs as get_clearmash_downloaded_docs, image as get_clearmash_image\n'), ((9438, 9493), 'tests.clearma... |
import serial
import paho.mqtt.client as mqtt
import json
from datetime import datetime
from o2_helper import GetO2Voltage
import numpy as np
import socket
ser = serial.Serial('/dev/ttyUSB0', 9600)
ser.readline()
ser.readline()
THINGSBOARD_HOST = '192.168.0.200'
ACCESS_TOKEN = socket.gethostname()
client = mqtt.Clien... | [
"o2_helper.GetO2Voltage",
"numpy.linalg.solve",
"paho.mqtt.client.Client",
"json.dumps",
"numpy.array",
"datetime.datetime.now",
"serial.Serial",
"socket.gethostname"
] | [((163, 198), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyUSB0"""', '(9600)'], {}), "('/dev/ttyUSB0', 9600)\n", (176, 198), False, 'import serial\n'), ((280, 300), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (298, 300), False, 'import socket\n'), ((310, 323), 'paho.mqtt.client.Client', 'mqtt.Clien... |
from nettools.core.ipv4_network import IPv4Network
from nettools.utils.ip_class import FourBytesLiteral
from nettools.utils.utils import Utils
from nettools.utils.errors import IPOffNetworkRangeException
from rth.core.errors import *
class NetworkCreator:
"""
This class is the virtual environment which conta... | [
"nettools.utils.ip_class.FourBytesLiteral",
"nettools.core.ipv4_network.IPv4Network",
"nettools.utils.utils.Utils.mask_length_to_literal",
"nettools.utils.utils.Utils.ip_before",
"nettools.utils.utils.Utils.netr_to_literal"
] | [((6393, 6437), 'nettools.utils.utils.Utils.netr_to_literal', 'Utils.netr_to_literal', (['current.network_range'], {}), '(current.network_range)\n', (6414, 6437), False, 'from nettools.utils.utils import Utils\n'), ((6589, 6632), 'nettools.utils.utils.Utils.netr_to_literal', 'Utils.netr_to_literal', (['subnet.network_r... |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import sys
import os
import time
currentUrl = os.path.dirname(__file__)
parentUrl = os.path.abspath(os.path.join(currentUr... | [
"utils.vis.vis_keypoints",
"sys.path.insert",
"config.default.update_config",
"numpy.array",
"utils.transforms.cam2pixel",
"sys.path.append",
"numpy.arange",
"numpy.mean",
"argparse.ArgumentParser",
"numpy.stack",
"numpy.concatenate",
"torchvision.transforms.ToTensor",
"utils.preprocessing.t... | [((244, 269), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (259, 269), False, 'import os\n'), ((335, 361), 'sys.path.append', 'sys.path.append', (['parentUrl'], {}), '(parentUrl)\n', (350, 361), False, 'import sys\n'), ((298, 333), 'os.path.join', 'os.path.join', (['currentUrl', 'os.pardir'... |
# !/usr/bin/env python
from distutils.core import setup
setup(
name='warpify',
packages=[],
version='0.0.0',
description='Python image warping plugin.',
maintainer='<NAME>',
maintainer_email='<EMAIL>',
license='BSD 3-Clause',
url='https://github.com/transformify-plugins/warpify',
ke... | [
"distutils.core.setup"
] | [((57, 1133), 'distutils.core.setup', 'setup', ([], {'name': '"""warpify"""', 'packages': '[]', 'version': '"""0.0.0"""', 'description': '"""Python image warping plugin."""', 'maintainer': '"""<NAME>"""', 'maintainer_email': '"""<EMAIL>"""', 'license': '"""BSD 3-Clause"""', 'url': '"""https://github.com/transformify-pl... |
#!/usr/bin/python
import cv2
cap = cv2.VideoCapture(0)
ret = cap.set(cv2.CAP_PROP_FRAME_WIDTH,640)
ret = cap.set(cv2.CAP_PROP_FRAME_HEIGHT,480)
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
f = open('dataset.csv','a')
actual_distance = 30 #cm
def drawBoxAndWriteText(findfac... | [
"cv2.rectangle",
"cv2.imshow",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.CascadeClassifier",
"cv2.waitKey"
] | [((39, 58), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (55, 58), False, 'import cv2\n'), ((166, 226), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""haarcascade_frontalface_default.xml"""'], {}), "('haarcascade_frontalface_default.xml')\n", (187, 226), False, 'import cv2\n'), ((1205, 1228)... |
#!/usr/bin/env python
# -*- coding: utf8 -*-
import astropy.io.fits as pyfits
import numpy as np
import matplotlib.pyplot as plt
import cv2
from matplotlib import gridspec
filename = '/home/bquint/Data/SAM/Lateral_Glowing/DARK180s.fits'
data = pyfits.getdata(filename=filename)
print(data.mean())
print(np.median(data... | [
"matplotlib.pyplot.imshow",
"numpy.mean",
"numpy.median",
"cv2.medianBlur",
"numpy.max",
"astropy.io.fits.getdata",
"numpy.min",
"matplotlib.pyplot.show"
] | [((247, 280), 'astropy.io.fits.getdata', 'pyfits.getdata', ([], {'filename': 'filename'}), '(filename=filename)\n', (261, 280), True, 'import astropy.io.fits as pyfits\n'), ((928, 951), 'cv2.medianBlur', 'cv2.medianBlur', (['data', '(5)'], {}), '(data, 5)\n', (942, 951), False, 'import cv2\n'), ((1148, 1171), 'matplotl... |
# Author: <NAME> - <EMAIL>
# Code refactoring: <NAME>, <NAME> 2020
# Copyright (c) 2016
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) a... | [
"torch.unique",
"torch.max",
"torch.sum",
"torch.zeros"
] | [((4811, 4846), 'torch.zeros', 'torch.zeros', (['covCovSize', 'covCovSize'], {}), '(covCovSize, covCovSize)\n', (4822, 4846), False, 'import torch\n'), ((1713, 1727), 'torch.zeros', 'torch.zeros', (['(2)'], {}), '(2)\n', (1724, 1727), False, 'import torch\n'), ((1749, 1798), 'torch.sum', 'torch.sum', (['(ind_ijkl[ind_c... |
import unittest2
import datetime
from consts.event_type import EventType
from datafeeds.usfirst_legacy_event_details_parser import UsfirstLegacyEventDetailsParser
@unittest2.skip
class TestUsfirstLegacyEventDetailsParser(unittest2.TestCase):
def test_parse2012ct(self):
with open('test_data/usfirst_legacy... | [
"datetime.datetime"
] | [((739, 775), 'datetime.datetime', 'datetime.datetime', (['(2012)', '(3)', '(29)', '(0)', '(0)'], {}), '(2012, 3, 29, 0, 0)\n', (756, 775), False, 'import datetime\n'), ((821, 857), 'datetime.datetime', 'datetime.datetime', (['(2012)', '(3)', '(31)', '(0)', '(0)'], {}), '(2012, 3, 31, 0, 0)\n', (838, 857), False, 'impo... |
from typing import Optional
from boa3 import constants
from boa3.neo.vm.opcode.Opcode import Opcode
from boa3.neo.vm.opcode.OpcodeInformation import OpcodeInformation
class OpcodeInfo:
@classmethod
def get_info(cls, opcode: Opcode) -> Optional[OpcodeInformation]:
"""
Gets a binary operation g... | [
"boa3.neo.vm.opcode.OpcodeInformation.OpcodeInformation"
] | [((674, 711), 'boa3.neo.vm.opcode.OpcodeInformation.OpcodeInformation', 'OpcodeInformation', (['Opcode.PUSHINT8', '(1)'], {}), '(Opcode.PUSHINT8, 1)\n', (691, 711), False, 'from boa3.neo.vm.opcode.OpcodeInformation import OpcodeInformation\n'), ((728, 766), 'boa3.neo.vm.opcode.OpcodeInformation.OpcodeInformation', 'Opc... |
# -*- coding: utf-8 -*-
"""
Colour Blindness Plotting
=========================
Defines the colour blindness plotting objects:
- :func:`plot_cvd_simulation_Machado2009`
"""
from __future__ import division
from colour.blindness import cvd_matrix_Machado2009
from colour.plotting import CONSTANTS_COLOUR_STYLE, plot_... | [
"colour.utilities.dot_vector",
"colour.blindness.cvd_matrix_Machado2009",
"colour.plotting.override_style"
] | [((692, 708), 'colour.plotting.override_style', 'override_style', ([], {}), '()\n', (706, 708), False, 'from colour.plotting import CONSTANTS_COLOUR_STYLE, plot_image, override_style\n'), ((2368, 2412), 'colour.blindness.cvd_matrix_Machado2009', 'cvd_matrix_Machado2009', (['deficiency', 'severity'], {}), '(deficiency, ... |
import math
def lcm(a: int, b: int) -> int:
return a // math.gcd(a, b) * b
def solve() -> int:
n, a, b, x, y, z = map(int, input().split())
y = min(y, a * x)
z = min(z, b * x)
if a > b:
a, b = b, a
y, z = z, y
# if y >= \sqrt{N}, we can brute force a because a... | [
"math.gcd"
] | [((66, 80), 'math.gcd', 'math.gcd', (['a', 'b'], {}), '(a, b)\n', (74, 80), False, 'import math\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#Copyright 2015 RAPP
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicabl... | [
"os.makedirs",
"rapp_utilities.Utilities",
"os.path.isfile",
"os.path.isdir",
"scipy.io.wavfile.read",
"os.system",
"os.path.expanduser"
] | [((914, 925), 'rapp_utilities.Utilities', 'Utilities', ([], {}), '()\n', (923, 925), False, 'from rapp_utilities import Utilities\n'), ((3671, 3771), 'os.system', 'os.system', (["('sox ' + new_audio + ' -t null /dev/null trim 0.5 2.5 noiseprof ' +\n noise_profile_uri)"], {}), "('sox ' + new_audio + ' -t null /dev/nu... |
import argparse
from combined_network import *
if __name__=="__main__":
parser = argparse.ArgumentParser(description='NN', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('mode', type=str, default='reg', choices=['train', 'test'],
help='choose training the n... | [
"argparse.ArgumentParser"
] | [((87, 189), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""NN"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='NN', formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n", (110, 189), False, 'import argparse\n')] |
from array_stack import ArrayStack
def delimiter_matched_v1(expr):
"""Return True if all delimiters are properly match; False otherwise.
>>> delimiter_matched_v1('[(2+x)*(3+y)]')
True
>>> delimiter_matched_v1('{[{(xbcd))]}')
False
"""
left, right = '({[', ')}]'
S = ArrayStack()
... | [
"doctest.testmod",
"array_stack.ArrayStack"
] | [((302, 314), 'array_stack.ArrayStack', 'ArrayStack', ([], {}), '()\n', (312, 314), False, 'from array_stack import ArrayStack\n'), ((709, 721), 'array_stack.ArrayStack', 'ArrayStack', ([], {}), '()\n', (719, 721), False, 'from array_stack import ArrayStack\n'), ((1005, 1022), 'doctest.testmod', 'doctest.testmod', ([],... |
#
# This file is part of LiteX (Adapted from Migen for LiteX usage).
#
# This file is Copyright (c) 2013-2014 <NAME> <<EMAIL>>
# This file is Copyright (c) 2013-2021 <NAME> <<EMAIL>>
# This file is Copyright (c) 2013-2017 <NAME> <<EMAIL>>
# This file is Copyright (c) 2016-2018 whitequark <<EMAIL>>
# This file is Copyri... | [
"litex.build.tools.generated_banner",
"litex.gen.fhdl.memory.memory_emit_verilog",
"collections.defaultdict",
"operator.itemgetter",
"migen.fhdl.conv_output.ConvOutput"
] | [((20872, 20884), 'migen.fhdl.conv_output.ConvOutput', 'ConvOutput', ([], {}), '()\n', (20882, 20884), False, 'from migen.fhdl.conv_output import ConvOutput\n'), ((22334, 22356), 'litex.build.tools.generated_banner', 'generated_banner', (['"""//"""'], {}), "('//')\n", (22350, 22356), False, 'from litex.build.tools impo... |
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Free University
# Berlin, 14195 Berlin, Germany.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source ... | [
"pyemma.coordinates.transform.tica.TICA",
"pyemma.coordinates.pipelines.Discretizer",
"pyemma.coordinates.clustering.kmeans.KmeansClustering",
"pyemma.util.log.getLogger",
"pyemma.coordinates.data.frames_from_file.frames_from_file",
"pyemma.coordinates.clustering.assign.AssignCenters",
"pyemma.coordinat... | [((3302, 3331), 'pyemma.util.log.getLogger', '_getLogger', (['"""coordinates.api"""'], {}), "('coordinates.api')\n", (3312, 3331), True, 'from pyemma.util.log import getLogger as _getLogger\n'), ((5360, 5382), 'pyemma.coordinates.data.featurizer.MDFeaturizer', '_MDFeaturizer', (['topfile'], {}), '(topfile)\n', (5373, 5... |
# import the necessary package
from app.main import db
from .user_service import get_a_user
from app.main.model.incident import Incident
def save_incident(data):
user = get_a_user(data['username'])
incident = Incident(
uid=user.id,
student=data['student'],
quiz=data['quiz'],
at... | [
"app.main.db.session.commit",
"app.main.model.incident.Incident.query.filter_by",
"app.main.model.incident.Incident",
"app.main.db.session.add"
] | [((219, 451), 'app.main.model.incident.Incident', 'Incident', ([], {'uid': 'user.id', 'student': "data['student']", 'quiz': "data['quiz']", 'attempt': "data['attempt']", 'captured_time': "data['captured_time']", 'action': "data['action']", 'image_file': "data['image_file']", 'registered_on': "data['registred_on']"}), "... |
#!/usr/bin/env python
# Based on Stack Overflow answer: http://stackoverflow.com/questions/30656162/migrations-in-stand-alone-django-app
import os
import sys
import django
from django.conf import settings
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__fi... | [
"os.path.abspath",
"django.setup",
"os.path.join",
"django.core.management.call_command"
] | [((3216, 3230), 'django.setup', 'django.setup', ([], {}), '()\n', (3228, 3230), False, 'import django\n'), ((3231, 3264), 'django.core.management.call_command', 'call_command', (['"""test"""', '"""slothauth"""'], {}), "('test', 'slothauth')\n", (3243, 3264), False, 'from django.core.management import call_command\n'), ... |
#!/usr/bin/env python
"@package ReadForceField Read force field from a file and print information out."
from forcebalance.parser import parse_inputs
from forcebalance.forcefield import FF
from forcebalance.nifty import printcool
from sys import argv
import os
import numpy as np
def main():
## Set some basic opti... | [
"numpy.array",
"forcebalance.forcefield.FF",
"forcebalance.parser.parse_inputs"
] | [((451, 472), 'forcebalance.parser.parse_inputs', 'parse_inputs', (['argv[1]'], {}), '(argv[1])\n', (463, 472), False, 'from forcebalance.parser import parse_inputs\n'), ((484, 495), 'forcebalance.forcefield.FF', 'FF', (['options'], {}), '(options)\n', (486, 495), False, 'from forcebalance.forcefield import FF\n'), ((5... |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
fro... | [
"pulumi.getter",
"pulumi.set",
"pulumi.get"
] | [((3482, 3521), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""maxPartitionCount"""'}), "(name='maxPartitionCount')\n", (3495, 3521), False, 'import pulumi\n'), ((3885, 3924), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""minPartitionCount"""'}), "(name='minPartitionCount')\n", (3898, 3924), False, 'import p... |
import numpy as np  # needed for the np.array call below
import eigenlib as lib
lib.geefmeinfo()
# get the documentation (alt-tab also works!)
print(lib.plus3.__doc__)
print()
print(lib.plus3(8.))
print(lib.plus3(np.array([2., 109.])))
print(lib.plus3('stykje tekst'))
| [
"eigenlib.geefmeinfo",
"eigenlib.plus3"
] | [((24, 40), 'eigenlib.geefmeinfo', 'lib.geefmeinfo', ([], {}), '()\n', (38, 40), True, 'import eigenlib as lib\n'), ((127, 141), 'eigenlib.plus3', 'lib.plus3', (['(8.0)'], {}), '(8.0)\n', (136, 141), True, 'import eigenlib as lib\n'), ((187, 212), 'eigenlib.plus3', 'lib.plus3', (['"""stykje tekst"""'], {}), "('stykje t... |
# diagram.py
from diagrams import Diagram, Cluster
from diagrams.aws.network import CloudMap, VPC
with Diagram("AWS Tenancy", show=False, direction="RL"):
with Cluster("dns"):
CloudMap("private dns namespace") << VPC("vpc")
CloudMap("public dns namespace")
| [
"diagrams.aws.network.VPC",
"diagrams.Diagram",
"diagrams.aws.network.CloudMap",
"diagrams.Cluster"
] | [((104, 154), 'diagrams.Diagram', 'Diagram', (['"""AWS Tenancy"""'], {'show': '(False)', 'direction': '"""RL"""'}), "('AWS Tenancy', show=False, direction='RL')\n", (111, 154), False, 'from diagrams import Diagram, Cluster\n'), ((166, 180), 'diagrams.Cluster', 'Cluster', (['"""dns"""'], {}), "('dns')\n", (173, 180), Fa... |
"""
Finds the minimum difference of a pair of pentagonal numbers
Author: <NAME>
"""
import math
"""
Returns the minimum difference of the pair of pentagonal numbers
"""
def minimum_difference(p):
d = float('inf')
for i in range(len(p)-1):
if p[i+1]-p[i]>d:
print('Found the limit, this is d... | [
"math.sqrt"
] | [((479, 508), 'math.sqrt', 'math.sqrt', (['(0.5 ** 2 + 6 * tmp)'], {}), '(0.5 ** 2 + 6 * tmp)\n', (488, 508), False, 'import math\n'), ((609, 643), 'math.sqrt', 'math.sqrt', (['(0.5 ** 2 + 6 * tmp_plus)'], {}), '(0.5 ** 2 + 6 * tmp_plus)\n', (618, 643), False, 'import math\n')] |
import sys
def main():
n, m = map(int, sys.stdin.readline().split())
cnt = [0, 0, 0]
q, r = divmod(m, 4)
    # Since 3 + 3 == 2 + 4, we only need to match parity for the elderly.
if r == 3:
cnt[1] += 1
n -= 1
elif r == 1:
cnt[0] += 1
cnt[1] += 1
q -= 1
n -= 2
el... | [
"sys.stdin.readline",
"sys.exit"
] | [((622, 632), 'sys.exit', 'sys.exit', ([], {}), '()\n', (630, 632), False, 'import sys\n'), ((49, 69), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (67, 69), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
'''
from __future__ import absolute_import, unicode_literals
import ldap
from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
from .common import * # noqa
# SECRET CONFIGURATION
# -----------------------------------------------... | [
"django_auth_ldap.config.GroupOfNamesType",
"django_auth_ldap.config.LDAPSearch"
] | [((2376, 2450), 'django_auth_ldap.config.LDAPSearch', 'LDAPSearch', (['AUTH_LDAP_USER_SEARCH_DN', 'ldap.SCOPE_SUBTREE', '"""(uid=%(user)s)"""'], {}), "(AUTH_LDAP_USER_SEARCH_DN, ldap.SCOPE_SUBTREE, '(uid=%(user)s)')\n", (2386, 2450), False, 'from django_auth_ldap.config import LDAPSearch, GroupOfNamesType\n'), ((2476, ... |
# -*- coding: utf-8 -*-
from pandas_datareader import data
import pandas as pd
import datetime as dt
class ClassifyBar:
__near = 0.005 #Definition of "close enough" used in is_doji method
def __init__(self, open, close, high, low, volume, date = dt.datetime(2018, 5, 3)):
self.open = op... | [
"datetime.datetime"
] | [((272, 295), 'datetime.datetime', 'dt.datetime', (['(2018)', '(5)', '(3)'], {}), '(2018, 5, 3)\n', (283, 295), True, 'import datetime as dt\n')] |
import torch
from torch import nn
from neural.modules import LazyDelta
########## Baselines #############
# this covers MINEBED and SG-BOED (ACE estimator with prior as proposal)
class DesignBaseline(nn.Module):
def __init__(self, design_dim):
super().__init__()
self.register_buffer("prototype", t... | [
"torch.nn.Identity",
"torch.distributions.Normal",
"torch.zeros",
"torch.ones"
] | [((961, 974), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (972, 974), False, 'from torch import nn\n'), ((996, 1030), 'torch.distributions.Normal', 'torch.distributions.Normal', (['(0)', '(0.5)'], {}), '(0, 0.5)\n', (1022, 1030), False, 'import torch\n'), ((319, 342), 'torch.zeros', 'torch.zeros', (['design_d... |
#encoding=utf-8
from nltk.corpus import stopwords
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import FeatureUnion
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.linear_model import Ridge
from scipy.sparse import hstack, csr_matrix
import pandas as pd
i... | [
"sklearn.cross_validation.KFold",
"sklearn.preprocessing.LabelEncoder",
"pandas.read_csv",
"re.compile",
"numpy.log",
"numpy.array",
"xgboost.DMatrix",
"re.split",
"nltk.corpus.stopwords.words",
"xgboost.train",
"numpy.empty",
"numpy.concatenate",
"pandas.DataFrame",
"scipy.sparse.csr_matr... | [((2487, 2499), 'gc.collect', 'gc.collect', ([], {}), '()\n', (2497, 2499), False, 'import gc, re\n'), ((2525, 2580), 'pandas.DataFrame', 'pd.DataFrame', (['train_features'], {'columns': "['image_quality']"}), "(train_features, columns=['image_quality'])\n", (2537, 2580), True, 'import pandas as pd\n'), ((2605, 2659), ... |
# -*- coding: utf-8 -*-
r"""
C-Finite Sequences
C-finite infinite sequences satisfy homogeneous linear recurrences with constant coefficients:
.. MATH::
a_{n+d} = c_0a_n + c_1a_{n+1} + \cdots + c_{d-1}a_{n+d-1}, \quad d>0.
CFiniteSequences are completely defined by their ordinary generating function (o.g.f., whi... | [
"sage.matrix.berlekamp_massey.berlekamp_massey",
"sage.arith.all.binomial",
"six.moves.range",
"sage.matrix.constructor.Matrix",
"sage.rings.qqbar.QQbar",
"sage.structure.element.FieldElement.__init__",
"six.add_metaclass",
"sage.rings.fraction_field.FractionField",
"sage.matrix.constructor.Matrix.b... | [((5698, 5748), 'six.add_metaclass', 'add_metaclass', (['InheritComparisonClasscallMetaclass'], {}), '(InheritComparisonClasscallMetaclass)\n', (5711, 5748), False, 'from six import add_metaclass\n'), ((5599, 5631), 'sage.rings.polynomial.polynomial_ring_constructor.PolynomialRing', 'PolynomialRing', (['base_ring', 'na... |
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 5 10:02:44 2018
@author: wuxiaochuna
"""
import os
import numpy as np
import time
import PIL
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--path_val', type=str, default=r'..\2011_trainaug\raw_segmentation_results',
help='T... | [
"PIL.Image.fromarray",
"os.listdir",
"PIL.Image.open",
"argparse.ArgumentParser",
"numpy.argmax",
"numpy.array",
"numpy.zeros",
"time.time",
"numpy.bincount"
] | [((169, 194), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (192, 194), False, 'import argparse\n'), ((990, 1022), 'os.listdir', 'os.listdir', (['FLAGS.FLAGS.path_val'], {}), '(FLAGS.FLAGS.path_val)\n', (1000, 1022), False, 'import os\n'), ((1662, 1673), 'time.time', 'time.time', ([], {}), '()... |
#import shared
#import time
#from multiprocessing import Pool, cpu_count
import hashlib
from struct import unpack, pack
import sys
from shared import config, frozen
import shared
#import os
def _set_idle():
if 'linux' in sys.platform:
import os
os.nice(20) # @UndefinedVariable
else:
tr... | [
"sys.getwindowsversion",
"win32api.GetCurrentProcessId",
"os.nice",
"shared.config.getint",
"time.sleep",
"multiprocessing.cpu_count",
"struct.pack",
"win32process.SetPriorityClass",
"multiprocessing.Pool",
"win32api.OpenProcess"
] | [((1663, 1688), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'pool_size'}), '(processes=pool_size)\n', (1667, 1688), False, 'from multiprocessing import Pool, cpu_count\n'), ((266, 277), 'os.nice', 'os.nice', (['(20)'], {}), '(20)\n', (273, 277), False, 'import os\n'), ((1435, 1446), 'multiprocessing.cpu_count', ... |
from hknweb.academics.views.base_viewset import AcademicEntityViewSet
from hknweb.academics.models import Instructor
from hknweb.academics.serializers import InstructorSerializer
class InstructorViewSet(AcademicEntityViewSet):
queryset = Instructor.objects.all()
serializer_class = InstructorSerializer
| [
"hknweb.academics.models.Instructor.objects.all"
] | [((246, 270), 'hknweb.academics.models.Instructor.objects.all', 'Instructor.objects.all', ([], {}), '()\n', (268, 270), False, 'from hknweb.academics.models import Instructor\n')] |