| code (string, 22–1.05M chars) | apis (list, 1–3.31k entries) | extract_api (string, 75–3.25M chars) |
|---|---|---|
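Each row pairs a code sample with the list of fully qualified APIs it uses and a stringified list of per-call-site tuples; judging from the rows below, the first two fields of each tuple appear to be the character span of the call and its qualified API name, followed by the local call expression, its arguments, and the originating import statement. Below is a minimal sketch of reading one such row back into Python objects, assuming only the standard library; the abbreviated `row` dict is hypothetical and the field semantics are inferred rather than documented:

```python
import ast

# Hypothetical abbreviated row, shaped like the first entry in the table below.
row = {
    "code": "import scipy as _sp\n# ...",
    "apis": ["scipy.prod"],
    "extract_api": "[((547, 562), 'scipy.prod', '_sp.prod', (['shape'], {}), "
                   "'(shape)\\n', (555, 562), True, 'import scipy as _sp\\n')]",
}

# The stringified list of tuples round-trips with ast.literal_eval,
# which safely parses tuples, lists, dicts, strings, numbers, and bools.
records = ast.literal_eval(row["extract_api"])
for record in records:
    call_span, qualified_name = record[0], record[1]
    print(call_span, qualified_name)  # -> (547, 562) scipy.prod
```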
r"""
===============================================================================
pore_topology -- functions for monitoring and adjusting topology
===============================================================================
"""
import scipy as _sp
def get_subscripts(network, shape, **kwargs):
r"""
Retu... | [
"scipy.prod",
"scipy.ones",
"scipy.empty",
"scipy.vstack",
"scipy.average",
"scipy.rand",
"scipy.shape"
] | [((547, 562), 'scipy.prod', '_sp.prod', (['shape'], {}), '(shape)\n', (555, 562), True, 'import scipy as _sp\n'), ((2670, 2701), 'scipy.average', '_sp.average', (['T_nums[T_nums > 1]'], {}), '(T_nums[T_nums > 1])\n', (2681, 2701), True, 'import scipy as _sp\n'), ((684, 700), 'scipy.empty', '_sp.empty', (['shape'], {}),... |
# Copyright (C) 2007 <NAME>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed ... | [
"SpiffWorkflow.Operators.valueof",
"TaskSpec.TaskSpec.__init__",
"SpiffWorkflow.Task.Task.Iterator",
"TaskSpec.TaskSpec._on_complete_hook",
"SpiffWorkflow.Exception.WorkflowException"
] | [((2222, 2269), 'TaskSpec.TaskSpec.__init__', 'TaskSpec.__init__', (['self', 'parent', 'name'], {}), '(self, parent, name, **kwargs)\n', (2239, 2269), False, 'from TaskSpec import TaskSpec\n'), ((2662, 2711), 'SpiffWorkflow.Task.Task.Iterator', 'Task.Iterator', (['my_task', 'my_task.NOT_FINISHED_MASK'], {}), '(my_task,... |
import os
import math
from pathlib import Path
import clip
import torch
from PIL import Image
import numpy as np
import pandas as pd
from common import common_path
# Set the path to the photos
# dataset_version = "lite" # Use "lite" or "full"
# photos_path = Path("unsplash-dataset") / dataset_version / "photos"
ph... | [
"os.path.exists",
"PIL.Image.open",
"pandas.read_csv",
"pathlib.Path",
"os.path.join",
"torch.cuda.is_available",
"numpy.concatenate",
"clip.load",
"pandas.DataFrame",
"torch.no_grad",
"numpy.load",
"numpy.save"
] | [((332, 401), 'os.path.join', 'os.path.join', (['common_path.project_dir', '"""unsplash-dataset/lite/photos"""'], {}), "(common_path.project_dir, 'unsplash-dataset/lite/photos')\n", (344, 401), False, 'import os\n'), ((659, 695), 'clip.load', 'clip.load', (['"""ViT-B/32"""'], {'device': 'device'}), "('ViT-B/32', device... |
import numpy as np
import datajoint as dj
from treadmill_pipeline import project_database_prefix
from ephys.utilities import ingestion, time_sync
from ephys import get_schema_name
schema = dj.schema(project_database_prefix + 'treadmill_pipeline')
reference = dj.create_virtual_module('reference', get_schema_name('re... | [
"ephys.utilities.ingestion.find_input_directory",
"ephys.utilities.ingestion.get_optitrack",
"numpy.array",
"ephys.utilities.ingestion.get_recordings",
"datajoint.schema",
"ephys.get_schema_name"
] | [((192, 249), 'datajoint.schema', 'dj.schema', (["(project_database_prefix + 'treadmill_pipeline')"], {}), "(project_database_prefix + 'treadmill_pipeline')\n", (201, 249), True, 'import datajoint as dj\n'), ((301, 329), 'ephys.get_schema_name', 'get_schema_name', (['"""reference"""'], {}), "('reference')\n", (316, 329... |
from rest_framework import serializers
from management.models.main import MailSetting, LdapSetting, ShopSetting, LegalSetting, Header, CacheSetting, Footer
class MailSettingSerializer(serializers.ModelSerializer):
id = serializers.ReadOnlyField()
class Meta:
model = MailSetting
fields = '__a... | [
"rest_framework.serializers.ReadOnlyField"
] | [((226, 253), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (251, 253), False, 'from rest_framework import serializers\n'), ((395, 422), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (420, 422), False, 'from rest_framework import ... |
"""Unit test optimal lineup functions."""
import unittest
import copy
import ff_espn_api # pylint: disable=import-error
from collections import defaultdict
from fantasy_coty.main import add_to_optimal
class TestAddToOptimal(unittest.TestCase):
"""Test add_to_optimal() function."""
def setUp(self):
"... | [
"fantasy_coty.main.add_to_optimal",
"collections.defaultdict",
"ff_espn_api.BoxPlayer",
"copy.deepcopy",
"unittest.main"
] | [((9838, 9853), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9851, 9853), False, 'import unittest\n'), ((404, 420), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (415, 420), False, 'from collections import defaultdict\n'), ((676, 693), 'collections.defaultdict', 'defaultdict', (['list'], {... |
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession, functions, types
from pyspark.sql.functions import date_format
from pyspark.sql.functions import year, month, dayofmonth
import sys
import json
import argparse
assert sys.version_info >= (3, 5) # make sure we have Python 3.5+
# add more ... | [
"pyspark.sql.functions.arrays_zip",
"argparse.ArgumentParser",
"pyspark.sql.functions.explode",
"pyspark.sql.functions.col",
"pyspark.sql.functions.year",
"pyspark.sql.SparkSession.builder.appName",
"pyspark.sql.functions.count"
] | [((4544, 4569), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4567, 4569), False, 'import argparse\n'), ((644, 672), 'pyspark.sql.functions.year', 'year', (["posts['creation_date']"], {}), "(posts['creation_date'])\n", (648, 672), False, 'from pyspark.sql.functions import year, month, dayofmo... |
import pandas as pd
from suzieq.engines.pandas.engineobj import SqPandasEngine
from suzieq.sqobjects import get_sqobject
class TableObj(SqPandasEngine):
@staticmethod
def table_name():
return 'tables'
def get(self, **kwargs):
"""Show the known tables for which we have information"""
... | [
"pandas.DataFrame",
"suzieq.sqobjects.get_sqobject",
"pandas.DataFrame.from_dict"
] | [((377, 391), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (389, 391), True, 'import pandas as pd\n'), ((1343, 1373), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['tables'], {}), '(tables)\n', (1365, 1373), True, 'import pandas as pd\n'), ((551, 570), 'suzieq.sqobjects.get_sqobject', 'get_sqobject... |
import datetime
from typing import Optional, Type, Generic, List, Tuple
from ..base import T
from .generic import PersistenceVariable
class InMemoryPersistenceVariable(PersistenceVariable, Generic[T]):
def __init__(self, type_: Type[T], keep: datetime.timedelta):
super().__init__(type_, log=True)
... | [
"datetime.datetime.now"
] | [((595, 639), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (616, 639), False, 'import datetime\n'), ((492, 536), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (513, 536), False, 'import d... |
"""
Author: <NAME> - <EMAIL>
Description: Transplant from "https://github.com/xunhuang1995/AdaIN-style/blob/master/train.lua"
"""
import functools
import os
from collections import OrderedDict
import torch
import torch.nn as nn
from torchvision.models import vgg19
from datasets.utils import denorm
from mo... | [
"torch.nn.UpsamplingNearest2d",
"collections.OrderedDict",
"models.helpers.init_weights",
"torch.rand",
"torchvision.models.vgg19",
"torch.nn.Sequential",
"torch.nn.ReflectionPad2d",
"torch.load",
"torch.nn.Conv2d",
"torch.cuda.is_available",
"functools.partial",
"datasets.utils.denorm",
"mo... | [((8174, 8199), 'torch.rand', 'torch.rand', (['(bs, 3, w, h)'], {}), '((bs, 3, w, h))\n', (8184, 8199), False, 'import torch\n'), ((8285, 8323), 'models.blocks.AdaptiveInstanceNorm2d', 'AdaptiveInstanceNorm2d', (['e.out_channels'], {}), '(e.out_channels)\n', (8307, 8323), False, 'from models.blocks import AdaptiveInsta... |
# Generated by Django 3.0.7 on 2020-10-14 07:46
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("posthog", "0086_team_session_recording_opt_in"),
]
operations = [
migrations.AlterField(
model_name... | [
"django.db.models.DateTimeField"
] | [((384, 450), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'null': '(True)'}), '(default=django.utils.timezone.now, null=True)\n', (404, 450), False, 'from django.db import migrations, models\n')] |
"""
Create a cartoon of a tumor given the frequencies of different genotypes.
"""
from .util import *
import pandas as pd
import matplotlib.pyplot as plt
import click
import os
from pathlib import Path
from pymuller import muller
@click.command(help="Plot the evolution of a tumor.")
@click.argument(
"genotype-... | [
"pandas.read_csv",
"pathlib.Path",
"click.option",
"os.path.join",
"pymuller.muller",
"click.Path",
"matplotlib.pyplot.axis",
"click.command",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((236, 288), 'click.command', 'click.command', ([], {'help': '"""Plot the evolution of a tumor."""'}), "(help='Plot the evolution of a tumor.')\n", (249, 288), False, 'import click\n'), ((475, 561), 'click.option', 'click.option', (['"""-c"""', '"""--cells"""'], {'default': '(100)', 'help': '"""Number of cells in slic... |
from django.conf.urls import url
from .views import get_online_users, set_online
app_name = 'online_users'
urlpatterns = [
url(r'^so/$', set_online),
url(r'^ous/$', get_online_users),
]
| [
"django.conf.urls.url"
] | [((129, 153), 'django.conf.urls.url', 'url', (['"""^so/$"""', 'set_online'], {}), "('^so/$', set_online)\n", (132, 153), False, 'from django.conf.urls import url\n'), ((160, 191), 'django.conf.urls.url', 'url', (['"""^ous/$"""', 'get_online_users'], {}), "('^ous/$', get_online_users)\n", (163, 191), False, 'from django... |
#!/usr/bin/py
import pandas as pd
import os
# Holds investing.com candlestick patterns
class CSPatternList:
def __init__(self, path):
self.data = None
with os.scandir(path) as entries:
for e in entries:
if e.is_file() and os.path.splitext(e.path)[1] == '.csv':
... | [
"os.scandir",
"os.path.splitext",
"pandas.read_csv"
] | [((179, 195), 'os.scandir', 'os.scandir', (['path'], {}), '(path)\n', (189, 195), False, 'import os\n'), ((390, 409), 'pandas.read_csv', 'pd.read_csv', (['e.path'], {}), '(e.path)\n', (401, 409), True, 'import pandas as pd\n'), ((273, 297), 'os.path.splitext', 'os.path.splitext', (['e.path'], {}), '(e.path)\n', (289, 2... |
from setuptools import setup
setup(
name='netspeed',
version='0.1',
py_modules=['netspeed'],
install_requires=[
'Click',
'pyspeedtest'
],
entry_points='''
[console_scripts]
netspeed=netspeed:cli
''',
)
| [
"setuptools.setup"
] | [((30, 226), 'setuptools.setup', 'setup', ([], {'name': '"""netspeed"""', 'version': '"""0.1"""', 'py_modules': "['netspeed']", 'install_requires': "['Click', 'pyspeedtest']", 'entry_points': '"""\n [console_scripts]\n netspeed=netspeed:cli\n """'}), '(name=\'netspeed\', version=\'0.1\', py_modules=[\'... |
from django.contrib import admin
from .models import (Dish, Payments, Order, Delivery, OrderItem)
admin.site.register(Dish)
admin.site.register(Payments)
admin.site.register(Order)
admin.site.register(Delivery)
admin.site.register(OrderItem)
| [
"django.contrib.admin.site.register"
] | [((99, 124), 'django.contrib.admin.site.register', 'admin.site.register', (['Dish'], {}), '(Dish)\n', (118, 124), False, 'from django.contrib import admin\n'), ((125, 154), 'django.contrib.admin.site.register', 'admin.site.register', (['Payments'], {}), '(Payments)\n', (144, 154), False, 'from django.contrib import adm... |
import os
from dataclasses import dataclass, field
from typing import AnyStr, Dict, Optional
from urllib.parse import urljoin
@dataclass
class FreshChatConfiguration:
"""
Class represents the base configuration for Freshchat
"""
app_id: str
token: str = field(repr=False)
default_channel_id: O... | [
"os.environ.get",
"dataclasses.field"
] | [((277, 294), 'dataclasses.field', 'field', ([], {'repr': '(False)'}), '(repr=False)\n', (282, 294), False, 'from dataclasses import dataclass, field\n'), ((335, 354), 'dataclasses.field', 'field', ([], {'default': 'None'}), '(default=None)\n', (340, 354), False, 'from dataclasses import dataclass, field\n'), ((400, 41... |
from math import log2
import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange
from x_transformers import Encoder, Decoder
# helpers
def exists(val):
return val is not None
def masked_mean(t, mask, dim = 1):
t = t.masked_fill(~mask[:, :, None], 0.)
return t.... | [
"torch.nn.ReLU",
"torch.nn.functional.mse_loss",
"torch.nn.functional.gumbel_softmax",
"x_transformers.Encoder",
"x_transformers.Decoder",
"torch.nn.LayerNorm",
"torch.nn.Conv2d",
"torch.cat",
"torch.arange",
"torch.einsum",
"torch.nn.functional.cross_entropy",
"torch.nn.Linear",
"torch.nn.f... | [((1337, 1366), 'torch.nn.Embedding', 'nn.Embedding', (['num_tokens', 'dim'], {}), '(num_tokens, dim)\n', (1349, 1366), False, 'from torch import nn, einsum\n'), ((1663, 1696), 'torch.nn.functional.gumbel_softmax', 'F.gumbel_softmax', (['logits'], {'tau': '(1.0)'}), '(logits, tau=1.0)\n', (1679, 1696), True, 'import to... |
"Test list input."
# For support of python 2.5
from __future__ import with_statement
import numpy as np
from numpy.testing import assert_equal, assert_array_almost_equal
import bottleneck as bn
# ---------------------------------------------------------------------------
# Check that functions can handle list input
... | [
"numpy.testing.assert_array_almost_equal",
"bottleneck.slow.nn",
"numpy.array",
"numpy.arange",
"bottleneck.nn"
] | [((717, 732), 'numpy.arange', 'np.arange', (['size'], {}), '(size)\n', (726, 732), True, 'import numpy as np\n'), ((1282, 1341), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['actual', 'desired'], {'err_msg': 'err_msg'}), '(actual, desired, err_msg=err_msg)\n', (1307, 1341), False, 'from num... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dataset',
fields=[
('id', models.AutoField(prim... | [
"django.db.models.DateField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((299, 392), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'auto_created': '(True)', 'verbose_name': '"""ID"""', 'serialize': '(False)'}), "(primary_key=True, auto_created=True, verbose_name='ID',\n serialize=False)\n", (315, 392), False, 'from django.db import migrations, models\... |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('orgs', '0015_auto_20160209_0926'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(verbose_name='ID', seria... | [
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.CharField",
"django.db.models.BooleanField"
] | [((279, 372), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (295, 372), False, 'from django.db import migrations, models\... |
# -*- coding: utf-8 -*-
import sys
import argparse
arg_no = len(sys.argv)
tool_parser = argparse.ArgumentParser(add_help=False)
tool_subparsers = tool_parser.add_subparsers(help='commands', dest='command')
# The rename command.
rename_parser = tool_subparsers.add_parser('rename', help='rename an existing user acco... | [
"argparse.ArgumentParser"
] | [((91, 130), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (114, 130), False, 'import argparse\n')] |
"""
Climate Platform Device for Wiser Smart
https://github.com/tomtomfx/wiserSmartForHA
<EMAIL>
"""
import asyncio
import logging
import voluptuous as vol
from functools import partial
from ruamel.yaml import YAML as yaml
from homeassistant.components.climate import ClimateEntity
from homeassistant.core import cal... | [
"functools.partial",
"homeassistant.helpers.dispatcher.async_dispatcher_connect"
] | [((6136, 6222), 'homeassistant.helpers.dispatcher.async_dispatcher_connect', 'async_dispatcher_connect', (['self.hass', '"""WiserSmartUpdateMessage"""', 'async_update_state'], {}), "(self.hass, 'WiserSmartUpdateMessage',\n async_update_state)\n", (6160, 6222), False, 'from homeassistant.helpers.dispatcher import asy... |
import os
from functools import partial
from multiprocessing import Pool
from typing import Any, Callable, Dict, List, Optional
import numpy as np
import pandas as pd
from tqdm import tqdm
from src.dataset.utils.waveform_preprocessings import preprocess_strain
def id_2_path(
image_id: str,
is_train: bool = ... | [
"os.path.exists",
"src.dataset.utils.waveform_preprocessings.preprocess_strain",
"pandas.merge",
"functools.partial",
"os.path.basename",
"multiprocessing.Pool",
"pandas.DataFrame",
"numpy.load"
] | [((1533, 1546), 'numpy.load', 'np.load', (['path'], {}), '(path)\n', (1540, 1546), True, 'import numpy as np\n'), ((3986, 4127), 'functools.partial', 'partial', (['get_agg_feats'], {'interp_psd': 'interp_psd', 'psds': 'psds', 'window': 'window', 'fs': 'fs', 'fband': 'fband', 'psd_cache_path_suffix': 'psd_cache_path_suf... |
#!/usr/bin/env python3
import os
import boto3
import botocore.exceptions
import argparse
import yaml
from nephele2 import NepheleError
mand_vars = ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY']
perm_error = """\n\nIt seems you have not set up your AWS correctly.
Should you be running this with Awssume? Or have profile... | [
"os.path.exists",
"argparse.FileType",
"argparse.ArgumentParser",
"boto3.Session",
"os.environ.get",
"yaml.safe_load",
"nephele2.NepheleError.UnableToStartEC2Exception"
] | [((5309, 5381), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""CLI Interface to N2."""', 'usage': 'usage'}), "(description='CLI Interface to N2.', usage=usage)\n", (5332, 5381), False, 'import argparse\n'), ((1955, 1984), 'os.path.exists', 'os.path.exists', (['args.key_path'], {}), '(arg... |
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 27 18:16:24 2015
@author: <NAME>
Following the planned change:
DISCONNECTION OF THE CLASSIC E·SIOS PUBLIC WEBSITE
The classic e·sios public website (http://www.esios.ree.es) will be taken offline on 29 March 2016.
We will continue to provide service on the new website of the Op... | [
"json.loads",
"esiosdata.prettyprinting.print_err",
"re.compile",
"dataweb.requestweb.get_data_en_intervalo",
"pandas.DataFrame",
"pandas.Timestamp",
"esiosdata.esios_config.D_TIPOS_REQ_DEM.keys"
] | [((1184, 1229), 're.compile', 're.compile', (['"""(?P<func>.*)\\\\((?P<json>.*)\\\\);"""'], {}), "('(?P<func>.*)\\\\((?P<json>.*)\\\\);')\n", (1194, 1229), False, 'import re\n'), ((2254, 2283), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'index': '[0]'}), '(data, index=[0])\n', (2266, 2283), True, 'import pandas as... |
#!#!/usr/bin/env python
import os
from github import Github
from libraries.notify import Notify
import json
print("")
print("Scanning Github repos")
GITHUB_API_KEY = os.environ.get('GITHUB_API_KEY')
WHITELIST = json.loads(os.environ.get('GITHUB_WHITELIST').lower())
GITHUB_SCAN = json.loads(os.environ.get('GITHUB_SCAN... | [
"libraries.notify.Notify",
"os.environ.get",
"github.Github"
] | [((168, 200), 'os.environ.get', 'os.environ.get', (['"""GITHUB_API_KEY"""'], {}), "('GITHUB_API_KEY')\n", (182, 200), False, 'import os\n'), ((343, 377), 'os.environ.get', 'os.environ.get', (['"""SENDGRID_API_KEY"""'], {}), "('SENDGRID_API_KEY')\n", (357, 377), False, 'import os\n'), ((394, 425), 'os.environ.get', 'os.... |
from flask.ext.sqlalchemy import SQLAlchemy
from util import hex_to_rgb, rgb_to_hex
from time2words import relative_time_to_text
from datetime import datetime
from dateutil.tz import tzutc
import pytz
db = SQLAlchemy()
def created_on_default():
return datetime.utcnow()
class Counter(db.Model):
__tablename__ ... | [
"flask.ext.sqlalchemy.SQLAlchemy",
"datetime.datetime.utcnow"
] | [((207, 219), 'flask.ext.sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (217, 219), False, 'from flask.ext.sqlalchemy import SQLAlchemy\n'), ((258, 275), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (273, 275), False, 'from datetime import datetime\n'), ((1388, 1405), 'datetime.datetime.utc... |
#!/usr/bin/env python3
import os
import setuptools
DIR = os.path.dirname(__file__)
REQUIREMENTS = os.path.join(DIR, "requirements.txt")
with open(REQUIREMENTS) as f:
reqs = f.read().strip().split("\n")
setuptools.setup(
name="rl",
version="0.0.1",
description="Reinforcement Learning: An Introduction... | [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((59, 84), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (74, 84), False, 'import os\n'), ((100, 137), 'os.path.join', 'os.path.join', (['DIR', '"""requirements.txt"""'], {}), "(DIR, 'requirements.txt')\n", (112, 137), False, 'import os\n'), ((432, 458), 'setuptools.find_packages', 'setupto... |
# pylint: disable=missing-docstring
from __future__ import annotations
import hashlib
from io import BytesIO
from pathlib import Path
from typing import Any
import pytest
from beancount.core.compare import hash_entry
from flask import url_for
from flask.testing import FlaskClient
from fava.context import g
from fava... | [
"fava.core.misc.align",
"pathlib.Path",
"beancount.core.compare.hash_entry",
"io.BytesIO",
"flask.url_for",
"fava.json_api.validate_func_arguments",
"pytest.mark.parametrize",
"pytest.raises"
] | [((10148, 10327), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""query_string,result_str"""', "[('balances from year = 2014', '5086.65 USD'), ('nononono',\n 'ERROR: Syntax error near'), ('select sum(day)', '43558')]"], {}), "('query_string,result_str', [(\n 'balances from year = 2014', '5086.65 USD')... |
from luminoso_api.v5_client import LuminosoClient
from luminoso_api.v5_upload import create_project_with_docs, BATCH_SIZE
from unittest.mock import patch
import pytest
BASE_URL = 'http://mock-api.localhost/api/v5/'
DOCS_TO_UPLOAD = [
{'title': 'Document 1', 'text': 'Bonjour', 'extra': 'field'},
{'title': 'Do... | [
"luminoso_api.v5_client.LuminosoClient.connect",
"unittest.mock.patch",
"luminoso_api.v5_upload.create_project_with_docs",
"pytest.raises"
] | [((2250, 2296), 'luminoso_api.v5_client.LuminosoClient.connect', 'LuminosoClient.connect', (['BASE_URL'], {'token': '"""fake"""'}), "(BASE_URL, token='fake')\n", (2272, 2296), False, 'from luminoso_api.v5_client import LuminosoClient\n'), ((5347, 5393), 'luminoso_api.v5_client.LuminosoClient.connect', 'LuminosoClient.c... |
import unittest
import shutil
import tempfile
import numpy as np
# import pandas as pd
# import pymc3 as pm
# from pymc3 import summary
# from sklearn.mixture import BayesianGaussianMixture as skBayesianGaussianMixture
from sklearn.model_selection import train_test_split
from pmlearn.exceptions import NotFittedError
... | [
"numpy.random.normal",
"sklearn.model_selection.train_test_split",
"numpy.array",
"numpy.random.randint",
"tempfile.mkdtemp",
"shutil.rmtree",
"pmlearn.mixture.DirichletProcessMixture"
] | [((601, 628), 'numpy.array', 'np.array', (['[0.35, 0.4, 0.25]'], {}), '([0.35, 0.4, 0.25])\n', (609, 628), True, 'import numpy as np\n'), ((650, 670), 'numpy.array', 'np.array', (['[0, 5, 10]'], {}), '([0, 5, 10])\n', (658, 670), True, 'import numpy as np\n'), ((693, 718), 'numpy.array', 'np.array', (['[0.5, 0.5, 1.0]'... |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from skimage import draw
from skimage import measure
from astropy.io import fits
from astropy import units as u
from astropy import wcs, coordinates
from scipy.ndimage.filters impor... | [
"skimage.draw.circle",
"os.path.exists",
"scipy.ndimage.filters.gaussian_filter",
"os.makedirs",
"astropy.coordinates.SkyCoord",
"numpy.sum",
"numpy.zeros",
"numpy.argwhere",
"skimage.draw.circle_perimeter",
"numpy.concatenate",
"astropy.io.fits.open",
"numpy.moveaxis",
"numpy.loadtxt",
"a... | [((1351, 1460), 'numpy.loadtxt', 'np.loadtxt', (['model_file'], {'dtype': "{'names': ('name', 'ra', 'dec', 'I'), 'formats': ('S10', 'f4', 'f4', 'f4')}"}), "(model_file, dtype={'names': ('name', 'ra', 'dec', 'I'),\n 'formats': ('S10', 'f4', 'f4', 'f4')})\n", (1361, 1460), True, 'import numpy as np\n'), ((1583, 1637),... |
#!/usr/bin/env python3
import os
import sys
import yaml
file_name=sys.argv[1]
file_name = '/root/etcd/' + file_name + '.yaml'
with open(file_name) as f:
y=yaml.safe_load(f)
del y['metadata']['creationTimestamp']
del y['metadata']['generation']
del y['metadata']['resourceVersion']
del y['metad... | [
"yaml.safe_load",
"yaml.dump"
] | [((161, 178), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (175, 178), False, 'import yaml\n'), ((400, 467), 'yaml.dump', 'yaml.dump', (['y', 'outputFile'], {'default_flow_style': '(False)', 'sort_keys': '(False)'}), '(y, outputFile, default_flow_style=False, sort_keys=False)\n', (409, 467), False, 'import... |
import os
import cv2
from Segmentation import CombinedHist, get_histograms, HistQueue
import matplotlib.pyplot as plt
import numpy as np
listofFiles = os.listdir('generated_frames')
# change the size of queue accordingly
queue_of_hists = HistQueue.HistQueue(25)
x = []
y_r = []
y_g = []
y_b = []
def compare(current_h... | [
"numpy.median",
"os.listdir",
"numpy.amin",
"Segmentation.CombinedHist.CombinedHist",
"numpy.add",
"numpy.sort",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.figure",
"Segmentation.HistQueue.HistQueue",
"numpy.percentile",
"matplotlib.pyplot.show"
] | [((152, 182), 'os.listdir', 'os.listdir', (['"""generated_frames"""'], {}), "('generated_frames')\n", (162, 182), False, 'import os\n'), ((239, 262), 'Segmentation.HistQueue.HistQueue', 'HistQueue.HistQueue', (['(25)'], {}), '(25)\n', (258, 262), False, 'from Segmentation import CombinedHist, get_histograms, HistQueue\... |
"""
-------------------------------------------------------------------------
shine -
setup
!!TODO: add file description here!!
created: 2017/06/04 in PyCharm
(c) 2017 Sven - ducandu GmbH
-------------------------------------------------------------------------
"""
from setuptools import setup
setup(nam... | [
"setuptools.setup"
] | [((311, 538), 'setuptools.setup', 'setup', ([], {'name': '"""aiopening"""', 'version': '"""1.0"""', 'description': '"""AI (but even opener)"""', 'url': '"""http://github.com/sven1977/aiopening"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['aiopening']", 'zip_safe'... |
"""
List of podcasts and their filename parser types.
"""
from .rss_parsers import BaseItem, TalkPythonItem, ChangelogItem, IndieHackersItem
import attr
@attr.s(slots=True, frozen=True)
class Podcast:
name = attr.ib(type=str)
title = attr.ib(type=str)
url = attr.ib(type=str)
rss = attr.ib(type=str)
... | [
"attr.s",
"attr.ib"
] | [((156, 187), 'attr.s', 'attr.s', ([], {'slots': '(True)', 'frozen': '(True)'}), '(slots=True, frozen=True)\n', (162, 187), False, 'import attr\n'), ((214, 231), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (221, 231), False, 'import attr\n'), ((244, 261), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(... |
#!/usr/bin/env python
# vim: set fileencoding=utf-8 ts=4 sts=4 sw=4 et tw=80 :
#
# Extract and save extended object catalogs from the specified data and
# uncertainty images. This version of the script jointly analyzes all
# images from a specific AOR/channel to enable more sophisticated
# analysis.
#
# <NAME>
# Create... | [
"logging.getLogger",
"sys.exit",
"jpl_eph_helpers.EphTool",
"sys._getframe",
"os.path.isdir",
"os.mkdir",
"spitz_extract.SpitzFind",
"imp.reload",
"os.path.isfile",
"sys.stderr.write",
"catalog_tools.XCorrPruner",
"os.path.dirname",
"time.time",
"logging.basicConfig",
"numpy.__version__.... | [((672, 711), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (691, 711), False, 'import logging\n'), ((721, 748), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (738, 748), False, 'import logging\n'), ((2585, 2620), 'spitz_xcorr_st... |
"""
Helper functions to visualize a graph in a notebook or save the plot to file.
Import as:
import dataflow.core.visualization as dtfcorvisu
"""
import IPython
import networkx as networ
import pygraphviz
import dataflow.core.dag as dtfcordag
import helpers.hdbg as hdbg
import helpers.hio as hio
def draw(dag: dtf... | [
"helpers.hdbg.dassert_isinstance",
"helpers.hio.create_enclosing_dir",
"networkx.nx_agraph.to_agraph"
] | [((736, 771), 'helpers.hio.create_enclosing_dir', 'hio.create_enclosing_dir', (['file_name'], {}), '(file_name)\n', (760, 771), True, 'import helpers.hio as hio\n'), ((1006, 1049), 'helpers.hdbg.dassert_isinstance', 'hdbg.dassert_isinstance', (['dag', 'dtfcordag.DAG'], {}), '(dag, dtfcordag.DAG)\n', (1029, 1049), True,... |
"""Calculate pixel errors for a single run or all runs in an experiment dir."""
import torch
import itertools
import numpy as np
import imageio
import argparse
import os
import glob
from model.main import main as restore_model
from model.utils.utils import bw_transform
os.environ["CUDA_VISIBLE_DEVICES"] = '-1'
def... | [
"os.path.exists",
"argparse.ArgumentParser",
"model.main.main",
"torch.mean",
"torch.unsqueeze",
"model.utils.utils.bw_transform",
"torch.stack",
"os.path.join",
"os.makedirs",
"torch.sqrt",
"torch.cat",
"torch.tensor",
"torch.arange",
"glob.glob",
"imageio.mimsave",
"torch.clamp"
] | [((805, 850), 'model.main.main', 'restore_model', ([], {'restore': 'restore', 'extras': 'extras'}), '(restore=restore, extras=extras)\n', (818, 850), True, 'from model.main import main as restore_model\n'), ((2788, 2819), 'torch.cat', 'torch.cat', (['[z_recon, z_pred]', '(1)'], {}), '([z_recon, z_pred], 1)\n', (2797, 2... |
from django.contrib import admin
# Register your models here.
"""User admin classes."""
# Django
from django.contrib import admin
# Models
from users.models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
"""User admin."""
list_display = ('pk', 'username', 'email','first_name','last_na... | [
"django.contrib.admin.register"
] | [((174, 194), 'django.contrib.admin.register', 'admin.register', (['User'], {}), '(User)\n', (188, 194), False, 'from django.contrib import admin\n')] |
# Copyright 2020 Toyota Research Institute. All rights reserved.
"""
Geometry utilities
"""
import numpy as np
def invert_pose_numpy(T):
"""
'Invert' 4x4 extrinsic matrix
Parameters
----------
T: 4x4 matrix (world to camera)
Returns
-------
4x4 matrix (camera to world)
"""
... | [
"numpy.copy",
"numpy.matmul"
] | [((326, 336), 'numpy.copy', 'np.copy', (['T'], {}), '(T)\n', (333, 336), True, 'import numpy as np\n'), ((405, 422), 'numpy.matmul', 'np.matmul', (['R.T', 't'], {}), '(R.T, t)\n', (414, 422), True, 'import numpy as np\n')] |
"""
Module that defines Corpus and DocumentSource/DocumentDestination classes which access documents
as lines or parts in a file.
"""
import json
from gatenlp.urlfileutils import yield_lines_from
from gatenlp.document import Document
from gatenlp.corpora.base import DocumentSource, DocumentDestination
from gatenlp.cor... | [
"json.loads",
"json.dumps",
"gatenlp.document.Document",
"gatenlp.document.Document.load_mem",
"gatenlp.urlfileutils.yield_lines_from"
] | [((8942, 8971), 'gatenlp.urlfileutils.yield_lines_from', 'yield_lines_from', (['self.source'], {}), '(self.source)\n', (8958, 8971), False, 'from gatenlp.urlfileutils import yield_lines_from\n'), ((6287, 6303), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (6297, 6303), False, 'import json\n'), ((9601, 9615),... |
import fastapi
from starlette.requests import Request
from web.viewmodels.account.AccountViewModel import AccountViewModel
from web.viewmodels.account.LoginViewModel import LoginViewModel
from web.viewmodels.account.RegisterViewModel import RegisterViewModel
router = fastapi.APIRouter()
@router.get('/account')
def i... | [
"web.viewmodels.account.AccountViewModel.AccountViewModel",
"fastapi.APIRouter",
"web.viewmodels.account.RegisterViewModel.RegisterViewModel",
"web.viewmodels.account.LoginViewModel.LoginViewModel"
] | [((269, 288), 'fastapi.APIRouter', 'fastapi.APIRouter', ([], {}), '()\n', (286, 288), False, 'import fastapi\n'), ((353, 378), 'web.viewmodels.account.AccountViewModel.AccountViewModel', 'AccountViewModel', (['request'], {}), '(request)\n', (369, 378), False, 'from web.viewmodels.account.AccountViewModel import Account... |
import numpy as np
from tests.test_utils import run_track_tests
from mirdata import annotations
from mirdata.datasets import tonas
TEST_DATA_HOME = "tests/resources/mir_datasets/tonas"
def test_track():
default_trackid = "01-D_AMairena"
dataset = tonas.Dataset(TEST_DATA_HOME)
track = dataset.track(defa... | [
"mirdata.datasets.tonas.Dataset",
"mirdata.datasets.tonas.load_audio",
"mirdata.datasets.tonas._load_tuning_frequency",
"mirdata.datasets.tonas.load_f0",
"numpy.array",
"tests.test_utils.run_track_tests",
"mirdata.datasets.tonas.load_notes"
] | [((260, 289), 'mirdata.datasets.tonas.Dataset', 'tonas.Dataset', (['TEST_DATA_HOME'], {}), '(TEST_DATA_HOME)\n', (273, 289), False, 'from mirdata.datasets import tonas\n'), ((1137, 1205), 'tests.test_utils.run_track_tests', 'run_track_tests', (['track', 'expected_attributes', 'expected_property_types'], {}), '(track, e... |
#encoding: utf-8
import sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
import zmq, sys, json
import seg
import detoken
import datautils
from random import sample
serverl=["tcp://127.0.0.1:"+str(port) for port in xrange(5556,5556+4)]
def _translate_core(jsond):
global serverl
sock = zmq.Context().socket(zmq.R... | [
"random.sample",
"sys.setdefaultencoding",
"seg.poweron",
"seg.poweroff",
"seg.segline",
"zmq.Context"
] | [((41, 72), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (63, 72), False, 'import zmq, sys, json\n'), ((725, 738), 'seg.poweron', 'seg.poweron', ([], {}), '()\n', (736, 738), False, 'import seg\n'), ((757, 771), 'seg.poweroff', 'seg.poweroff', ([], {}), '()\n', (769, 771), F... |
"""Configuration defaults and loading functions.
Pyleus will look for configuration files in the following file paths in order
of increasing precedence. The latter configuration overrides the previous one.
#. /etc/pyleus.conf
#. ~/.config/pyleus.conf
#. ~/.pyleus.conf
You can always specify a configuration file when... | [
"os.path.exists",
"collections.namedtuple",
"os.path.isfile",
"pyleus.compat.configparser.SafeConfigParser",
"pyleus.utils.expand_path"
] | [((1773, 2042), 'collections.namedtuple', 'collections.namedtuple', (['"""Configuration"""', '"""base_jar config_file debug func include_packages output_jar pypi_index_url nimbus_host nimbus_port storm_cmd_path system_site_packages topology_path topology_jar topology_name verbose wait_time jvm_opts"""'],... |
from shutil import rmtree
from tempfile import mkdtemp
import pytest
import param
import pydrobert.param.serialization as serial
param.parameterized.warnings_as_exceptions = True
@pytest.fixture(params=["ruamel_yaml", "pyyaml"])
def yaml_loader(request):
if request.param == "ruamel_yaml":
try:
... | [
"yaml.load",
"ruamel.yaml.YAML",
"tempfile.mkdtemp",
"shutil.rmtree",
"pytest.fixture"
] | [((185, 233), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['ruamel_yaml', 'pyyaml']"}), "(params=['ruamel_yaml', 'pyyaml'])\n", (199, 233), False, 'import pytest\n'), ((919, 955), 'pytest.fixture', 'pytest.fixture', ([], {'params': '[True, False]'}), '(params=[True, False])\n', (933, 955), False, 'import pytes... |
import argparse
import csv
import matplotlib
import matplotlib.ticker as tck
import matplotlib.pyplot as plt
import numpy as np
# Matplotlib export settings
matplotlib.use('pgf')
import matplotlib.pyplot as plt
matplotlib.rcParams.update({
'pgf.texsystem': 'pdflatex',
'font.size': 10,
'font.family': 'ser... | [
"numpy.sqrt",
"argparse.ArgumentParser",
"matplotlib.rcParams.update",
"matplotlib.use",
"matplotlib.ticker.MultipleLocator",
"numpy.absolute",
"numpy.imag",
"numpy.angle",
"numpy.exp",
"numpy.real",
"matplotlib.ticker.FormatStrFormatter",
"csv.reader",
"matplotlib.pyplot.subplots"
] | [((160, 181), 'matplotlib.use', 'matplotlib.use', (['"""pgf"""'], {}), "('pgf')\n", (174, 181), False, 'import matplotlib\n'), ((214, 359), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'pgf.texsystem': 'pdflatex', 'font.size': 10, 'font.family': 'serif',\n 'text.usetex': True, 'pgf.rcfonts': False... |
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.buil... | [
"pants.goal.workspace.ScmWorkspace",
"pants.util.objects.datatype",
"pants.base.exceptions.TaskError",
"pants.base.build_environment.get_scm",
"pants.scm.change_calculator.BuildGraphChangeCalculator"
] | [((954, 1044), 'pants.util.objects.datatype', 'datatype', (['"""ChangedRequest"""', "['changes_since', 'diffspec', 'include_dependees', 'fast']"], {}), "('ChangedRequest', ['changes_since', 'diffspec',\n 'include_dependees', 'fast'])\n", (962, 1044), False, 'from pants.util.objects import datatype\n'), ((4401, 4697)... |
"""
eZmax API Definition (Full)
This API expose all the functionnalities for the eZmax and eZsign applications. # noqa: E501
The version of the OpenAPI document: 1.1.7
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import unittest
import eZmaxApi
from eZmaxApi.api.global_cus... | [
"unittest.main",
"eZmaxApi.api.global_customer_api.GlobalCustomerApi"
] | [((789, 804), 'unittest.main', 'unittest.main', ([], {}), '()\n', (802, 804), False, 'import unittest\n'), ((504, 523), 'eZmaxApi.api.global_customer_api.GlobalCustomerApi', 'GlobalCustomerApi', ([], {}), '()\n', (521, 523), False, 'from eZmaxApi.api.global_customer_api import GlobalCustomerApi\n')] |
import unittest
from cosymlib import file_io
from numpy import testing
from cosymlib.molecule.geometry import Geometry
import os
data_dir = os.path.join(os.path.dirname(__file__), 'data')
class TestSymgroupFchk(unittest.TestCase):
def setUp(self):
self._structure = file_io.read_generic_structure_file(d... | [
"os.path.dirname",
"numpy.testing.assert_array_almost_equal",
"cosymlib.molecule.geometry.Geometry",
"cosymlib.file_io.read_generic_structure_file"
] | [((155, 180), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (170, 180), False, 'import os\n'), ((283, 353), 'cosymlib.file_io.read_generic_structure_file', 'file_io.read_generic_structure_file', (["(data_dir + '/wfnsym/tih4_5d.fchk')"], {}), "(data_dir + '/wfnsym/tih4_5d.fchk')\n", (318, 353... |
# -*- coding: utf-8 -*-
"""Gymnasium implementation."""
# Django
from django.core.validators import RegexValidator
from django.db import models
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _ # noqa
class Gymnasium(models.Model):
"""Gymnasium model for the website."... | [
"django.utils.text.slugify",
"django.utils.translation.ugettext_lazy"
] | [((352, 361), 'django.utils.translation.ugettext_lazy', '_', (['"""slug"""'], {}), "('slug')\n", (353, 361), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((420, 429), 'django.utils.translation.ugettext_lazy', '_', (['"""name"""'], {}), "('name')\n", (421, 429), True, 'from django.utils.translatio... |
#!/usr/bin/env python3
# coding: utf-8
# MIT License © https://github.com/scherma
# contact http_<EMAIL>4<EMAIL>
import logging, os, configparser, libvirt, json, arrow, pyvnc, shutil, time, victimfiles, glob, websockify, multiprocessing, signal
import tempfile, evtx_dates, db_calls, psycopg2, psycopg2.extras, sys, pca... | [
"logging.getLogger",
"victimfiles.VictimFiles",
"db_calls.insert_http",
"multiprocessing.Process",
"io.BytesIO",
"time.sleep",
"sys.exc_info",
"case_postprocess.Postprocessor",
"websockify.WebSocketProxy",
"pyvnc.Connector",
"os.remove",
"os.path.exists",
"os.listdir",
"db_calls.insert_tls... | [((481, 516), 'logging.getLogger', 'logging.getLogger', (['"""antfarm.worker"""'], {}), "('antfarm.worker')\n", (498, 516), False, 'import logging, os, configparser, libvirt, json, arrow, pyvnc, shutil, time, victimfiles, glob, websockify, multiprocessing, signal\n'), ((20219, 20317), 'websockify.WebSocketProxy', 'webs... |
import json
class TestListRepo:
def test_invalid(self, host):
result = host.run('stack list repo test')
assert result.rc == 255
assert result.stderr.startswith('error - ')
def test_args(self, host, add_repo):
# Add a second repo so we can make sure it is skipped
add_repo('test2', 'test2url')
# Run list... | [
"operator.attrgetter",
"stack.commands.get_mysql_connection",
"json.loads",
"stack.argument_processors.pallet.PalletArgProcessor"
] | [((499, 524), 'json.loads', 'json.loads', (['result.stdout'], {}), '(result.stdout)\n', (509, 524), False, 'import json\n'), ((734, 759), 'json.loads', 'json.loads', (['result.stdout'], {}), '(result.stdout)\n', (744, 759), False, 'import json\n'), ((1042, 1067), 'json.loads', 'json.loads', (['result.stdout'], {}), '(r... |
import matplotlib.pyplot as plt
import numpy as np
def plot_chains(chain, fileout=None, tracers=0, labels=None, delay=0, ymax=200000, thin=100, num_xticks=7, truths=None):
if chain.ndim < 3:
print("You must include a multiple chains")
return
n_chains, length, n_var = chain.shape
print(n... | [
"matplotlib.pyplot.savefig",
"numpy.ones",
"numpy.arange",
"numpy.random.choice",
"numpy.linspace",
"numpy.empty",
"matplotlib.pyplot.subplots_adjust",
"matplotlib.pyplot.show"
] | [((708, 783), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0.09)', 'bottom': '(0.07)', 'right': '(0.96)', 'top': '(0.96)', 'hspace': '(0)'}), '(left=0.09, bottom=0.07, right=0.96, top=0.96, hspace=0)\n', (727, 783), True, 'import matplotlib.pyplot as plt\n'), ((798, 827), 'numpy.empty', '... |
##
## Evaluation Script
##
import numpy as np
import time
from sample_model import Model
from data_loader import data_loader
from generator import Generator
def evaluate(label_indices = {'brick': 0, 'ball': 1, 'cylinder': 2},
channel_means = np.array([147.12697, 160.21092, 167.70029]),
data... | [
"generator.Generator",
"sample_model.Model",
"data_loader.data_loader",
"numpy.argmax",
"numpy.array",
"numpy.sum",
"time.time"
] | [((258, 301), 'numpy.array', 'np.array', (['[147.12697, 160.21092, 167.70029]'], {}), '([147.12697, 160.21092, 167.70029])\n', (266, 301), True, 'import numpy as np\n'), ((513, 629), 'data_loader.data_loader', 'data_loader', ([], {'label_indices': 'label_indices', 'channel_means': 'channel_means', 'train_test_split': '... |
# -*- coding=utf-8 -*-
"""
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0... | [
"paddle.fluid.layers.concat",
"paddle.fluid.layers.topk",
"paddle.fluid.ParamAttr",
"paddle.fluid.data",
"paddle.fluid.layers.softmax",
"train_network.DnnLayerClassifierNet",
"paddle.fluid.contrib.layers.index_sample",
"paddle.fluid.layers.slice",
"paddle.fluid.layers.cast",
"paddle.fluid.default_... | [((1469, 1496), 'train_network.DnnLayerClassifierNet', 'DnnLayerClassifierNet', (['args'], {}), '(args)\n', (1490, 1496), False, 'from train_network import DnnLayerClassifierNet, InputTransNet\n'), ((1528, 1547), 'train_network.InputTransNet', 'InputTransNet', (['args'], {}), '(args)\n', (1541, 1547), False, 'from trai... |
import requests
from requests import Response
class Client:
@staticmethod
def request(method: str, url: str, **kwargs) -> Response:
"""
Request method
method: method for the new Request object: GET, OPTIONS, HEAD, POST, PUT, PATCH, or DELETE. # noqa
url – URL for the new Reques... | [
"requests.request"
] | [((688, 727), 'requests.request', 'requests.request', (['method', 'url'], {}), '(method, url, **kwargs)\n', (704, 727), False, 'import requests\n')] |
"""server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-bas... | [
"django.conf.urls.url"
] | [((789, 834), 'django.conf.urls.url', 'url', (['"""^stats/check/"""', 'stats.get_stats_from_id'], {}), "('^stats/check/', stats.get_stats_from_id)\n", (792, 834), False, 'from django.conf.urls import url\n'), ((842, 889), 'django.conf.urls.url', 'url', (['"""^stats/update/"""', 'stats.post_stats_from_id'], {}), "('^sta... |
import json
from ..connection import get_connection
class Metadata:
def __init__(self, database):
self.connection = get_connection(database).connection
# first list is default if nothing is specified (should be extended)
# list is ordered as [edge_name, node1_id, edge_node1_id, edge_node2_id, n... | [
"json.dumps"
] | [((604, 620), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (614, 620), False, 'import json\n')] |
import requests
from bs4 import BeautifulSoup
def getSummary(link):
#Get page response
response = requests.get(link)
#Parse the pgae
soup = BeautifulSoup(response.content,'html.parser')
#Find first paragraph
summary_p = soup.find('p')
#Get the first text
summary = summary_p.text[:40] + ... | [
"bs4.BeautifulSoup",
"requests.get"
] | [((107, 125), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (119, 125), False, 'import requests\n'), ((157, 203), 'bs4.BeautifulSoup', 'BeautifulSoup', (['response.content', '"""html.parser"""'], {}), "(response.content, 'html.parser')\n", (170, 203), False, 'from bs4 import BeautifulSoup\n'), ((1049, 108... |
import copy
from configuration.configuration import QuestionnaireConfiguration
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import F
from django.template.loader import render_to_string
from configuration.models import Configuration, Key, Value, Translati... | [
"configuration.configuration.QuestionnaireConfiguration",
"django.db.models.F",
"django.template.loader.render_to_string",
"copy.deepcopy"
] | [((3316, 3387), 'configuration.configuration.QuestionnaireConfiguration', 'QuestionnaireConfiguration', ([], {'keyword': 'self.code', 'configuration_object': 'obj'}), '(keyword=self.code, configuration_object=obj)\n', (3342, 3387), False, 'from configuration.configuration import QuestionnaireConfiguration\n'), ((13237,... |
import matplotlib.pyplot as plt
import distance
from matplotlib import style
from clustering_algorithms.affinity_propagation import AffinityPropagation
from clustering_algorithms.custom_k_means import KMeans
from clustering_algorithms.custom_mean_shift import MeanShift
from clustering_algorithms.custom_mean_shift_strin... | [
"utils.string_compare_algorithm.needleman_wunsch_with_penalty",
"prepare_data.format_sequences.format_sequences_from_student",
"numpy.asarray",
"clustering_algorithms.affinity_propagation.AffinityPropagation",
"clustering_algorithms.custom_k_means.KMeans",
"distance.levenshtein",
"clustering_algorithms.... | [((2032, 2075), 'prepare_data.format_sequences.format_sequences_from_student', 'format_sequences_from_student', (['student_name'], {}), '(student_name)\n', (2061, 2075), False, 'from prepare_data.format_sequences import format_sequences_from_student\n'), ((2510, 2664), 'clustering_algorithms.custom_k_means.KMeans', 'KM... |
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""Conversion between single-channel integer value to 3-channels color image.
Mostly used for semantic segmentation.
"""
from __future__ import annotations
import numpy as np
import torch
from multipledispatch import dispatch
from torch import Tensor
from onevision.cv.... | [
"torch.from_numpy",
"numpy.squeeze",
"numpy.stack",
"multipledispatch.dispatch",
"onevision.cv.core.to_channel_first",
"numpy.zeros_like",
"onevision.cv.core.get_num_channels"
] | [((1527, 1549), 'multipledispatch.dispatch', 'dispatch', (['Tensor', 'list'], {}), '(Tensor, list)\n', (1535, 1549), False, 'from multipledispatch import dispatch\n'), ((1747, 1773), 'multipledispatch.dispatch', 'dispatch', (['np.ndarray', 'list'], {}), '(np.ndarray, list)\n', (1755, 1773), False, 'from multipledispatc... |
from statistics import mean
import csv
from aalpy.SULs import DfaSUL, MealySUL, MooreSUL
from aalpy.learning_algs import run_Lstar
from aalpy.oracles import RandomWalkEqOracle
from aalpy.utils import generate_random_dfa, generate_random_mealy_machine, generate_random_moore_machine
num_states = 1000
alph_size = 5
rep... | [
"aalpy.oracles.RandomWalkEqOracle",
"statistics.mean",
"aalpy.SULs.DfaSUL",
"csv.writer",
"aalpy.learning_algs.run_Lstar",
"aalpy.SULs.MealySUL",
"aalpy.utils.generate_random_dfa",
"aalpy.utils.generate_random_mealy_machine",
"aalpy.utils.generate_random_moore_machine",
"aalpy.SULs.MooreSUL"
] | [((2899, 2929), 'csv.writer', 'csv.writer', (['f'], {'dialect': '"""excel"""'}), "(f, dialect='excel')\n", (2909, 2929), False, 'import csv\n'), ((703, 796), 'aalpy.utils.generate_random_dfa', 'generate_random_dfa', (['num_states'], {'alphabet': 'alphabet', 'num_accepting_states': '(num_states // 2)'}), '(num_states, a... |
#!/usr/bin/env python
import sys
from os.path import exists
import numpy as np
import pylab
import scipy.interpolate
def read_fortran(filename):
""" Reads Fortran style binary data and returns a numpy array.
"""
with open(filename, 'rb') as f:
# read size of record
f.seek(0)
n = np.fromfile(f, dtype='int32... | [
"os.path.exists",
"numpy.fromfile",
"numpy.sqrt",
"pylab.gca",
"pylab.xlabel",
"numpy.column_stack",
"pylab.colorbar",
"numpy.isnan",
"numpy.meshgrid",
"pylab.pcolor",
"pylab.ylabel",
"pylab.show"
] | [((849, 866), 'numpy.meshgrid', 'np.meshgrid', (['x', 'z'], {}), '(x, z)\n', (860, 866), True, 'import numpy as np\n'), ((1239, 1260), 'numpy.column_stack', 'np.column_stack', (['args'], {}), '(args)\n', (1254, 1260), True, 'import numpy as np\n'), ((2029, 2050), 'os.path.exists', 'exists', (['x_coords_file'], {}), '(x... |
# with the TRACKBAR gui component
# we can perform some action my moving cursor
import cv2
import numpy as np
def funk(): # create one funciton # Now we are not adding any action in it . # just pass
pass
def main():
img1 = np.zeros((512,512,3) , np.uint8) # create a imgae of size 512 x512
... | [
"cv2.imshow",
"numpy.zeros",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.getTrackbarPos",
"cv2.createTrackbar",
"cv2.namedWindow"
] | [((250, 283), 'numpy.zeros', 'np.zeros', (['(512, 512, 3)', 'np.uint8'], {}), '((512, 512, 3), np.uint8)\n', (258, 283), True, 'import numpy as np\n'), ((451, 478), 'cv2.namedWindow', 'cv2.namedWindow', (['windowName'], {}), '(windowName)\n', (466, 478), False, 'import cv2\n'), ((673, 722), 'cv2.createTrackbar', 'cv2.c... |
from nldi_el_serv.XSGen import XSGen
from nldi_el_serv.dem_query import query_dems_shape
import py3dep
from pynhd import NLDI
gagebasin = NLDI().get_basins("06721000").to_crs('epsg:3857')
gageloc = NLDI().getfeature_byid("nwissite", "USGS-06721000").to_crs('epsg:3857')
cid = gageloc.comid.values.astype(str)
print(cid... | [
"pynhd.NLDI",
"nldi_el_serv.dem_query.query_dems_shape",
"nldi_el_serv.XSGen.XSGen"
] | [((529, 590), 'nldi_el_serv.XSGen.XSGen', 'XSGen', ([], {'point': 'gageloc', 'cl_geom': 'strmseg_loc', 'ny': '(101)', 'width': '(1000)'}), '(point=gageloc, cl_geom=strmseg_loc, ny=101, width=1000)\n', (534, 590), False, 'from nldi_el_serv.XSGen import XSGen\n'), ((745, 767), 'nldi_el_serv.dem_query.query_dems_shape', '... |
# PyVot
# Copyright(c) Microsoft Corporation
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the License); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED ON AN ... | [
"distutils.core.Distribution.find_config_files",
"sys.argv.remove",
"distutils.core.setup"
] | [((3384, 3406), 'distutils.core.setup', 'setup', ([], {}), '(**setup_options)\n', (3389, 3406), False, 'from distutils.core import setup, Distribution\n'), ((1052, 1086), 'sys.argv.remove', 'sys.argv.remove', (['no_downloads_flag'], {}), '(no_downloads_flag)\n', (1067, 1086), False, 'import sys\n'), ((1613, 1649), 'dis... |
from odoo import models, fields, api
from odoo.exceptions import ValidationError
class DemoOdooWizardTutorial(models.Model):
_name = 'demo.odoo.wizard.tutorial'
_description = 'Demo Odoo Wizard Tutorial'
name = fields.Char('Description', required=True)
partner_id = fields.Many2one('res.partner', strin... | [
"odoo.fields.Many2one",
"odoo.fields.Char",
"odoo.exceptions.ValidationError"
] | [((225, 266), 'odoo.fields.Char', 'fields.Char', (['"""Description"""'], {'required': '(True)'}), "('Description', required=True)\n", (236, 266), False, 'from odoo import models, fields, api\n'), ((284, 332), 'odoo.fields.Many2one', 'fields.Many2one', (['"""res.partner"""'], {'string': '"""Partner"""'}), "('res.partner... |
from django.db import models
class PartnerCharity(models.Model):
slug_id = models.CharField(max_length=30, unique=True)
name = models.TextField(unique=True, verbose_name='Name (human readable)')
email = models.EmailField(help_text='Used to cc the charity on receipts')
xero_account_name = models.TextFi... | [
"django.db.models.EmailField",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.FileField",
"django.db.models.BooleanField",
"django.db.models.CharField"
] | [((81, 125), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'unique': '(True)'}), '(max_length=30, unique=True)\n', (97, 125), False, 'from django.db import models\n'), ((137, 204), 'django.db.models.TextField', 'models.TextField', ([], {'unique': '(True)', 'verbose_name': '"""Name (human... |
__all__ = ['LightGBMRegressorModel']
from mmlspark.lightgbm.LightGBMRegressor import LightGBMRegressor, LightGBMRegressionModel
from mmlspark.train import ComputeModelStatistics
from pyspark.ml.evaluation import RegressionEvaluator
from pyspark.sql import DataFrame
import pyspark.sql.functions as F
from python_data_u... | [
"python_data_utils.spark.ml.base.Metrics.register",
"pyspark.ml.evaluation.RegressionEvaluator",
"matplotlib.pyplot.tight_layout",
"warnings.simplefilter",
"mmlspark.lightgbm.LightGBMRegressor.LightGBMRegressor",
"matplotlib.pyplot.subplots"
] | [((485, 547), 'warnings.simplefilter', 'warnings.simplefilter', ([], {'action': '"""ignore"""', 'category': 'FutureWarning'}), "(action='ignore', category=FutureWarning)\n", (506, 547), False, 'import warnings\n'), ((1522, 1560), 'python_data_utils.spark.ml.base.Metrics.register', 'Metrics.register', (['"""regression_m... |
import argparse
import os
import sys
sys.path.append(os.environ['CI_SITE_CONFIG'])
import ci_site_config
import run
import common
parser = argparse.ArgumentParser()
parser.add_argument("--prov", help="core provider", choices=["psm2", "verbs", \
"tcp", "udp", "sockets", "shm"])
parser.add_argument... | [
"run.fabtests",
"argparse.ArgumentParser",
"run.fi_info_test",
"os.chdir",
"run.shmemtest",
"run.mpistress_benchmark",
"run.osu_benchmark",
"run.intel_mpi_benchmark",
"sys.path.append"
] | [((37, 82), 'sys.path.append', 'sys.path.append', (["os.environ['CI_SITE_CONFIG']"], {}), "(os.environ['CI_SITE_CONFIG'])\n", (52, 82), False, 'import sys\n'), ((140, 165), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (163, 165), False, 'import argparse\n'), ((1166, 1183), 'os.chdir', 'os.chd... |
from wtforms import Form, BooleanField, IntegerField, StringField, PasswordField, validators
from wtforms.fields.html5 import EmailField
from src.common.database import db
from sqlalchemy import exc
class RunnerRegistrationForm(Form):
first_name = StringField('First name', [
validators.Length(min=2, max=... | [
"wtforms.validators.NumberRange",
"src.common.database.db.Column",
"src.common.database.db.session.commit",
"src.common.database.db.session.add",
"src.common.database.db.session",
"wtforms.validators.Length",
"src.common.database.db.relationship",
"src.common.database.db.UniqueConstraint",
"src.comm... | [((895, 934), 'src.common.database.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (904, 934), False, 'from src.common.database import db\n'), ((1114, 1151), 'src.common.database.db.Column', 'db.Column', (['db.Integer'], {'nullable': '(False)'}), '(db.Integer, n... |
import fourparts as fp
import pandas as pd
file_name = 'chorale_F'
df = fp.midi_to_df('samples/' + file_name + '.mid', save=True)
chords = fp.PreProcessor(4).get_progression(df)
chord_progression = fp.ChordProgression(chords)
# gets pitch class sets
pitch_class_sets = chord_progression.get_pitch_class_sets()
pd.Dat... | [
"fourparts.PreProcessor",
"fourparts.ChordProgression",
"fourparts.DyadProgression",
"pandas.DataFrame",
"fourparts.midi_to_df"
] | [((75, 132), 'fourparts.midi_to_df', 'fp.midi_to_df', (["('samples/' + file_name + '.mid')"], {'save': '(True)'}), "('samples/' + file_name + '.mid', save=True)\n", (88, 132), True, 'import fourparts as fp\n'), ((201, 228), 'fourparts.ChordProgression', 'fp.ChordProgression', (['chords'], {}), '(chords)\n', (220, 228),... |
"""
Defines models
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence
from torch.nn.utils.rnn import pad_packed_sequence
def init_weights(m):
if type(m) == nn.Linear or ... | [
"torch.nn.MaxPool1d",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.Softmax",
"torch.nn.functional.adaptive_avg_pool1d",
"torch.nn.init.xavier_uniform_",
"torch.transpose",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"torch.nn.Conv1d",
"torch.cat",
"torch.nn.GRU"
] | [((350, 389), 'torch.nn.init.xavier_uniform_', 'torch.nn.init.xavier_uniform_', (['m.weight'], {}), '(m.weight)\n', (379, 389), False, 'import torch\n'), ((1535, 1686), 'torch.nn.Conv1d', 'nn.Conv1d', ([], {'in_channels': 'opt.visual_feature_dim', 'out_channels': 'self.conv_width_v', 'kernel_size': 'self.kernel_size_v'... |
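# A minimal runnable sketch of the weight-init pattern in the record above,
# assuming plain PyTorch; the tiny model here is illustrative, not the
# record's actual network.
import torch.nn as nn

def init_weights(m):
    # Xavier-initialize the weight of every Linear/Conv1d submodule.
    if isinstance(m, (nn.Linear, nn.Conv1d)):
        nn.init.xavier_uniform_(m.weight)
        if m.bias is not None:
            nn.init.zeros_(m.bias)

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 2))
model.apply(init_weights)  # .apply() visits every submodule recursively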
# Time: O(n)
# Space: O(n)
# 1297 weekly contest 168 12/21/2019
# Given a string s, return the maximum number of occurrences of any substring under the following rules:
#
# The number of unique characters in the substring must be less than or equal to maxLetters.
# The substring size must be between minSize and maxSi... | [
"collections.Counter",
"collections.defaultdict"
] | [((2196, 2224), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (2219, 2224), False, 'import collections\n'), ((2736, 2764), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (2759, 2764), False, 'import collections\n'), ((3715, 3743), 'collections.defaultdi... |
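# A sketch of the standard approach to the problem described in the record
# above: only windows of length minSize need counting, because any longer
# valid substring contains a valid minSize one.
import collections

def max_freq(s, maxLetters, minSize, maxSize):
    counts = collections.Counter()
    for i in range(len(s) - minSize + 1):
        window = s[i:i + minSize]
        if len(set(window)) <= maxLetters:
            counts[window] += 1
    return max(counts.values()) if counts else 0

assert max_freq("aababcaab", 2, 3, 4) == 2  # "aab" occurs twice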
import copy
import logging
from api.models import Transaction, BonusTransaction, Order, Tag, OrderItem
from api.models.guest import Guest
from api.models.label import Label
from api.tests.data.guests import TestGuests
from api.tests.data.statuses import TestStatuses
from api.tests.data.users import TestUsers
from api.... | [
"logging.getLogger",
"api.models.Transaction.objects.all",
"api.models.Order.objects.all",
"api.models.label.Label.objects.all",
"api.models.OrderItem.objects.all",
"copy.deepcopy",
"api.models.BonusTransaction.objects.all",
"api.models.Tag.objects.all",
"api.models.guest.Guest.objects.all"
] | [((382, 409), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (399, 409), False, 'import logging\n'), ((1806, 1848), 'copy.deepcopy', 'copy.deepcopy', (["self.RESPONSES[f'GET{url}']"], {}), "(self.RESPONSES[f'GET{url}'])\n", (1819, 1848), False, 'import copy\n'), ((2146, 2188), 'copy.deepc... |
import re
from typing import Any, Type, Tuple, Union, Iterable
from nonebot.typing import overrides
from nonebot.adapters import Message as BaseMessage
from nonebot.adapters import MessageSegment as BaseMessageSegment
from .utils import escape, unescape
from .api import Message as GuildMessage
from .api import Messa... | [
"re.finditer",
"nonebot.typing.overrides"
] | [((417, 446), 'nonebot.typing.overrides', 'overrides', (['BaseMessageSegment'], {}), '(BaseMessageSegment)\n', (426, 446), False, 'from nonebot.typing import overrides\n'), ((1378, 1407), 'nonebot.typing.overrides', 'overrides', (['BaseMessageSegment'], {}), '(BaseMessageSegment)\n', (1387, 1407), False, 'from nonebot.... |
# Future
from __future__ import annotations
# Standard Library
from collections.abc import Callable
from typing import Literal, TypeVar
# Packages
from discord.ext import commands
# Local
from cd import custom, exceptions
__all__ = (
"is_player_connected",
)
T = TypeVar("T")
def is_player_connected() -> Ca... | [
"discord.ext.commands.check",
"cd.exceptions.EmbedError",
"typing.TypeVar"
] | [((274, 286), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (281, 286), False, 'from typing import Literal, TypeVar\n'), ((602, 627), 'discord.ext.commands.check', 'commands.check', (['predicate'], {}), '(predicate)\n', (616, 627), False, 'from discord.ext import commands\n'), ((491, 568), 'cd.exceptions.E... |
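# A hedged sketch of the commands.check() pattern the record above wraps;
# the predicate body and the error raised are illustrative stand-ins for
# the project's own exceptions.EmbedError.
from discord.ext import commands

def is_player_connected():
    async def predicate(ctx: commands.Context) -> bool:
        # Assumed shape: fail the check when no voice client is attached.
        if ctx.voice_client is None:
            raise commands.CheckFailure("Not connected to a voice channel.")
        return True
    return commands.check(predicate)

# Usage: decorate a command with @is_player_connected().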
"""Base test for TalisMUD tests.
It creates an in-memory database for each test, so they run in independent
environments.
"""
import unittest
from pony.orm import db_session
from data.base import db
from data.properties import LazyPropertyDescriptor
# Bind to a temporary database
db.bind(provider="sqlite", filena... | [
"data.base.db.entities.values",
"data.base.db.create_tables",
"data.base.db.drop_all_tables",
"data.base.db.generate_mapping",
"data.base.db.bind",
"pony.orm.db_session._enter",
"pony.orm.db_session.__exit__"
] | [((287, 334), 'data.base.db.bind', 'db.bind', ([], {'provider': '"""sqlite"""', 'filename': '""":memory:"""'}), "(provider='sqlite', filename=':memory:')\n", (294, 334), False, 'from data.base import db\n'), ((335, 374), 'data.base.db.generate_mapping', 'db.generate_mapping', ([], {'create_tables': '(True)'}), '(create... |
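# A minimal sketch of the per-test isolation idea the record above
# describes, using a self-contained Pony ORM model instead of TalisMUD's
# data.base.db.
import unittest
from pony.orm import Database, Required, db_session

db = Database()

class Item(db.Entity):
    name = Required(str)

# Bind to an in-memory SQLite database, as in the record.
db.bind(provider="sqlite", filename=":memory:")
db.generate_mapping(create_tables=True)

class ItemTest(unittest.TestCase):
    def setUp(self):
        # Recreate the tables so each test starts from an empty database.
        db.drop_all_tables(with_all_data=True)
        db.create_tables()

    def test_create(self):
        with db_session:
            Item(name="sword")
            self.assertEqual(Item.select().count(), 1)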
import requests
import sys
requests.put(f"http://localhost:9200/{sys.argv[1]}?pretty")
headers = {"Content-Type": "application/x-ndjson"}
data = open(sys.argv[2], "rb").read()
requests.post(
f"http://localhost:9200/{sys.argv[1]}/_bulk?pretty", headers=headers, data=data
)
| [
"requests.post",
"requests.put"
] | [((31, 90), 'requests.put', 'requests.put', (['f"""http://localhost:9200/{sys.argv[1]}?pretty"""'], {}), "(f'http://localhost:9200/{sys.argv[1]}?pretty')\n", (43, 90), False, 'import requests\n'), ((189, 288), 'requests.post', 'requests.post', (['f"""http://localhost:9200/{sys.argv[1]}/_bulk?pretty"""'], {'headers': 'h... |
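# A hedged sketch of what the file POSTed above is expected to contain:
# Elasticsearch's _bulk endpoint takes newline-delimited JSON, one action
# line followed by one document line. The index name and fields here are
# illustrative.
import json

docs = [{"title": "first"}, {"title": "second"}]
lines = []
for doc in docs:
    lines.append(json.dumps({"index": {"_index": "my-index"}}))  # action line
    lines.append(json.dumps(doc))                              # document line
with open("bulk.ndjson", "w") as f:
    f.write("\n".join(lines) + "\n")  # _bulk requires a trailing newline
# The record's script then takes the index name and this file as its two
# command-line arguments.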
import torch
import torch.nn.functional as F
from torch.optim import Adam
from torch.utils.data import DataLoader
import torchvision
from torchvision import transforms
from torchvision.models import resnet101
import pytorch_lightning as pl
from model.AEINet import ADDGenerator, MultilevelAttributesEncoder
from model... | [
"torchvision.transforms.CenterCrop",
"model.AEINet.ADDGenerator",
"model.loss.AEI_Loss",
"torch.load",
"torch.stack",
"torchvision.models.resnet101",
"torchvision.transforms.Resize",
"torch.nn.functional.normalize",
"model.AEINet.MultilevelAttributesEncoder",
"model.loss.GANLoss",
"model.MultiSc... | [((582, 618), 'model.AEINet.ADDGenerator', 'ADDGenerator', (['hp.arcface.vector_size'], {}), '(hp.arcface.vector_size)\n', (594, 618), False, 'from model.AEINet import ADDGenerator, MultilevelAttributesEncoder\n'), ((636, 665), 'model.AEINet.MultilevelAttributesEncoder', 'MultilevelAttributesEncoder', ([], {}), '()\n',... |
from configs import cfg
from src.utils.record_log import _logger
import numpy as np
import tensorflow as tf
import scipy.stats as stats
class Evaluator(object):
def __init__(self, model):
self.model = model
self.global_step = model.global_step
## ---- summary----
self.build_summar... | [
"src.utils.record_log._logger.add",
"numpy.mean",
"tensorflow.summary.merge_all",
"tensorflow.placeholder",
"numpy.argmax",
"tensorflow.summary.scalar",
"numpy.array",
"tensorflow.name_scope",
"numpy.concatenate",
"scipy.stats.pearsonr",
"scipy.stats.spearmanr",
"tensorflow.summary.FileWriter"... | [((346, 384), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['cfg.summary_dir'], {}), '(cfg.summary_dir)\n', (367, 384), True, 'import tensorflow as tf\n'), ((461, 474), 'src.utils.record_log._logger.add', '_logger.add', ([], {}), '()\n', (472, 474), False, 'from src.utils.record_log import _logger\n'), ((... |
import RPi.GPIO as GPIO
import connexion
if __name__ == '__main__':
app = connexion.App('a-pi-api')
app.add_api('v0/spec.yml')
app.run(host='0.0.0.0', port=80)
| [
"connexion.App"
] | [((76, 101), 'connexion.App', 'connexion.App', (['"""a-pi-api"""'], {}), "('a-pi-api')\n", (89, 101), False, 'import connexion\n')] |
#coding=utf-8
import matplotlib
matplotlib.use("Agg")
import tensorflow as tf
import argparse
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D,MaxPooling2D,UpSampling2D,BatchNormalization,Reshape,Permute,Activation
from tensorflow.keras.utils imp... | [
"sklearn.preprocessing.LabelEncoder",
"matplotlib.pyplot.ylabel",
"tensorflow.keras.layers.BatchNormalization",
"numpy.array",
"numpy.arange",
"os.listdir",
"tensorflow.keras.layers.Conv2D",
"argparse.ArgumentParser",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.style.use",
"numpy.random.seed"... | [((34, 55), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (48, 55), False, 'import matplotlib\n'), ((650, 670), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (664, 670), True, 'import numpy as np\n'), ((785, 799), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}... |
from . import utils
import os
import scanpy as sc
import scprep
import tempfile
URL = "https://ndownloader.figshare.com/files/25555751"
@utils.loader
def load_human_blood_nestorowa2016(test=False):
"""Download Nesterova data from Figshare."""
if test:
# load full data first, cached if available
... | [
"tempfile.TemporaryDirectory",
"scanpy.read",
"scanpy.pp.subsample",
"os.path.join",
"scprep.io.download.download_url"
] | [((488, 521), 'scanpy.pp.subsample', 'sc.pp.subsample', (['adata'], {'n_obs': '(500)'}), '(adata, n_obs=500)\n', (503, 521), True, 'import scanpy as sc\n'), ((741, 770), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (768, 770), False, 'import tempfile\n'), ((806, 861), 'os.path.join', ... |
from minidoc import svg
from minidoc import tst
from efdir import fs
import shutil
import os
def creat_one_svg(k,v,i=None,**kwargs):
if("dst_dir" in kwargs):
dst_dir = kwargs['dst_dir']
else:
dst_dir = "./images"
screen_size = svg.get_screen_size(v,**kwargs)
kwargs['screen_size'] = scre... | [
"minidoc.tst.get_svg_name",
"minidoc.svg.cmds_arr2str",
"shutil.move",
"minidoc.svg.get_screen_size",
"os.path.join",
"efdir.fs.mkdir",
"minidoc.svg.creat_svg"
] | [((256, 288), 'minidoc.svg.get_screen_size', 'svg.get_screen_size', (['v'], {}), '(v, **kwargs)\n', (275, 288), False, 'from minidoc import svg\n'), ((343, 372), 'minidoc.svg.cmds_arr2str', 'svg.cmds_arr2str', (['v'], {}), '(v, **kwargs)\n', (359, 372), False, 'from minidoc import svg\n'), ((390, 423), 'minidoc.svg.cre... |
# Generated by Django 3.0.7 on 2020-07-19 03:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_auto_20200614_0254'),
]
operations = [
migrations.AddField(
model_name='touristspot',
name='photo',
... | [
"django.db.models.ImageField"
] | [((335, 393), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""core"""'}), "(blank=True, null=True, upload_to='core')\n", (352, 393), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test some MITAB specific translation issues.
# Author: <NAME>, <even dot rouault at mines dash paris dot org>
#
###################################################... | [
"osgeo.osr.SpatialReference",
"gdaltest.run_tests",
"gdaltest.summarize",
"gdaltest.setup_run",
"gdaltest.post_reason",
"sys.path.append"
] | [((1570, 1597), 'sys.path.append', 'sys.path.append', (['"""../pymod"""'], {}), "('../pymod')\n", (1585, 1597), False, 'import sys\n'), ((1823, 1845), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (1843, 1845), False, 'from osgeo import osr\n'), ((2844, 2866), 'osgeo.osr.SpatialReference', 'os... |
# -*- coding: utf-8 -*-
import math
from typing import Tuple, Union
import numpy as np
from .cutting_plane import CUTStatus
Arr = Union[np.ndarray]
class ell_stable:
"""Ellipsoid Search Space
ell_stable = {x | (x − xc)' Q^−1 (x − xc) ≤ κ}
Returns:
[type] -- [description]
"""
... | [
"numpy.eye",
"math.sqrt",
"numpy.isscalar",
"numpy.diag"
] | [((1213, 1229), 'numpy.isscalar', 'np.isscalar', (['val'], {}), '(val)\n', (1224, 1229), True, 'import numpy as np\n'), ((4841, 4858), 'numpy.isscalar', 'np.isscalar', (['beta'], {}), '(beta)\n', (4852, 4858), True, 'import numpy as np\n'), ((6096, 6132), 'math.sqrt', 'math.sqrt', (['(t0n * t1n + tempn * tempn)'], {}),... |
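# A small numeric illustration of the set definition in the docstring
# above, assuming numpy; Q, kappa, and the test points are arbitrary.
import numpy as np

Q = np.diag([4.0, 1.0])   # ellipse with semi-axes 2 and 1 when kappa = 1
xc = np.array([0.0, 0.0])  # center
kappa = 1.0

def inside(x):
    d = x - xc
    return d @ np.linalg.solve(Q, d) <= kappa  # (x - xc)' Q^-1 (x - xc) <= kappa

print(inside(np.array([1.0, 0.5])))  # True:  0.25 + 0.25 = 0.5 <= 1
print(inside(np.array([2.0, 1.0])))  # False: 1.0 + 1.0 = 2.0 > 1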
from flask_wtf import FlaskForm
from wtforms import PasswordField, StringField, SubmitField
from wtforms.validators import DataRequired
#
# Purpose: This form will be used to collect the information for the user logging in
# and logging out.
#
# Fields:
# Password: <PASSWORD>
# Username: This contains the nam... | [
"wtforms.validators.DataRequired",
"wtforms.SubmitField"
] | [((811, 831), 'wtforms.SubmitField', 'SubmitField', (['"""Login"""'], {}), "('Login')\n", (822, 831), False, 'from wtforms import PasswordField, StringField, SubmitField\n'), ((711, 725), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (723, 725), False, 'from wtforms.validators import DataRequired... |
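# A hedged sketch of the login form the comments above describe; the field
# names and validators are assumptions, not the record's actual code.
from flask_wtf import FlaskForm
from wtforms import PasswordField, StringField, SubmitField
from wtforms.validators import DataRequired

class LoginForm(FlaskForm):
    username = StringField('Username', [DataRequired()])
    password = PasswordField('Password', [DataRequired()])
    submit = SubmitField('Login')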
import time
from netmiko.base_connection import BaseConnection
class F5TmshSSH(BaseConnection):
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read()
self.set_base_prompt()
self.tmsh_mode()
self.set_bas... | [
"time.sleep"
] | [((915, 943), 'time.sleep', 'time.sleep', (['(1 * delay_factor)'], {}), '(1 * delay_factor)\n', (925, 943), False, 'import time\n')] |
from .. import global_vars as g
from ..window import Window
import numpy as np
from ..roi import makeROI
class TestSettings():
def test_random_roi_color(self):
initial = g.settings['roi_color']
g.settings['roi_color'] = 'random'
w1 = Window(np.random.random([10, 10, 10]))
roi1 = makeROI('rectangle', [[1, 1],... | [
"numpy.random.random"
] | [((250, 280), 'numpy.random.random', 'np.random.random', (['[10, 10, 10]'], {}), '([10, 10, 10])\n', (266, 280), True, 'import numpy as np\n'), ((868, 898), 'numpy.random.random', 'np.random.random', (['[10, 10, 10]'], {}), '([10, 10, 10])\n', (884, 898), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# collect a set of trip_ids at all stops in a GTFS file over the selected week of the service period starting at serviceweekstartdate
# filter stops near trainstations based on input txt file - stopsneartrainstop_post_edit
# merge sets of trips at stops near each trainst... | [
"stopswtrainstopidsandtpdperline_v1.main",
"process_date.get_date_now",
"time.time",
"trip_ids_at_stops_merge_near_trainstops_perday_v3.main"
] | [((633, 660), 'process_date.get_date_now', 'process_date.get_date_now', ([], {}), '()\n', (658, 660), False, 'import process_date\n'), ((662, 797), 'trip_ids_at_stops_merge_near_trainstops_perday_v3.main', 'trip_ids_at_stops_merge_near_trainstops_perday_v3.main', (['processdate', 'cfg.gtfspath', 'cfg.gtfsdirbase', 'cfg... |
"""
NCL_bar_2.py
===============
This script illustrates the following concepts:
- Drawing bars instead of curves in an XY plot
- Changing the aspect ratio of a bar plot
- Drawing filled bars up or down based on a Y reference value
- Setting the minimum/maximum value of the Y axis in a bar plot
- Using n... | [
"geocat.datafiles.get",
"geocat.viz.util.add_major_minor_ticks",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.axes",
"numpy.empty_like",
"numpy.linspace",
"numpy.shape",
"geocat.viz.util.set_titles_and_labels",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((1430, 1449), 'numpy.empty_like', 'np.empty_like', (['date'], {}), '(date)\n', (1443, 1449), True, 'import numpy as np\n'), ((1459, 1486), 'numpy.arange', 'np.arange', (['(0)', 'num_months', '(1)'], {}), '(0, num_months, 1)\n', (1468, 1486), True, 'import numpy as np\n'), ((1755, 1782), 'matplotlib.pyplot.figure', 'p... |
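# A minimal matplotlib sketch of the "filled bars up or down based on a Y
# reference value" concept listed above; the data are made up.
import numpy as np
import matplotlib.pyplot as plt

y = np.array([9.0, 10.5, 9.8, 11.2, 10.1, 8.7])
x = np.arange(len(y))
ref = 10.0  # Y reference value

colors = np.where(y >= ref, 'red', 'blue')
fig, ax = plt.subplots(figsize=(6, 3))
ax.bar(x, y - ref, bottom=ref, color=colors, edgecolor='black')
ax.axhline(ref, color='black', linewidth=0.8)
ax.set_ylim(8, 12)  # explicit Y-axis minimum/maximum, as in the script
plt.show()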
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under... | [
"os.path.dirname",
"gabbi.driver.build_tests"
] | [((972, 1047), 'gabbi.driver.build_tests', 'driver.build_tests', (['self.test_dir', 'self.loader'], {'host': '"""localhost"""', 'port': '(8001)'}), "(self.test_dir, self.loader, host='localhost', port=8001)\n", (990, 1047), False, 'from gabbi import driver\n'), ((1723, 1824), 'gabbi.driver.build_tests', 'driver.build_t... |
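# A hedged sketch of how gabbi's build_tests is typically wired into the
# unittest load_tests protocol; the YAML directory name is an assumption.
import os
from gabbi import driver

def load_tests(loader, tests, pattern):
    # Build one test case per YAML file found under ./gabbits.
    test_dir = os.path.join(os.path.dirname(__file__), 'gabbits')
    return driver.build_tests(test_dir, loader, host='localhost', port=8001)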
#!/pxrpythonsubst
#
# Copyright 2018 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
... | [
"pxr.UsdSkel.Skeleton.Get",
"maya.api.OpenMaya.MFnTransform",
"maya.standalone.initialize",
"maya.cmds.currentTime",
"maya.api.OpenMaya.MSelectionList",
"pxr.UsdSkel.MakeTransform",
"maya.cmds.usdExport",
"pxr.UsdSkel.PackedJointAnimation.Get",
"pxr.Usd.Stage.Open",
"maya.cmds.loadPlugin",
"unit... | [((4135, 4161), 'unittest.main', 'unittest.main', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (4148, 4161), False, 'import unittest\n'), ((1331, 1359), 'maya.standalone.initialize', 'standalone.initialize', (['"""usd"""'], {}), "('usd')\n", (1352, 1359), False, 'from maya import standalone\n'), ((1464, 1501), 'maya.... |
""" Support matrices generation.
radmtx module contains two classes: sender and receiver, representing
the ray sender and receiver in the rfluxmtx operation. A sender object can
be instantiated as a surface, a list of points, or a view, and these are
typical forms of a sender. Similarly, a receiver object can b... | [
"logging.getLogger",
"frads.radutil.Primitive",
"tempfile.TemporaryDirectory",
"frads.radgeom.Polygon",
"subprocess.run",
"frads.makesky.basis_glow",
"frads.radutil.opt2str",
"os.path.join",
"frads.util.spcheckout",
"frads.radutil.parse_polygon",
"frads.radutil.up_vector",
"frads.util.mkdir_p"... | [((608, 641), 'logging.getLogger', 'logging.getLogger', (['"""frads.radmtx"""'], {}), "('frads.radmtx')\n", (625, 641), False, 'import logging\n'), ((3273, 3297), 'frads.radutil.opt2str', 'radutil.opt2str', (['vu_dict'], {}), '(vu_dict)\n', (3288, 3297), False, 'from frads import radutil, util\n'), ((6958, 6983), 'frad... |