code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# -*- coding: utf-8 -*-
import click
import logging
from pathlib import Path
import pandas as pd
import re
import string
from nltk.corpus import stopwords
def brand_preprocess(row, trim_len=2):
""" This function creates a brand name column by parsing out the product column of data. It trims the words based on tri... | [
"logging.getLogger",
"logging.basicConfig",
"re.escape",
"nltk.corpus.stopwords.words",
"pandas.read_csv",
"pathlib.Path",
"click.Path",
"pandas.isna",
"click.command",
"pandas.concat",
"pandas.to_datetime"
] | [((2338, 2353), 'click.command', 'click.command', ([], {}), '()\n', (2351, 2353), False, 'import click\n'), ((2721, 2741), 'pathlib.Path', 'Path', (['output_dirpath'], {}), '(output_dirpath)\n', (2725, 2741), False, 'from pathlib import Path\n'), ((2755, 2774), 'pathlib.Path', 'Path', (['input_dirpath'], {}), '(input_d... |
import os
import re
import hyperparams as hp
from data_load import DataLoad
from tqdm import tqdm
import numpy as np
import pandas as pd
import tensorflow as tf
def load_ckpt_paths(model_name='cdmf'):
# get ckpt
ckpt_path = '../model_ckpt/compare/{}/'.format(model_name)
fpaths = []
with open(ckpt_pat... | [
"tensorflow.Graph",
"tensorflow.reset_default_graph",
"numpy.sqrt",
"tensorflow.Session",
"tqdm.tqdm",
"os.path.join",
"tensorflow.train.import_meta_graph",
"data_load.DataLoad",
"tensorflow.ConfigProto"
] | [((623, 886), 'data_load.DataLoad', 'DataLoad', ([], {'data_path': 'hp.DATA_PATH', 'fnames': 'hp.FNAMES', 'forced_seq_len': 'hp.FORCED_SEQ_LEN', 'vocab_size': 'hp.VOCAB_SIZE', 'paly_times': 'hp.PLAY_TIMES', 'num_main_actors': 'hp.NUM_MAIN_ACTORS', 'batch_size': 'hp.BATCH_SIZE', 'num_epochs': 'hp.NUM_EPOCHS', 'noise_rat... |
# Generated by Django 2.1 on 2018-08-14 09:42
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('substitute_finder', '0002_category'),
]
operations = [
migrations.CreateModel(
name='Product',
... | [
"django.db.models.URLField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.DateTimeField"
] | [((364, 463), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""identifiant"""'}), "(max_length=300, primary_key=True, serialize=False,\n verbose_name='identifiant')\n", (380, 463), False, 'from django.db import migrati... |
# coding=utf-8
# Author: <NAME> <<EMAIL>>
import numpy as np
import re
class KeelAttribute:
"""
A class that represent an attribute of keel dataset format.
"""
TYPE_REAL, TYPE_INTEGER, TYPE_NOMINAL = ("real", "integer", "nominal")
def __init__(self, attribute_name, attribute_type, attribute_rang... | [
"numpy.unique",
"numpy.max",
"numpy.concatenate",
"numpy.min",
"re.findall",
"numpy.transpose"
] | [((1202, 1224), 'numpy.concatenate', 'np.concatenate', (['labels'], {}), '(labels)\n', (1216, 1224), True, 'import numpy as np\n'), ((1253, 1290), 'numpy.unique', 'np.unique', (['labels'], {'return_counts': '(True)'}), '(labels, return_counts=True)\n', (1262, 1290), True, 'import numpy as np\n'), ((1320, 1341), 'numpy.... |
#!/usr/bin/env python3
import os
import re
import sys
import urllib.request
# api_filename = "projects.md"
api_filename = "groups.md"
url = "https://gitlab.com/gitlab-org/gitlab-ce/raw/master/doc/api/" + api_filename
doc_dir = "doc_tmp"
if not os.path.exists(doc_dir):
os.makedirs(doc_dir)
filename, headers =... | [
"os.path.exists",
"os.makedirs",
"re.compile",
"os.path.join",
"re.sub"
] | [((530, 583), 're.compile', 're.compile', (['"""```json.*?```"""', '(re.MULTILINE | re.DOTALL)'], {}), "('```json.*?```', re.MULTILINE | re.DOTALL)\n", (540, 583), False, 'import re\n'), ((603, 626), 're.sub', 're.sub', (['p', '""""""', 'markdown'], {}), "(p, '', markdown)\n", (609, 626), False, 'import re\n'), ((663, ... |
import os
import pandas as pd
import pytest
import yaml
import wandb
run = wandb.init(project='RP_NVIDIA_Machine_Learning',
job_type='data_validation')
@pytest.fixture(scope='session')
def data():
config_path = os.path.join(os.pardir, 'configs')
with open(os.path.join(config_path, 'datav... | [
"pandas.read_csv",
"os.path.join",
"wandb.init",
"pytest.fail",
"yaml.safe_load",
"pytest.fixture"
] | [((78, 154), 'wandb.init', 'wandb.init', ([], {'project': '"""RP_NVIDIA_Machine_Learning"""', 'job_type': '"""data_validation"""'}), "(project='RP_NVIDIA_Machine_Learning', job_type='data_validation')\n", (88, 154), False, 'import wandb\n'), ((175, 206), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""... |
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns(
'popcorn_gallery.tutorials.views',
url(r'^(?P<slug>[\w-]+)/$', 'object_detail', name='object_detail'),
url(r'^$', 'object_list', name='object_list'),
)
| [
"django.conf.urls.defaults.url"
] | [((120, 186), 'django.conf.urls.defaults.url', 'url', (['"""^(?P<slug>[\\\\w-]+)/$"""', '"""object_detail"""'], {'name': '"""object_detail"""'}), "('^(?P<slug>[\\\\w-]+)/$', 'object_detail', name='object_detail')\n", (123, 186), False, 'from django.conf.urls.defaults import patterns, url\n'), ((192, 236), 'django.conf.... |
"""
This test module has tests relating to kelvin model validations.
All functions in /calculations/models_kelvin.py are tested here.
The purposes are:
- testing the meniscus shape determination function
- testing the output of the kelvin equations
- testing that the "function getter" is performing as exp... | [
"pygaps.characterisation.models_kelvin.get_meniscus_geometry",
"numpy.isclose",
"pytest.mark.parametrize",
"pytest.raises",
"pygaps.characterisation.models_kelvin.get_kelvin_model"
] | [((633, 914), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""branch, pore, geometry"""', "[('ads', 'slit', 'hemicylindrical'), ('ads', 'cylinder', 'cylindrical'), (\n 'ads', 'sphere', 'hemispherical'), ('des', 'slit', 'hemicylindrical'),\n ('des', 'cylinder', 'hemispherical'), ('des', 'sphere', 'hemi... |
import os
from datetime import datetime
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from jet_bridge_base import configuration
from jet_bridge.configuration import JetBridgeConfiguration
conf = JetBridgeConfiguration()
configuration.set_configuration(conf)
from jet_bridge_b... | [
"jet_bridge_base.logger.logger.warning",
"jet_bridge_base.logger.logger.info",
"jet_bridge.utils.create_config.create_config",
"tornado.ioloop.IOLoop.current",
"jet_bridge.configuration.JetBridgeConfiguration",
"jet_bridge_base.db.database_connect",
"datetime.datetime.now",
"jet_bridge_base.commands.c... | [((239, 263), 'jet_bridge.configuration.JetBridgeConfiguration', 'JetBridgeConfiguration', ([], {}), '()\n', (261, 263), False, 'from jet_bridge.configuration import JetBridgeConfiguration\n'), ((264, 301), 'jet_bridge_base.configuration.set_configuration', 'configuration.set_configuration', (['conf'], {}), '(conf)\n',... |
from distutils.spawn import find_executable
from os import path
import click
from .settings import (
BASE_DEVELOPMENT_REQUIREMENTS_FILENAME,
BASE_REQUIREMENTS_FILENAME,
DEVELOPMENT_REQUIREMENTS_FILENAME,
REQUIREMENTS_FILENAME,
)
from .util import print_and_run
def _ensure_pip_tools_installed():
... | [
"distutils.spawn.find_executable",
"click.group",
"click.option",
"click.echo",
"os.path.relpath"
] | [((457, 470), 'click.group', 'click.group', ([], {}), '()\n', (468, 470), False, 'import click\n'), ((509, 559), 'click.option', 'click.option', (['"""--dev"""'], {'is_flag': '(True)', 'default': '(False)'}), "('--dev', is_flag=True, default=False)\n", (521, 559), False, 'import click\n'), ((794, 836), 'click.echo', 'c... |
# Copyright © 2020 Interplanetary Database Association e.V.,
# Planetmint and IPDB software contributors.
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
# Code is Apache-2.0 and docs are CC-BY-4.0
from typing import List
from planetmint_driver import Planetmint
class Hosts:
hostnames = []
connections... | [
"planetmint_driver.Planetmint"
] | [((649, 662), 'planetmint_driver.Planetmint', 'Planetmint', (['h'], {}), '(h)\n', (659, 662), False, 'from planetmint_driver import Planetmint\n')] |
from flask import Blueprint, flash, redirect, render_template, request, url_for
from werkzeug.security import check_password_hash, generate_password_hash
from flask_login import login_required, login_user, logout_user
from logbook.models import User, db
from peewee import fn
auth = Blueprint("auth", __name__)
... | [
"flask.render_template",
"flask.flash",
"flask_login.login_user",
"flask_login.logout_user",
"flask.request.form.get",
"flask.url_for",
"peewee.fn.Lower",
"werkzeug.security.generate_password_hash",
"flask.Blueprint",
"werkzeug.security.check_password_hash"
] | [((292, 319), 'flask.Blueprint', 'Blueprint', (['"""auth"""', '__name__'], {}), "('auth', __name__)\n", (301, 319), False, 'from flask import Blueprint, flash, redirect, render_template, request, url_for\n'), ((373, 402), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (388, ... |
import aiohttp
from aiohttp import ClientConnectionError, ClientResponseError
from .models import CoinsResponse, SimplePriceResponse
from .configs import Config
from typing import List, Dict, Union
class APIHandler:
def __init__(self):
self._config: Config = Config()
async def get_supported_coins(sel... | [
"aiohttp.ClientSession"
] | [((413, 436), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (434, 436), False, 'import aiohttp\n'), ((1469, 1492), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (1490, 1492), False, 'import aiohttp\n')] |
from argparse import ArgumentParser
from pathlib import Path
from tensorflow import keras
# Define this script's flags
parser = ArgumentParser()
parser.add_argument('--lr', type=float, default=1e-3)
parser.add_argument('--batch_size', type=int, default=32)
parser.add_argument('--max_epochs', type=int, default=5)
pars... | [
"tensorflow.keras.callbacks.TensorBoard",
"argparse.ArgumentParser",
"pathlib.Path",
"tensorflow.keras.datasets.mnist.load_data",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.Flatten"
] | [((130, 146), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (144, 146), False, 'from argparse import ArgumentParser\n'), ((636, 690), 'tensorflow.keras.datasets.mnist.load_data', 'keras.datasets.mnist.load_data', (["(data_dir / 'mnist.npz')"], {}), "(data_dir / 'mnist.npz')\n", (666, 690), False, 'from... |
import pygame as pg
from pygame.time import Clock
from src.drawer import Drawer
from src.game import Game
from src.utils.config import Config
from src.utils.score import ScoresList
from src.utils.sfx import SfxHolder
from src.utils.text import Text
from src.utils.util import Util, user_quit
class Loop:
def __ini... | [
"pygame.event.get",
"pygame.display.flip",
"src.utils.util.user_quit",
"pygame.time.Clock",
"pygame.display.update"
] | [((560, 567), 'pygame.time.Clock', 'Clock', ([], {}), '()\n', (565, 567), False, 'from pygame.time import Clock\n'), ((907, 921), 'pygame.event.get', 'pg.event.get', ([], {}), '()\n', (919, 921), True, 'import pygame as pg\n'), ((1749, 1768), 'pygame.display.update', 'pg.display.update', ([], {}), '()\n', (1766, 1768),... |
#!/usr/bin/env python
"""A third example to get started with tinypyki.
Toying with mass certificate generation.
"""
import os
import tinypyki as tiny
print("Creating a pki instance named \"mass-pki\"")
pki = tiny.PKI("mass-pki")
print("Create the \"root-ca\"")
root_ca = tiny.Node(nid = "root-ca", pathlen = 1, s... | [
"tinypyki.do.insert",
"tinypyki.change.subj",
"tinypyki.do.verify",
"tinypyki.PKI",
"tinypyki.do.verifyenv",
"tinypyki.do.everything",
"tinypyki.do.revoke",
"tinypyki.Node",
"tinypyki.show"
] | [((213, 233), 'tinypyki.PKI', 'tiny.PKI', (['"""mass-pki"""'], {}), "('mass-pki')\n", (221, 233), True, 'import tinypyki as tiny\n'), ((278, 334), 'tinypyki.Node', 'tiny.Node', ([], {'nid': '"""root-ca"""', 'pathlen': '(1)', 'san': '"""email=<EMAIL>"""'}), "(nid='root-ca', pathlen=1, san='email=<EMAIL>')\n", (287, 334)... |
from django.contrib import admin
from django.contrib.gis import admin as geo_model_admin
from leaflet.admin import LeafletGeoAdmin
from .models import Forecasts, Dam, Species
# Forecast Model
class ForecastsAdmin(admin.ModelAdmin):
list_display = ('dam', 'species', 'forecast_range')
admin.site.register(Forecasts, F... | [
"django.contrib.admin.site.register"
] | [((288, 334), 'django.contrib.admin.site.register', 'admin.site.register', (['Forecasts', 'ForecastsAdmin'], {}), '(Forecasts, ForecastsAdmin)\n', (307, 334), False, 'from django.contrib import admin\n'), ((435, 477), 'django.contrib.admin.site.register', 'admin.site.register', (['Species', 'SpeciesAdmin'], {}), '(Spec... |
# -*- coding: UTF-8 -*-
# vim: set expandtab sw=4 ts=4 sts=4:
#
# phpMyAdmin web site
#
# Copyright (C) 2008 - 2016 <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either vers... | [
"markupfield.fields.MarkupField",
"urllib2.urlopen",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"json.dumps",
"pmaweb.cdn.purge_all_cdn",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"django.core.urlresolvers.reverse",
"dja... | [((15141, 15176), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Release'}), '(post_save, sender=Release)\n', (15149, 15176), False, 'from django.dispatch import receiver\n'), ((16374, 16410), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Download'}), '(post_save, sender=Downl... |
from django.conf import settings
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib import admin
admin.autodiscover()
from account.openid_consumer import PinaxConsumer
handler500 = "pinax.views.server_error"
if settings.ACCOUNT_OPEN_SIGNUP:
... | [
"tagging.models.TaggedItem.objects.get_by_model",
"account.openid_consumer.PinaxConsumer",
"django.contrib.admin.autodiscover"
] | [((166, 186), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (184, 186), False, 'from django.contrib import admin\n'), ((815, 830), 'account.openid_consumer.PinaxConsumer', 'PinaxConsumer', ([], {}), '()\n', (828, 830), False, 'from account.openid_consumer import PinaxConsumer\n'), ((1653,... |
import os
import sys
import pytest
import numpy as np
import pandas as pd
from scipy.stats import ks_2samp
sys.path.append("zarnitsa/")
from zarnitsa.stats import DataAugmenterExternally
N_TO_CHECK = 500
SIG = 0.5
@pytest.fixture
def dae():
return DataAugmenterExternally()
@pytest.fixture
def normal_data()... | [
"numpy.random.normal",
"zarnitsa.stats.DataAugmenterExternally",
"sys.path.append",
"scipy.stats.ks_2samp"
] | [((109, 137), 'sys.path.append', 'sys.path.append', (['"""zarnitsa/"""'], {}), "('zarnitsa/')\n", (124, 137), False, 'import sys\n'), ((259, 284), 'zarnitsa.stats.DataAugmenterExternally', 'DataAugmenterExternally', ([], {}), '()\n', (282, 284), False, 'from zarnitsa.stats import DataAugmenterExternally\n'), ((343, 388... |
from .Wavefunction import Wavefunction
import numpy as np
from scipy.fft import ifft2, fft2
import numba
CACHE_OPTIMIZATIONS = True
class Collision():
targetWavefunction = None # Implements wilson line
incidentWavefunction = None # Doesn't (have to) implement wilson line
_omega = None
_omegaFFT = ... | [
"numpy.reshape",
"scipy.fft.fft2",
"numpy.sqrt",
"numpy.conj",
"numpy.exp",
"numpy.array",
"numpy.zeros",
"numba.jit",
"numpy.sum",
"numpy.arctan2",
"numpy.linalg.norm",
"numpy.sin"
] | [((17641, 17773), 'numba.jit', 'numba.jit', (['(numba.float64[:, :], numba.int64, numba.int64, numba.int64, numba.float64)'], {'nopython': '(True)', 'cache': 'CACHE_OPTIMIZATIONS'}), '((numba.float64[:, :], numba.int64, numba.int64, numba.int64,\n numba.float64), nopython=True, cache=CACHE_OPTIMIZATIONS)\n', (17650,... |
import numpy as np
import tensorflow as tf
import model
if __name__ == "__main__":
print("Making level configs...")
level_configs = model.default_level_configs()
print("Making filter variables...")
filters = model.make_filters(tf.get_default_graph(), level_configs)
print("Done")
| [
"model.default_level_configs",
"tensorflow.get_default_graph"
] | [((144, 173), 'model.default_level_configs', 'model.default_level_configs', ([], {}), '()\n', (171, 173), False, 'import model\n'), ((248, 270), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (268, 270), True, 'import tensorflow as tf\n')] |
from datadog import initialize, api
options = {
'api_key': 'api_key',
'app_key': 'app_key'
}
initialize(**options)
# Schedule downtime
api.Downtime.create(scope='env:staging', start=int(time.time()))
| [
"datadog.initialize"
] | [((103, 124), 'datadog.initialize', 'initialize', ([], {}), '(**options)\n', (113, 124), False, 'from datadog import initialize, api\n')] |
import contextlib
import re
from typing import NamedTuple, Optional
import discord
from redbot.core.commands import BadArgument, Context, MemberConverter
_discord_member_converter_instance = MemberConverter()
_id_regex = re.compile(r"([0-9]{15,21})$")
_mention_regex = re.compile(r"<@!?([0-9]{15,21})>$")
class Membe... | [
"redbot.core.commands.BadArgument",
"redbot.core.commands.MemberConverter",
"contextlib.suppress",
"re.compile"
] | [((193, 210), 'redbot.core.commands.MemberConverter', 'MemberConverter', ([], {}), '()\n', (208, 210), False, 'from redbot.core.commands import BadArgument, Context, MemberConverter\n'), ((223, 252), 're.compile', 're.compile', (['"""([0-9]{15,21})$"""'], {}), "('([0-9]{15,21})$')\n", (233, 252), False, 'import re\n'),... |
from django.contrib import admin
from face_api.models import KnowledgeDatabase
from face_api.models import ImageUploads
# Register your models here.
admin.site.register(KnowledgeDatabase)
admin.site.register(ImageUploads)
| [
"django.contrib.admin.site.register"
] | [((150, 188), 'django.contrib.admin.site.register', 'admin.site.register', (['KnowledgeDatabase'], {}), '(KnowledgeDatabase)\n', (169, 188), False, 'from django.contrib import admin\n'), ((189, 222), 'django.contrib.admin.site.register', 'admin.site.register', (['ImageUploads'], {}), '(ImageUploads)\n', (208, 222), Fal... |
from dydx3.constants import SYNTHETIC_ASSET_MAP, SYNTHETIC_ASSET_ID_MAP, ASSET_RESOLUTION, COLLATERAL_ASSET
class TestConstants():
def test_constants_have_regular_structure(self):
for market, asset in SYNTHETIC_ASSET_MAP.items():
market_parts = market.split('-')
base_token, quote_t... | [
"dydx3.constants.SYNTHETIC_ASSET_MAP.values",
"dydx3.constants.SYNTHETIC_ASSET_MAP.items",
"dydx3.constants.ASSET_RESOLUTION.keys",
"dydx3.constants.SYNTHETIC_ASSET_ID_MAP.keys"
] | [((215, 242), 'dydx3.constants.SYNTHETIC_ASSET_MAP.items', 'SYNTHETIC_ASSET_MAP.items', ([], {}), '()\n', (240, 242), False, 'from dydx3.constants import SYNTHETIC_ASSET_MAP, SYNTHETIC_ASSET_ID_MAP, ASSET_RESOLUTION, COLLATERAL_ASSET\n'), ((482, 510), 'dydx3.constants.SYNTHETIC_ASSET_MAP.values', 'SYNTHETIC_ASSET_MAP.v... |
#INVASION COMMANDS:
# !invasions // !atinvasions <reward> // !rminvasions
import discord
from discord.ext import commands
import asyncio
from src import sess
class Invasions(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.alert_dict = {} # user: reward, list of prev invasions with ... | [
"discord.ext.commands.Cog.listener",
"src.sess.request",
"asyncio.sleep",
"discord.Embed",
"discord.ext.commands.command"
] | [((333, 356), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (354, 356), False, 'from discord.ext import commands\n'), ((535, 553), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (551, 553), False, 'from discord.ext import commands\n'), ((1969, 1987), 'discord.ext... |
import sqlalchemy as sa
from .meta import Base
class Person(Base):
__tablename__ = "person"
id = sa.Column(sa.Integer, primary_key=True)
name = sa.Column(sa.String)
date_of_birth = sa.Column(sa.Date)
height = sa.Column(sa.Integer)
weight = sa.Column(sa.Numeric)
__all__ = [
"Person",
]
| [
"sqlalchemy.Column"
] | [((109, 148), 'sqlalchemy.Column', 'sa.Column', (['sa.Integer'], {'primary_key': '(True)'}), '(sa.Integer, primary_key=True)\n', (118, 148), True, 'import sqlalchemy as sa\n'), ((160, 180), 'sqlalchemy.Column', 'sa.Column', (['sa.String'], {}), '(sa.String)\n', (169, 180), True, 'import sqlalchemy as sa\n'), ((201, 219... |
import sys
sys.path.append("../../configs")
#../../configs
from path import EXP_PATH
import numpy as np
DECAY_PARAMS_DICT =\
{
'stair' :
{
128 :{
'a1': {'initial_lr' : 1e-5, 'decay_steps' : 50000, 'decay_rate' : 0.3},
'a2' : {'initial_lr' : 3e-4, 'decay_step... | [
"sys.path.append",
"numpy.arange"
] | [((11, 43), 'sys.path.append', 'sys.path.append', (['"""../../configs"""'], {}), "('../../configs')\n", (26, 43), False, 'import sys\n'), ((1616, 1631), 'numpy.arange', 'np.arange', (['(1)', '(5)'], {}), '(1, 5)\n', (1625, 1631), True, 'import numpy as np\n')] |
# Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Contains WithAction mixin.
A mixin for processing actions on an object in the scope of put request .
"""
from collections import namedtuple, defaultdict
import werkzeug.exceptions as wzg_exceptions
fro... | [
"collections.namedtuple",
"ggrc.models.document.Document",
"ggrc.login.get_current_user",
"ggrc.rbac.permissions.is_allowed_delete_for",
"ggrc.db.session.delete",
"werkzeug.exceptions.Forbidden",
"ggrc.models.evidence.Evidence",
"ggrc.models.exceptions.ValidationError",
"ggrc.rbac.permissions.has_co... | [((895, 954), 'ggrc.models.reflection.Attribute', 'Attribute', (['"""actions"""'], {'create': '(False)', 'update': '(True)', 'read': '(False)'}), "('actions', create=False, update=True, read=False)\n", (904, 954), False, 'from ggrc.models.reflection import Attribute\n'), ((3437, 3454), 'collections.defaultdict', 'defau... |
import torch
class GAN_discriminator (torch.nn.Module):
def __init__(self, H):
#for GAN
# H=[5, 256, 128, 128, 5, 1, 64, 128, 256, 256, 4096, 1]
#for CGAN
# H =[8, 256, 128, 64, 8, 9, 64, 128, 256, 256, 4096, 1]
super(GAN_discriminator, self).__init__()
#region
... | [
"torch.nn.BatchNorm2d",
"torch.nn.Sigmoid",
"torch.nn.functional.leaky_relu",
"torch.nn.Conv2d",
"torch.nn.Linear",
"torch.nn.ConvTranspose2d",
"torch.cat"
] | [((339, 398), 'torch.nn.ConvTranspose2d', 'torch.nn.ConvTranspose2d', (['H[0]', 'H[0]', '(4, 1)'], {'stride': '(4, 1)'}), '(H[0], H[0], (4, 1), stride=(4, 1))\n', (363, 398), False, 'import torch\n'), ((423, 474), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['H[0]', 'H[1]', '(5, 3)'], {'padding': '(2, 1)'}), '(H[0], H[1], (... |
""" test gpath
isort:skip_file
"""
import os
import sys
import unittest
try:
from unittest import mock
except ImportError:
import mock
SRC = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "src")
if SRC not in sys.path:
sys.path.insert(0, SRC)
from ciopath.gpath import Pat... | [
"ciopath.gpath.Path",
"sys.path.insert",
"mock.patch",
"mock.patch.dict",
"unittest.main",
"os.path.abspath"
] | [((266, 289), 'sys.path.insert', 'sys.path.insert', (['(0)', 'SRC'], {}), '(0, SRC)\n', (281, 289), False, 'import sys\n'), ((14859, 14874), 'unittest.main', 'unittest.main', ([], {}), '()\n', (14872, 14874), False, 'import unittest\n'), ((621, 630), 'ciopath.gpath.Path', 'Path', (['"""/"""'], {}), "('/')\n", (625, 630... |
# Copyright 2022 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required b... | [
"model_compression_toolkit.core.pytorch.pytorch_implementation.PytorchImplementation",
"model_compression_toolkit.get_target_platform_capabilities",
"model_compression_toolkit.core.common.Logger.error",
"importlib.util.find_spec",
"model_compression_toolkit.core.common.Logger.critical",
"model_compression... | [((1546, 1579), 'importlib.util.find_spec', 'importlib.util.find_spec', (['"""torch"""'], {}), "('torch')\n", (1570, 1579), False, 'import importlib\n'), ((2008, 2067), 'model_compression_toolkit.get_target_platform_capabilities', 'get_target_platform_capabilities', (['PYTORCH', 'DEFAULT_TP_MODEL'], {}), '(PYTORCH, DEF... |
import dgl
import torch as th
import numpy as np
import itertools
import time
from collections import *
Graph = namedtuple('Graph',
['g', 'src', 'tgt', 'tgt_y', 'nids', 'eids', 'nid_arr', 'n_nodes', 'n_edges', 'n_tokens', 'layer_eids'])
# We need to create new graph pools for relative position atte... | [
"dgl.batch",
"torch.tensor",
"torch.cat",
"numpy.zeros",
"dgl.DGLGraph",
"time.time",
"torch.zeros",
"torch.arange",
"torch.ones"
] | [((1669, 1680), 'time.time', 'time.time', ([], {}), '()\n', (1678, 1680), False, 'import time\n'), ((4163, 4180), 'dgl.batch', 'dgl.batch', (['g_list'], {}), '(g_list)\n', (4172, 4180), False, 'import dgl\n'), ((8419, 8436), 'dgl.batch', 'dgl.batch', (['g_list'], {}), '(g_list)\n', (8428, 8436), False, 'import dgl\n'),... |
# pylint: disable=not-context-manager
from unittest.mock import ANY, Mock
from starlette.testclient import TestClient
from ariadne.asgi import (
GQL_CONNECTION_ACK,
GQL_CONNECTION_INIT,
GQL_DATA,
GQL_ERROR,
GQL_START,
GraphQL,
)
from ariadne.types import Extension
def test_custom_context_val... | [
"ariadne.asgi.GraphQL",
"starlette.testclient.TestClient",
"unittest.mock.Mock"
] | [((365, 420), 'ariadne.asgi.GraphQL', 'GraphQL', (['schema'], {'context_value': "{'test': 'TEST-CONTEXT'}"}), "(schema, context_value={'test': 'TEST-CONTEXT'})\n", (372, 420), False, 'from ariadne.asgi import GQL_CONNECTION_ACK, GQL_CONNECTION_INIT, GQL_DATA, GQL_ERROR, GQL_START, GraphQL\n'), ((434, 449), 'starlette.t... |
import tensorflow as tf
import prettytensor as pt
import numpy as np
import gym
import math
import random
from collections import deque
from agents import mixed_network, spaces, replay_buffer
tensorType = tf.float32
"""
Implements a Deep Deterministic Policy Gradient agent.
Adjustable parameters:
- Actor / Critic ... | [
"tensorflow.Graph",
"agents.mixed_network.MixedNetwork",
"collections.deque",
"numpy.reshape",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.Session",
"numpy.squeeze",
"tensorflow.gradients",
"tensorflow.global_variables_initializer",
"tensorflow.negative",
"tensorflow.con... | [((1300, 1310), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (1308, 1310), True, 'import tensorflow as tf\n'), ((1334, 1362), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'self.graph'}), '(graph=self.graph)\n', (1344, 1362), True, 'import tensorflow as tf\n'), ((1438, 1458), 'collections.deque', 'deque', ([]... |
from aiohttp.test_utils import TestClient
import pytest
import typing
import unittest.mock
from rolling.kernel import Kernel
from rolling.model.character import CharacterModel
from rolling.model.character import MINIMUM_BEFORE_EXHAUSTED
from rolling.server.document.affinity import AffinityDirectionType
from rolling.se... | [
"pytest.mark.usefixtures"
] | [((11035, 11084), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""websocket_prepare_mock"""'], {}), "('websocket_prepare_mock')\n", (11058, 11084), False, 'import pytest\n'), ((11090, 11147), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""zone_event_manager_listen_mock"""'], {}), "('zone_event_... |
# myWeather.py for inkyphat and RPiZW
print('Starting')
try:
import requests
print('requests module imported')
except:
print('Sorry, need to install requests module')
exit()
wx_url = 'api.openweathermap.org/data/2.5/weather?'
wx_city = 'q=Quispamsis,CA&units=metric'
wx_cityID = 'id=6115383&... | [
"requests.get"
] | [((403, 457), 'requests.get', 'requests.get', (["('http://' + wx_url + wx_cityID + api_key)"], {}), "('http://' + wx_url + wx_cityID + api_key)\n", (415, 457), False, 'import requests\n')] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may ... | [
"six.with_metaclass"
] | [((589, 639), 'six.with_metaclass', 'with_metaclass', (['CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(CaseInsensitiveEnumMeta, str, Enum)\n', (603, 639), False, 'from six import with_metaclass\n'), ((712, 762), 'six.with_metaclass', 'with_metaclass', (['CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(CaseInsensiti... |
from microbit import *
import microbit_i2c_lcd as lcd
i2c.init(sda=pin15,scl=pin13)
display = lcd.lcd(i2c)
display.lcd_display_string(str(chr(247)), 1)
print("this will display a pi symbol for ROM A00 japaneese\n"+\
"display a divide symbol for the A02 ROM european")
i2c.init(sda=pin20,scl=pin19)
| [
"microbit_i2c_lcd.lcd"
] | [((96, 108), 'microbit_i2c_lcd.lcd', 'lcd.lcd', (['i2c'], {}), '(i2c)\n', (103, 108), True, 'import microbit_i2c_lcd as lcd\n')] |
import time
import textwrap
import math
import binascii
from inkfish.create_discriminant import create_discriminant
from inkfish.classgroup import ClassGroup
from inkfish.iterate_squarings import iterate_squarings
from inkfish import proof_wesolowski
from inkfish.proof_of_time import (create_proof_of_time_nwesolowski,... | [
"textwrap.dedent",
"math.ceil",
"inkfish.classgroup.ClassGroup.from_ab_discriminant",
"inkfish.proof_wesolowski.generate_proof",
"inkfish.proof_wesolowski.verify_proof",
"inkfish.proof_pietrzak.generate_proof",
"inkfish.create_discriminant.create_discriminant",
"inkfish.proof_wesolowski.approximate_pa... | [((857, 890), 'inkfish.create_discriminant.create_discriminant', 'create_discriminant', (["b'seed'", '(512)'], {}), "(b'seed', 512)\n", (876, 890), False, 'from inkfish.create_discriminant import create_discriminant\n'), ((899, 939), 'inkfish.classgroup.ClassGroup.from_ab_discriminant', 'ClassGroup.from_ab_discriminant... |
import os
import re
import sys
from setuptools import setup, find_packages
from typing import Optional, Tuple
SETUP_DIR = os.path.dirname(os.path.realpath(__file__))
POLYTRACKER_HEADER = os.path.join(SETUP_DIR, 'polytracker', 'include', 'polytracker', 'polytracker.h')
if not os.path.exists(POLYTRACKER_HEADER):
sy... | [
"os.path.exists",
"setuptools.find_packages",
"os.path.join",
"re.match",
"os.path.realpath",
"sys.stderr.write",
"sys.exit"
] | [((188, 273), 'os.path.join', 'os.path.join', (['SETUP_DIR', '"""polytracker"""', '"""include"""', '"""polytracker"""', '"""polytracker.h"""'], {}), "(SETUP_DIR, 'polytracker', 'include', 'polytracker',\n 'polytracker.h')\n", (200, 273), False, 'import os\n'), ((139, 165), 'os.path.realpath', 'os.path.realpath', (['... |
import os,sys, re
from math import ceil, floor
class Gazette:
"""
Loads and parses municipal gazettes.
Attributes:
file_path: The string path to a gazette.
file: The string containing a gazette's content.
city: A string for the city (or cities) of the gazette.
date: A st... | [
"os.listdir",
"math.floor"
] | [((9122, 9141), 'os.listdir', 'os.listdir', (['input_f'], {}), '(input_f)\n', (9132, 9141), False, 'import os, sys, re\n'), ((7352, 7378), 'math.floor', 'floor', (['(0.2 * max_line_size)'], {}), '(0.2 * max_line_size)\n', (7357, 7378), False, 'from math import ceil, floor\n'), ((7405, 7431), 'math.floor', 'floor', (['(... |
# Generated by Django 3.0.2 on 2020-03-03 21:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('PropertyManagers', '0001_initial'),
('Properties', '0001_initial'),
]
operations = [
... | [
"django.db.models.AutoField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((421, 514), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (437, 514), False, 'from django.db import migrations, models\... |
import os

# Delete the generated file. Note: os.remove raises FileNotFoundError
# if the file does not exist — presumably the file is always created
# beforehand by the generating script (TODO confirm with caller).
GENERATED_FILE = "fichero_generado.txt"
os.remove(GENERATED_FILE)
"os.remove"
] | [((11, 44), 'os.remove', 'os.remove', (['"""fichero_generado.txt"""'], {}), "('fichero_generado.txt')\n", (20, 44), False, 'import os\n')] |
# -*- coding: utf-8 -*-
from remi.gui import *
from remi import start, App
import cv2
import numpy
import chdkptp
import time
import threading
import rawpy
class OpenCVVideoWidget(Image):
def __init__(self, **kwargs):
super(OpenCVVideoWidget, self).__init__("/%s/get_image_data" % id(self), **kwargs)
... | [
"numpy.dstack",
"numpy.fromfile",
"remi.start",
"cv2.imencode",
"chdkptp.ChdkDevice",
"time.sleep",
"threading.Event",
"numpy.random.randint",
"numpy.zeros",
"numpy.empty",
"cv2.cvtColor",
"numpy.full",
"numpy.dtype",
"time.time",
"threading.Thread",
"chdkptp.list_devices"
] | [((31139, 31295), 'remi.start', 'start', (['M10GUI'], {'address': '"""0.0.0.0"""', 'port': '(8081)', 'multiple_instance': '(False)', 'enable_file_cache': '(True)', 'start_browser': '(False)', 'debug': '(False)', 'update_interval': '(0.01)'}), "(M10GUI, address='0.0.0.0', port=8081, multiple_instance=False,\n enable_... |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by appli... | [
"os.path.exists",
"numpy.ones",
"triton_python_backend_utils.InferenceRequest",
"swig_decoders.map_batch",
"torch.utils.dlpack.to_dlpack",
"triton_python_backend_utils.Tensor",
"swig_decoders.Scorer",
"multiprocessing.cpu_count",
"numpy.zeros",
"triton_python_backend_utils.get_output_tensor_by_nam... | [((1805, 1828), 'os.path.exists', 'os.path.exists', (['lm_path'], {}), '(lm_path)\n', (1819, 1828), False, 'import os\n'), ((6428, 6475), 'swig_decoders.map_batch', 'map_batch', (['best_sent', 'self.vocab', 'num_processes'], {}), '(best_sent, self.vocab, num_processes)\n', (6437, 6475), False, 'from swig_decoders impor... |
# 获取学生所有的挑战题目信息
# 包括时间、结果、代码等
# 写入数据库stuoj的stuquestionbh表中
from selenium import webdriver
# from selenium.webdriver.common.by import By
import pymysql
import re
from bs4 import BeautifulSoup
import connsql
# import loginzznuoj
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support impo... | [
"selenium.webdriver.PhantomJS",
"time.sleep",
"connsql.conn.cursor"
] | [((497, 518), 'selenium.webdriver.PhantomJS', 'webdriver.PhantomJS', ([], {}), '()\n', (516, 518), False, 'from selenium import webdriver\n'), ((525, 546), 'connsql.conn.cursor', 'connsql.conn.cursor', ([], {}), '()\n', (544, 546), False, 'import connsql\n'), ((5715, 5728), 'time.sleep', 'time.sleep', (['(2)'], {}), '(... |
import asyncio
import time
import uvloop
import importlib
from pyrogram import Client as Bot, idle
from .config import API_ID, API_HASH, BOT_TOKEN, MONGO_DB_URI, SUDO_USERS, LOG_GROUP_ID
from Yukki import BOT_NAME, ASSNAME, app, chacha, aiohttpsession
from Yukki.YukkiUtilities.database.functions import clean_restart_st... | [
"Yukki.YukkiUtilities.database.queue.remove_active_chat",
"Yukki.chacha.send_message",
"Yukki.app.send_message",
"Yukki.YukkiUtilities.database.queue.get_active_chats",
"Yukki.app.edit_message_text",
"Yukki.YukkiUtilities.database.functions.clean_restart_stage",
"asyncio.get_event_loop",
"pyrogram.Cli... | [((1814, 1838), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1836, 1838), False, 'import asyncio\n'), ((567, 659), 'pyrogram.Client', 'Bot', (['""":yukki:"""', 'API_ID', 'API_HASH'], {'bot_token': 'BOT_TOKEN', 'plugins': "{'root': 'Yukki.Plugins'}"}), "(':yukki:', API_ID, API_HASH, bot_token=B... |
import hashlib
from abc import abstractmethod, ABC
from typing import TYPE_CHECKING
from .. import Signature, ExternalAddress, Hash32
from loopchain.crypto.hashing import build_hash_generator
if TYPE_CHECKING:
from secp256k1 import PrivateKey
from . import Transaction, TransactionVersioner
class Transaction... | [
"loopchain.crypto.hashing.build_hash_generator",
"hashlib.sha3_256"
] | [((441, 502), 'loopchain.crypto.hashing.build_hash_generator', 'build_hash_generator', (['hash_generator_version', 'self._hash_salt'], {}), '(hash_generator_version, self._hash_salt)\n', (461, 502), False, 'from loopchain.crypto.hashing import build_hash_generator\n'), ((1775, 1811), 'hashlib.sha3_256', 'hashlib.sha3_2... |
import csv
import os
resource_dir="/Users/jyj/OneDrive/A_A_Data_Analysis/MINSTP201808DATA2/03-Python/Homework/PyBank/Resources"
file_path=os.path.join(resource_dir,"budget_data.csv")
with open(file_path,newline="") as data_file:
csvreader=csv.reader(data_file,delimiter=",")
next(csvreader)
i=0
Nu... | [
"os.path.join",
"csv.reader"
] | [((140, 185), 'os.path.join', 'os.path.join', (['resource_dir', '"""budget_data.csv"""'], {}), "(resource_dir, 'budget_data.csv')\n", (152, 185), False, 'import os\n'), ((247, 283), 'csv.reader', 'csv.reader', (['data_file'], {'delimiter': '""","""'}), "(data_file, delimiter=',')\n", (257, 283), False, 'import csv\n')] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import json
from kubernetes.client import models as k8s_models
from kubernetes.client import apis as k8s_apis
from kubernetes.client.rest import ApiException
from urllib3.exceptions import MaxRetryError
from . import VERSION_RX
from .. import config
from... | [
"json.loads"
] | [((2091, 2111), 'json.loads', 'json.loads', (['exc.body'], {}), '(exc.body)\n', (2101, 2111), False, 'import json\n')] |
"""Temporal VAE with gaussian margial and laplacian transition prior"""
import torch
import numpy as np
import ipdb as pdb
import torch.nn as nn
import pytorch_lightning as pl
import torch.distributions as D
from torch.nn import functional as F
from .components.beta import BetaVAE_MLP
from .metrics.correla... | [
"numpy.sqrt",
"numpy.log",
"torch.sqrt",
"torch.exp",
"torch.nn.functional.sigmoid",
"torch.sum",
"torch.eye",
"torch.zeros_like",
"torch.randn",
"torch.ones_like",
"torch.abs",
"torch.nn.functional.mse_loss",
"torch.nn.LeakyReLU",
"torch.distributions.laplace.Laplace",
"torch.norm",
"... | [((1386, 1423), 'torch.norm', 'torch.norm', (['diff'], {'dim': '(1)', 'keepdim': '(True)'}), '(diff, dim=1, keepdim=True)\n', (1396, 1423), False, 'import torch\n'), ((1550, 1565), 'torch.abs', 'torch.abs', (['diff'], {}), '(diff)\n', (1559, 1565), False, 'import torch\n'), ((2875, 2898), 'torch.nn.Linear', 'nn.Linear'... |
###############################################################################
# Copyright (c) 2019, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory
# Written by the Merlin dev team, listed in the CONTRIBUTORS file.
# <<EMAIL>>
#
# LLNL-CODE-797170
# All rights reser... | [
"logging.getLogger",
"celery.signals.worker_process_init.connect",
"celery.Celery",
"merlin.config.broker.get_connection_string",
"psutil.Process",
"billiard.current_process",
"merlin.config.results_backend.get_connection_string",
"psutil.cpu_count",
"merlin.config.broker.get_ssl_config",
"merlin.... | [((2003, 2030), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2020, 2030), False, 'import logging\n'), ((2730, 2860), 'celery.Celery', 'Celery', (['"""merlin"""'], {'broker': 'BROKER_URI', 'backend': 'RESULTS_BACKEND_URI', 'broker_use_ssl': 'broker_ssl', 'redis_backend_use_ssl': 'result... |
from datetime import datetime, timedelta
from peewee import SqliteDatabase, Model, PrimaryKeyField, IntegerField, CharField, BooleanField, DateTimeField
from bot.data.config import STATIC_DIR
from bot.utils.logging import logger
db = SqliteDatabase(f"{STATIC_DIR}/db.sqlite3")
class User(Model):
"""
Клас оп... | [
"peewee.BooleanField",
"peewee.CharField",
"peewee.SqliteDatabase",
"datetime.datetime.strptime",
"bot.utils.logging.logger.info",
"peewee.IntegerField",
"datetime.datetime.now",
"peewee.PrimaryKeyField",
"peewee.DateTimeField",
"datetime.timedelta"
] | [((237, 279), 'peewee.SqliteDatabase', 'SqliteDatabase', (['f"""{STATIC_DIR}/db.sqlite3"""'], {}), "(f'{STATIC_DIR}/db.sqlite3')\n", (251, 279), False, 'from peewee import SqliteDatabase, Model, PrimaryKeyField, IntegerField, CharField, BooleanField, DateTimeField\n'), ((372, 412), 'peewee.PrimaryKeyField', 'PrimaryKey... |
# /usr/bin/env python3.5
# -*- mode: python -*-
# =============================================================================
# @@-COPYRIGHT-START-@@
#
# Copyright (c) 2019-2020, Qualcomm Innovation Center, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modifica... | [
"tensorflow.compat.v1.get_collection_ref",
"tensorflow.contrib.graph_editor.detach_outputs",
"aimet_common.utils.AimetLogger.get_area_logger",
"tensorflow.contrib.graph_editor.reroute_ts"
] | [((2155, 2210), 'aimet_common.utils.AimetLogger.get_area_logger', 'AimetLogger.get_area_logger', (['AimetLogger.LogAreas.Utils'], {}), '(AimetLogger.LogAreas.Utils)\n', (2182, 2210), False, 'from aimet_common.utils import AimetLogger\n'), ((31151, 31217), 'tensorflow.compat.v1.get_collection_ref', 'tf.compat.v1.get_col... |
import os
from flask import Flask, Response, render_template, redirect
from flask_restful import reqparse,request, abort, Api, Resource, fields, marshal_with
from flask_sqlalchemy import SQLAlchemy
import sqlite3
app = Flask(__name__)
p_dir = os.path.dirname(os.path.abspath(__file__))
db_file = "sqlite:///{}".forma... | [
"flask_restful.reqparse.RequestParser",
"flask_restful.Api",
"flask.Flask",
"os.path.join",
"flask_restful.marshal_with",
"flask_restful.abort",
"os.path.abspath",
"flask_sqlalchemy.SQLAlchemy"
] | [((222, 237), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (227, 237), False, 'from flask import Flask, Response, render_template, redirect\n'), ((410, 418), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (413, 418), False, 'from flask_restful import reqparse, request, abort, Api, Resource, fields... |
import sys
import os
import json
import csv
from time import strftime
from datetime import timedelta, date, datetime
from flask import Blueprint, render_template, redirect, request, url_for, flash
import server.configuration as cfg
from server.postalservice import checkTemp
from server.helpers import LoginRequired, p... | [
"flask.render_template",
"flask.request.args.get",
"server.helpers.pwIsValid",
"server.models.SzenzorAdatok",
"flask.flash",
"time.strftime",
"os.path.join",
"csv.writer",
"flask.url_for",
"datetime.timedelta",
"os.path.dirname",
"server.configuration.save_allomas",
"server.postalservice.che... | [((428, 489), 'flask.Blueprint', 'Blueprint', (['"""device_bp"""', '__name__'], {'template_folder': '"""templates"""'}), "('device_bp', __name__, template_folder='templates')\n", (437, 489), False, 'from flask import Blueprint, render_template, redirect, request, url_for, flash\n'), ((6142, 6160), 'server.configuration... |
from functools import partial
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.compass import compass
from gdsfactory.components.via_stack import via_stack_slab_npp_m3
from gdsfactory.types import ComponentSpec, Floats, LayerSpecs, Optional
pad_via_stack_slab_npp ... | [
"gdsfactory.components.compass.compass",
"gdsfactory.component.Component",
"functools.partial"
] | [((322, 367), 'functools.partial', 'partial', (['via_stack_slab_npp_m3'], {'size': '(80, 80)'}), '(via_stack_slab_npp_m3, size=(80, 80))\n', (329, 367), False, 'from functools import partial\n'), ((1180, 1191), 'gdsfactory.component.Component', 'Component', ([], {}), '()\n', (1189, 1191), False, 'from gdsfactory.compon... |
import openmoc
import openmc.openmoc_compatible
import openmc.mgxs
import numpy as np
import matplotlib
# Enable Matplotib to work for headless nodes
matplotlib.use('Agg')
import matplotlib.pyplot as plt
plt.ioff()
opts = openmoc.options.Options()
openmoc.log.set_log_level('NORMAL')
##############################... | [
"matplotlib.pyplot.grid",
"openmoc.plotter.plot_cells",
"matplotlib.pyplot.ylabel",
"openmoc.materialize.compute_sph_factors",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"openmoc.log.set_log_level",
"matplotlib.pyplot.yscale",
"openmoc.options.Options",
"mat... | [((152, 173), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (166, 173), False, 'import matplotlib\n'), ((206, 216), 'matplotlib.pyplot.ioff', 'plt.ioff', ([], {}), '()\n', (214, 216), True, 'import matplotlib.pyplot as plt\n'), ((226, 251), 'openmoc.options.Options', 'openmoc.options.Options', (... |
# SPDX-License-Identifier: MIT
# Copyright 2022 hirmiura (https://github.com/hirmiura)
#
# TamaTouを生成するスクリプト
#
# 使い方
# 1. fontforgeでコンソールを出す(fontforge-console.bat)
# 2. ディレクトリ移動
# 3. ffpython TamaTou.py
# 4. 待つ
#
# Orbitron → オービトロン → オーブ → 玉 → Tamaやな!
# Noto → No Toufu → 豆腐だらけだし → Touやな!
# →→ TamaTou
#
import font... | [
"fontforge.open"
] | [((389, 429), 'fontforge.open', 'fontforge.open', (['f"""Orbitron-{weight}.ttf"""'], {}), "(f'Orbitron-{weight}.ttf')\n", (403, 429), False, 'import fontforge\n'), ((549, 591), 'fontforge.open', 'fontforge.open', (['f"""NotoSansJP-{weight}.otf"""'], {}), "(f'NotoSansJP-{weight}.otf')\n", (563, 591), False, 'import font... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
"""
bambu
------
pandas RDF functionality
Installation
--------------
::
# pip install pandas
pip install rdflib
"""
import sys
import pandas as pd
import rdflib
def bambu():
"""
mainfunc
"""
pass
def to... | [
"logging.getLogger",
"logging.basicConfig",
"optparse.OptionParser",
"unittest.main",
"pandas.DataFrame"
] | [((1810, 1854), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': '"""%prog: [args]"""'}), "(usage='%prog: [args]')\n", (1831, 1854), False, 'import optparse\n'), ((1608, 1658), 'pandas.DataFrame', 'pd.DataFrame', (['[[1, 2], [3, 4]]'], {'columns': "['A', 'B']"}), "([[1, 2], [3, 4]], columns=['A', 'B'])\... |
import brainrender
brainrender.SHADER_STYLE = 'cartoon'
from brainrender.scene import Scene
sharptrack_file = 'Examples/example_files/sharptrack_probe_points.mat'
scene = Scene(use_default_key_bindings=True)
scene.add_brain_regions('TH', alpha=.2, wireframe=True)
scene.add_probe_from_sharptrack(sharptrack_file)
s... | [
"brainrender.scene.Scene"
] | [((174, 210), 'brainrender.scene.Scene', 'Scene', ([], {'use_default_key_bindings': '(True)'}), '(use_default_key_bindings=True)\n', (179, 210), False, 'from brainrender.scene import Scene\n')] |
# Import Modules
import os
import csv
# Set the path
filepath = os.path.join("Resources","budget_data.csv")
# Open the CSV file
with open(filepath) as csvfile:
csvreader = csv.reader(csvfile, delimiter=",")
# Skip the header row
next(csvreader)
# Set up some numbers
month = 0
total = 0
max... | [
"os.path.join",
"csv.reader"
] | [((65, 109), 'os.path.join', 'os.path.join', (['"""Resources"""', '"""budget_data.csv"""'], {}), "('Resources', 'budget_data.csv')\n", (77, 109), False, 'import os\n'), ((178, 212), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (188, 212), False, 'import csv\n')] |
#!/usr/bin/python
import sys
import argparse
sys.path.append('./')
from src.utils.list_to_dict import list_to_dict
from src.utils.read_csv import read_csv
from src.utils.map_to_list_csv import map_to_list_csv
from src.gephi.write_csv import write_csv
print("")
print("-----------------------------")
print("Anonymiz... | [
"argparse.ArgumentParser",
"src.utils.read_csv.read_csv",
"src.gephi.write_csv.write_csv",
"src.utils.list_to_dict.list_to_dict",
"sys.path.append"
] | [((47, 68), 'sys.path.append', 'sys.path.append', (['"""./"""'], {}), "('./')\n", (62, 68), False, 'import sys\n'), ((394, 466), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Anonymizes a given attributes CSV"""'}), "(description='Anonymizes a given attributes CSV')\n", (417, 466), Fals... |
from sklearn.cluster import KMeans
import cv2
import PIL
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
from matplotlib import image as img1
import pandas as pd
from scipy.cluster.vq import whiten
import os
class DominantColors:
CLUSTERS = None
IMAGEPATH = None
I... | [
"sklearn.cluster.KMeans",
"matplotlib.pyplot.imshow",
"numpy.histogram",
"scipy.cluster.vq.kmeans",
"os.listdir",
"cv2.resize",
"numpy.arange",
"matplotlib.image.imread",
"scipy.cluster.vq.whiten",
"matplotlib.pyplot.figure",
"numpy.zeros",
"cv2.cvtColor",
"pandas.DataFrame",
"matplotlib.p... | [((564, 590), 'cv2.imread', 'cv2.imread', (['self.IMAGEPATH'], {}), '(self.IMAGEPATH)\n', (574, 590), False, 'import cv2\n'), ((766, 827), 'cv2.resize', 'cv2.resize', (['img', '(self.BASEWIDTH, hsize)', 'PIL.Image.ANTIALIAS'], {}), '(img, (self.BASEWIDTH, hsize), PIL.Image.ANTIALIAS)\n', (776, 827), False, 'import cv2\... |
import torch
from torch import nn
import numpy as np
from models.AttentionLayer import AttentionLayer
from models.SelfAttentionLayer import SelfAttention, SelfAttentionPytorch,\
BertSelfAttentionScores, BertSelfAttentionScoresP, BertMultiSelfAttentionScoresP,\
BertMultiAttentionScoresP, BertAttentionClsQuery
fr... | [
"models.SelfAttentionLayer.BertAttentionClsQuery",
"models.SelfAttentionLayer.SelfAttentionPytorch",
"torch.multinomial",
"numpy.ones",
"torch.nn.Softmax",
"models.SelfAttentionLayer.BertMultiAttentionScoresP",
"models.AttentionLayer.AttentionLayer",
"models.SelfAttentionLayer.BertMultiSelfAttentionSc... | [((12737, 12793), 'torch.tensor', 'torch.tensor', (['embraced_features_token'], {'dtype': 'torch.float'}), '(embraced_features_token, dtype=torch.float)\n', (12749, 12793), False, 'import torch\n'), ((800, 831), 'models.SelfAttentionLayer.SelfAttention', 'SelfAttention', (['self.hidden_size'], {}), '(self.hidden_size)\... |
###############################################################################
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this fi... | [
"logging.getLogger",
"eccodes.codes_release",
"csv2bufr.validate_mapping",
"csv2bufr.apply_scaling",
"csv2bufr.validate_value",
"eccodes.codes_bufr_new_from_samples",
"csv2bufr.transform",
"io.StringIO"
] | [((1204, 1231), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1221, 1231), False, 'import logging\n'), ((5262, 5298), 'eccodes.codes_bufr_new_from_samples', 'codes_bufr_new_from_samples', (['"""BUFR4"""'], {}), "('BUFR4')\n", (5289, 5298), False, 'from eccodes import codes_bufr_new_from... |
from django.urls import path

from flights import views

# URL configuration for the flights app: the root URL ("") is served
# by the app's index view.
urlpatterns = [
    path("", views.index),
]
| [
"django.urls.path"
] | [((71, 92), 'django.urls.path', 'path', (['""""""', 'views.index'], {}), "('', views.index)\n", (75, 92), False, 'from django.urls import path\n')] |
import sys
from setuptools import setup, find_packages
def get_version(fname):
import re
verstrline = open(fname, "rt").read()
mo = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError("Unable to find version string in... | [
"setuptools.find_packages",
"re.search"
] | [((145, 216), 're.search', 're.search', (['"""^__version__ = [\'\\\\"]([^\'\\\\"]*)[\'\\\\"]"""', 'verstrline', 're.M'], {}), '(\'^__version__ = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\', verstrline, re.M)\n', (154, 216), False, 'import re\n'), ((982, 997), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (... |
# -*- coding: utf-8 -*-
"""Usage:
acli ec2 (ls | list | summary) [options] [--region=<region>]
acli ec2 (start | stop | reboot | terminate | info | cpu | vols | net) <instance_id> [options]
-f, --filter=<term> filter results by term
-s, --start=<start_date> metrics start-date
-e,... | [
"acli.services.ec2.ec2_summary",
"docopt.docopt"
] | [((696, 722), 'docopt.docopt', 'docopt', (['__doc__'], {'argv': 'argv'}), '(__doc__, argv=argv)\n', (702, 722), False, 'from docopt import docopt\n'), ((2400, 2415), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (2406, 2415), False, 'from docopt import docopt\n'), ((2323, 2361), 'acli.services.ec2.ec2_su... |
import json
from requests import ConnectionError
from config import *
from utils import *
from get_auth import TOKEN
# Create network
create_network_url = "http://{}:9696/v2.0/networks".format(IP)
token_headers = {
'X-Auth-Token': TOKEN,
'Content-Type': 'application/json'
}
# Create router
create_router_url ... | [
"json.loads",
"json.JSONEncoder"
] | [((547, 565), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (557, 565), False, 'import json\n'), ((877, 895), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (887, 895), False, 'import json\n'), ((1936, 1954), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (1946, 1954), False, 'im... |
import os
import sys
import signal
import asyncio
import json
import time
import traceback
import typing
import socket
import re
import select
import websockets
if sys.platform != "win32":
import termios
import tty
else:
import msvcrt
import win32api
from .. import api
from ..shared import constants, ... | [
"select.select",
"sys.stdin.fileno",
"json.loads",
"asyncio.sleep",
"json.dumps",
"msvcrt.getch",
"traceback.print_exc",
"websockets.connect",
"win32api.SetConsoleCtrlHandler",
"os.getpid",
"asyncio.gather",
"re.sub",
"asyncio.get_event_loop",
"time.time",
"asyncio.create_task"
] | [((1409, 1456), 'win32api.SetConsoleCtrlHandler', 'win32api.SetConsoleCtrlHandler', (['ui.ctrl_c', '(True)'], {}), '(ui.ctrl_c, True)\n', (1439, 1456), False, 'import win32api\n'), ((1776, 1800), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1798, 1800), False, 'import asyncio\n'), ((11615, 116... |
""" Functions for ionospheric modelling: see SDP memo 97
"""
import astropy.units as u
import numpy
from astropy.coordinates import SkyCoord
from data_models.memory_data_models import BlockVisibility
from processing_components.calibration.operations import create_gaintable_from_blockvisibility, \
create_gaintabl... | [
"logging.getLogger",
"matplotlib.pyplot.ylabel",
"processing_components.calibration.operations.create_gaintable_from_blockvisibility",
"numpy.array",
"processing_components.calibration.operations.create_gaintable_from_rows",
"processing_library.util.coordinate_support.skycoord_to_lmn",
"matplotlib.pyplo... | [((757, 784), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (774, 784), False, 'import logging\n'), ((1160, 1215), 'astropy.coordinates.SkyCoord', 'SkyCoord', ([], {'ra': 'ha', 'dec': 'dec', 'frame': '"""icrs"""', 'equinox': '"""J2000"""'}), "(ra=ha, dec=dec, frame='icrs', equinox='J2000... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import collections
from simpysql.Util.Expression import expression as expr, Expression
from simpysql.Util.Response import Response
from .BaseBuilder import BaseBuilder
from simpysql.Util.Dynamic import Dynamic
class SqlServerBuilder(BaseBuilder):
operators = [
... | [
"simpysql.Util.Dynamic.Dynamic",
"simpysql.Util.Expression.expression.list_to_str",
"simpysql.Util.Expression.expression.format_column",
"collections.defaultdict",
"simpysql.Util.Expression.Expression",
"simpysql.Util.Expression.expression.format_string"
] | [((1927, 1941), 'simpysql.Util.Dynamic.Dynamic', 'Dynamic', (['index'], {}), '(index)\n', (1934, 1941), False, 'from simpysql.Util.Dynamic import Dynamic\n'), ((4136, 4165), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (4159, 4165), False, 'import collections\n'), ((4482, 4511), 'co... |
import json
import logging
from pathlib import Path
from hermes.common.lex_utils import success, error
logger = logging.getLogger(__name__)
script_path = Path.cwd().joinpath('hermes/help/script.json')
with script_path.open() as f: script = json.load(f)
def handler(event, context):
help_text = '\n'.join(script['... | [
"logging.getLogger",
"pathlib.Path.cwd",
"json.dumps",
"hermes.common.lex_utils.success",
"json.load"
] | [((113, 140), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (130, 140), False, 'import logging\n'), ((242, 254), 'json.load', 'json.load', (['f'], {}), '(f)\n', (251, 254), False, 'import json\n'), ((345, 371), 'hermes.common.lex_utils.success', 'success', ([], {'message': 'help_text'}),... |
from typing import Optional, Sequence
import torch
from ...gpu import Device
from ...models.encoders import EncoderFactory
from ...models.optimizers import OptimizerFactory
from ...models.q_functions import QFunctionFactory
from ...preprocessing import ActionScaler, RewardScaler, Scaler
from ...torch_utility import T... | [
"torch.no_grad",
"torch.randn"
] | [((2284, 2299), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2297, 2299), False, 'import torch\n'), ((2416, 2462), 'torch.randn', 'torch.randn', (['action.shape'], {'device': 'batch.device'}), '(action.shape, device=batch.device)\n', (2427, 2462), False, 'import torch\n')] |
from time import sleep
from igata.predictors import PredictorBase
class DummyPredictorNoInputNoOutput(PredictorBase):
def predict(self, inputs, meta):
result = {"result": 0.222, "class": "car", "is_valid": True}
return result
class DummyPredictorNoInputNoOutputVariableOutput(PredictorBase):
... | [
"time.sleep"
] | [((894, 903), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (899, 903), False, 'from time import sleep\n')] |
# This is a collection of very short demo-plugins to illustrate how
# to create and register hooks into the various parts of Mailpile
#
# To start creating a new plugin, it may make sense to copy this file,
# globally search/replace the word "Demo" with your preferred plugin
# name and then go delete sections you aren'... | [
"mailpile.util.md5_hex",
"gettext.gettext"
] | [((1377, 1395), 'gettext.gettext', '_', (['"""Demo Contacts"""'], {}), "('Demo Contacts')\n", (1378, 1395), True, 'from gettext import gettext as _\n'), ((1420, 1450), 'gettext.gettext', '_', (['"""This is the demo importer"""'], {}), "('This is the demo importer')\n", (1421, 1450), True, 'from gettext import gettext a... |
import concurrent.futures
import logging
from logging import StreamHandler
import time
import timeit
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)
def do_something(wait_time):
logger.info("Waiting for %d sec... | [
"logging.basicConfig",
"timeit.default_timer",
"logging.getLogger",
"time.sleep"
] | [((103, 210), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (122, 210), False, 'import logging\n'), ((226, 253), 'loggin... |
# $Filename$
# $Authors$
# Last Changed: $Date$ $Committer$ $Revision-Id$
#
# Copyright (c) 2003-2011, German Aerospace Center (DLR)
#
# All rights reserved.
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are
#
#met:
#
... | [
"datetime.datetime",
"datafinder.persistence.metadata.value_mapping.getPersistenceRepresentation",
"datafinder.persistence.metadata.value_mapping.MetadataValue",
"unicodedata.lookup",
"decimal.Decimal"
] | [((2093, 2150), 'unicodedata.lookup', 'unicodedata.lookup', (['"""LATIN SMALL LETTER A WITH DIAERESIS"""'], {}), "('LATIN SMALL LETTER A WITH DIAERESIS')\n", (2111, 2150), False, 'import unicodedata\n'), ((3779, 3820), 'datafinder.persistence.metadata.value_mapping.MetadataValue', 'MetadataValue', (['"""0"""'], {'expec... |
import random
from .api import put_change_grade
# grades[id] = grade for user #{id}.
# grades[0] is not used. Since user id starts from 1.
def change_grade_randomshuffle(grades):
changed_users_id = set(range(len(grades)))
changed_users_id.remove(0)
grades = list(range(len(grades)))
random.shuffle(g... | [
"random.shuffle"
] | [((304, 326), 'random.shuffle', 'random.shuffle', (['grades'], {}), '(grades)\n', (318, 326), False, 'import random\n')] |
import numpy as np
import matplotlib.pyplot as plt
N = 4
ind = np.arange(N) # the x locations for the groups
width = 0.4 # the width of the bars
fig, ax = plt.subplots()
ax.set_ylim(0,11) # outliers only
#ax2.set_ylim(0,35) # most of the data
#ax.spines['bottom'].set_visible(False)
#ax2.spines['top'].set_vi... | [
"matplotlib.pyplot.subplots",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((66, 78), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (75, 78), True, 'import numpy as np\n'), ((165, 179), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (177, 179), True, 'import matplotlib.pyplot as plt\n'), ((2178, 2188), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2186, 2188)... |
from datetime import timedelta
from random import sample, randint
import talktracker as tt
def time_diff(time1, time2):
"""calculate the time different"""
time1_info = timedelta(hours=time1[0], minutes=time1[1], seconds=time1[2])
time2_info = timedelta(hours=time2[0], minutes=time2[1], seconds=time2[2])
... | [
"random.sample",
"talktracker.Team",
"talktracker.Member",
"datetime.timedelta",
"random.randint",
"talktracker.Session"
] | [((178, 239), 'datetime.timedelta', 'timedelta', ([], {'hours': 'time1[0]', 'minutes': 'time1[1]', 'seconds': 'time1[2]'}), '(hours=time1[0], minutes=time1[1], seconds=time1[2])\n', (187, 239), False, 'from datetime import timedelta\n'), ((257, 318), 'datetime.timedelta', 'timedelta', ([], {'hours': 'time2[0]', 'minute... |
"""A simple text editor made in Python 2.7."""
from os import path, chdir
workingdir = path.join(path.dirname(__file__), 'texts')
chdir(workingdir)
from Tkinter import Tk, Text, Button
import tkFileDialog
root = Tk("Text Editor")
text = Text(root)
text.grid()
def saveas():
"""Save file."""
tr... | [
"Tkinter.Tk",
"Tkinter.Button",
"Tkinter.Text",
"os.chdir",
"os.path.dirname",
"tkFileDialog.asksaveasfilename",
"tkFileDialog.askopenfilename"
] | [((135, 152), 'os.chdir', 'chdir', (['workingdir'], {}), '(workingdir)\n', (140, 152), False, 'from os import path, chdir\n'), ((222, 239), 'Tkinter.Tk', 'Tk', (['"""Text Editor"""'], {}), "('Text Editor')\n", (224, 239), False, 'from Tkinter import Tk, Text, Button\n'), ((250, 260), 'Tkinter.Text', 'Text', (['root'], ... |
import sys
n, k= map(int, sys.stdin.readline().split())
def power(a, b):
if b == 0:
return 1
if b % 2:
return (power(a, b//2) ** 2 * a) % P
else:
return (power(a, b//2) ** 2) % P
P = 1000000007
f = [1 for _ in range(n + 1)]
for i in range(2, n + 1):
f[i] = (f[i - 1] * i) % P
A = f[n]
B = (f[n-k]*f[k])%P
p... | [
"sys.stdin.readline"
] | [((26, 46), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (44, 46), False, 'import sys\n')] |
from modem_base import Modem
from network import LTE
import logging
class ModemSequans(Modem):
def __init__(self):
self.lte = LTE()
def power_on(self):
self.lte.init()
def power_off(self):
self.lte.deinit(dettach=True, reset=True)
def init(self):
return True
def... | [
"network.PPP",
"network.LTE",
"logging.debug"
] | [((140, 145), 'network.LTE', 'LTE', ([], {}), '()\n', (143, 145), False, 'from network import LTE\n'), ((527, 549), 'network.PPP', 'network.PPP', (['self.uart'], {}), '(self.uart)\n', (538, 549), False, 'import network\n'), ((1558, 1580), 'logging.debug', 'logging.debug', (['command'], {}), '(command)\n', (1571, 1580),... |
from ravendb.exceptions.exceptions import NonUniqueObjectException, InvalidOperationException
from ravendb.tests.test_base import UserWithId, TestBase
class TestTrackEntity(TestBase):
def setUp(self):
super(TestTrackEntity, self).setUp()
def test_storing_document_with_the_same_id_in_the_same_session_... | [
"ravendb.tests.test_base.UserWithId"
] | [((410, 446), 'ravendb.tests.test_base.UserWithId', 'UserWithId', (['"""User1"""', 'None', '"""users/1"""'], {}), "('User1', None, 'users/1')\n", (420, 446), False, 'from ravendb.tests.test_base import UserWithId, TestBase\n'), ((538, 574), 'ravendb.tests.test_base.UserWithId', 'UserWithId', (['"""User2"""', 'None', '"... |
# Copyright (c) Meta Platforms, Inc
import os
import sys
from setuptools import find_packages, setup
REQUIRED_MAJOR = 3
REQUIRED_MINOR = 7
TEST_REQUIRES = ["numpy", "pytest", "pytest-cov", "scipy"]
DEV_REQUIRES = TEST_REQUIRES + [
"black",
"flake8",
"flake8-bugbear",
"mypy",
"toml",
"usort"... | [
"setuptools.find_packages",
"os.path.join",
"sys.exit"
] | [((729, 744), 'sys.exit', 'sys.exit', (['error'], {}), '(error)\n', (737, 744), False, 'import sys\n'), ((2096, 2213), 'setuptools.find_packages', 'find_packages', ([], {'include': "['flowtorch', 'flowtorch.*']", 'exclude': "['debug', 'tests', 'website', 'examples', 'scripts']"}), "(include=['flowtorch', 'flowtorch.*']... |
from flask import Blueprint
bp = Blueprint('db_analysis',
__name__,
template_folder='templates',
static_folder='static',
static_url_path='/db_analysis/static'
)
from retrobiocat_web.app.db_analysis.routes import bioinformatics, ssn
| [
"flask.Blueprint"
] | [((34, 164), 'flask.Blueprint', 'Blueprint', (['"""db_analysis"""', '__name__'], {'template_folder': '"""templates"""', 'static_folder': '"""static"""', 'static_url_path': '"""/db_analysis/static"""'}), "('db_analysis', __name__, template_folder='templates',\n static_folder='static', static_url_path='/db_analysis/st... |
#!/usr/bin/env python3.9
"""Tasks file used by the *invoke* command.
This simplifies some common development tasks.
Run these tasks with the `invoke` tool.
"""
from __future__ import annotations
import sys
import os
import shutil
import getpass
from glob import glob
from pathlib import Path
import keyring
import s... | [
"semver.VersionInfo",
"keyring.get_credential",
"pathlib.Path",
"keyring.set_password",
"pathlib.Path.cwd",
"os.environ.get",
"getpass.getpass",
"invoke.run",
"setuptools_scm.get_version",
"os.path.isdir",
"invoke.Exit",
"os.mkdir",
"shutil.rmtree",
"getpass.getuser",
"semver.parse_versi... | [((434, 477), 'os.environ.get', 'os.environ.get', (['"""PYTHONBIN"""', 'sys.executable'], {}), "('PYTHONBIN', sys.executable)\n", (448, 477), False, 'import os\n'), ((594, 611), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (609, 611), False, 'import getpass\n'), ((2659, 2687), 'invoke.task', 'task', ([], {'p... |
from Jumpscale import j
from io import BytesIO
import binascii
def main(self):
"""
kosmos -p 'j.servers.gedis.test("threebot_redis_registration")'
"""
####THREEBOT REGISTRATION
phonebook = j.threebot.package.phonebook.client_get()
if j.sal.nettools.tcpPortConnectionTest("www.google.com", 44... | [
"Jumpscale.j.data.nacl.configure",
"Jumpscale.j.clients.redis.get",
"binascii.hexlify",
"io.BytesIO",
"Jumpscale.j.data.nacl.payload_verify",
"Jumpscale.j.threebot.package.phonebook.client_get",
"Jumpscale.j.data.serializers.json.loads",
"Jumpscale.j.data.serializers.json.dumps",
"Jumpscale.j.sal.ne... | [((213, 254), 'Jumpscale.j.threebot.package.phonebook.client_get', 'j.threebot.package.phonebook.client_get', ([], {}), '()\n', (252, 254), False, 'from Jumpscale import j\n'), ((263, 322), 'Jumpscale.j.sal.nettools.tcpPortConnectionTest', 'j.sal.nettools.tcpPortConnectionTest', (['"""www.google.com"""', '(443)'], {}),... |
#!/usr/bin/env python2
# -*- coding:utf-8 -*-
import os
import argparse
import glob
from functools import partial
import fontforge
import psMat
import source
opt_parser= argparse.ArgumentParser()
opt_parser.add_argument("--cjkv_info", type= str,
help= u"the path of cjkv_info")
opt_parser.add_argument("--region"... | [
"argparse.ArgumentParser",
"os.path.join",
"os.path.splitext",
"psMat.translate",
"fontforge.font",
"os.path.dirname",
"functools.partial",
"os.path.basename",
"os.mkdir"
] | [((173, 198), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (196, 198), False, 'import argparse\n'), ((2445, 2461), 'fontforge.font', 'fontforge.font', ([], {}), '()\n', (2459, 2461), False, 'import fontforge\n'), ((1716, 1738), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n... |
import logging
import pandas as pd
from homeassistant.components.alarm_control_panel import (
AlarmControlPanel
)
from homeassistant.core import callback
from homeassistant.util import convert
from .ringalarmdevice import RingAlarmDevice
from .constants import *
from homeassistant.const import (
STATE_ALARM_... | [
"logging.getLogger"
] | [((398, 425), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (415, 425), False, 'import logging\n')] |
from sys import stdin, stdout, stderr
import traceback
import time
from player import Player
from field.field import Field
class Game:
def __init__(self):
self.time_per_move = -1
self.timebank = -1
self.last_update = None
self.max_rounds = -1
self.round = 0
self.pl... | [
"field.field.Field",
"time.clock",
"player.Player",
"sys.stderr.flush",
"time.sleep",
"sys.stdin.readline",
"sys.stdout.flush",
"traceback.print_exc",
"time.time"
] | [((435, 442), 'field.field.Field', 'Field', ([], {}), '()\n', (440, 442), False, 'from field.field import Field\n'), ((521, 532), 'time.time', 'time.time', ([], {}), '()\n', (530, 532), False, 'import time\n'), ((3155, 3169), 'sys.stdout.flush', 'stdout.flush', ([], {}), '()\n', (3167, 3169), False, 'from sys import st... |
import click
import random
from pyfiglet import Figlet
from termcolor import colored, cprint
import imagenet
@click.command()
@click.option("--count", default=10, help="Yield number of codenames.")
def codename_gen(count):
"""Enjoy the codenames 🍺"""
imagenet_cls = imagenet.imagenet1000_labels()
f = Figle... | [
"random.choice",
"click.option",
"pyfiglet.Figlet",
"imagenet.imagenet1000_labels",
"click.command",
"termcolor.cprint"
] | [((111, 126), 'click.command', 'click.command', ([], {}), '()\n', (124, 126), False, 'import click\n'), ((128, 198), 'click.option', 'click.option', (['"""--count"""'], {'default': '(10)', 'help': '"""Yield number of codenames."""'}), "('--count', default=10, help='Yield number of codenames.')\n", (140, 198), False, 'i... |
import logging
import json
from abc import ABCMeta, abstractmethod
from django.contrib import auth
from django.contrib.auth import update_session_auth_hash, password_validation
from django.contrib.auth.tokens import default_token_generator
from django.core.exceptions import ValidationError, PermissionDenied
from djang... | [
"logging.getLogger",
"binder.exceptions.BinderForbidden",
"binder.exceptions.BinderNotFound",
"django.http.HttpResponse",
"binder.exceptions.BinderMethodNotAllowed",
"binder.router.detail_route",
"django.contrib.auth.logout",
"binder.json.JsonResponse",
"django.contrib.auth.authenticate",
"json.lo... | [((1007, 1034), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1024, 1034), False, 'import logging\n'), ((1345, 1376), 'binder.router.detail_route', 'detail_route', ([], {'name': '"""masquerade"""'}), "(name='masquerade')\n", (1357, 1376), False, 'from binder.router import list_route, de... |
#+ echo=False
import numpy
from biobakery_workflows import utilities, visualizations, files
from anadama2 import PweaveDocument
document=PweaveDocument()
# get the variables for this document generation task
vars = document.get_vars()
# determine the document format
pdf_format = True if vars["format"] == "pdf" ... | [
"biobakery_workflows.files.ShotGunVis.path",
"numpy.transpose",
"anadama2.PweaveDocument",
"biobakery_workflows.utilities.microbial_read_proportion_multiple_databases",
"biobakery_workflows.visualizations.qc_read_counts"
] | [((141, 157), 'anadama2.PweaveDocument', 'PweaveDocument', ([], {}), '()\n', (155, 157), False, 'from anadama2 import PweaveDocument\n'), ((450, 514), 'biobakery_workflows.visualizations.qc_read_counts', 'visualizations.qc_read_counts', (['document', "vars['dna_read_counts']"], {}), "(document, vars['dna_read_counts'])... |
# Code behind module for Shapefile_Demo.ipynb
################################
##
## Import Statments
##
################################
# Import standard Python modules
import sys
import datacube
import numpy as np
import fiona
import xarray as xr
from rasterio.features import geometry_mask
import shapely
from sha... | [
"shapely.ops.transform",
"fiona.open",
"pyproj.Proj",
"shapely.geometry.shape",
"rasterio.features.geometry_mask"
] | [((901, 927), 'fiona.open', 'fiona.open', (['shapefile', '"""r"""'], {}), "(shapefile, 'r')\n", (911, 927), False, 'import fiona\n'), ((1422, 1531), 'rasterio.features.geometry_mask', 'geometry_mask', (['geometries'], {'out_shape': 'geobox.shape', 'transform': 'geobox.affine', 'all_touched': '(True)', 'invert': '(True)... |
'''
This example provides three examples of a simple plot of 1-D data.
1. a publication-ready single column figure, which is printed to png (600 dpi), pdf, and svg
2. a presentation-ready figure on a black background
Four steps are involved in each figure:
- load/generate the data
- construct a 1d plot (figure, axis,... | [
"jalapeno.plots.plots.print_fig",
"jalapeno.plots.colorscheme.FigColors.scheme",
"jalapeno.plots.plots.SquareFigure",
"numpy.linspace",
"numpy.cos",
"jalapeno.plots.plots.print_fig_to_pdf"
] | [((573, 603), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(600)'], {}), '(0, 2 * np.pi, 600)\n', (584, 603), True, 'import numpy as np\n'), ((1024, 1096), 'jalapeno.plots.plots.print_fig', 'jpp.print_fig', (['fig', '"""xy-for-publication"""', "['pdf', 'png', 'svg']"], {'dpi': '(600)'}), "(fig, 'xy-for-pub... |