code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license... | [
"json.loads"
] | [((2019, 2032), 'json.loads', 'json.loads', (['s'], {}), '(s)\n', (2029, 2032), False, 'import json\n')] |
import os.path as osp
import sys
import subprocess
subprocess.call(['pip', 'install', 'cvbase'])
import cvbase as cvb
import torch
from torch.autograd import gradcheck
sys.path.append(osp.abspath(osp.join(__file__, '../../')))
from biupdownsample import biupsample_naive, BiupsampleNaive
from biupdownsample import bid... | [
"biupdownsample.BidownsampleNaive",
"os.path.join",
"torch.cuda.synchronize",
"cvbase.Timer",
"biupdownsample.BiupsampleNaive",
"subprocess.call",
"torch.randn",
"cvbase.ProgressBar"
] | [((52, 97), 'subprocess.call', 'subprocess.call', (["['pip', 'install', 'cvbase']"], {}), "(['pip', 'install', 'cvbase'])\n", (67, 97), False, 'import subprocess\n'), ((918, 943), 'cvbase.ProgressBar', 'cvb.ProgressBar', (['loop_num'], {}), '(loop_num)\n', (933, 943), True, 'import cvbase as cvb\n'), ((952, 963), 'cvba... |
from django.template.loader import render_to_string
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from wagtail.admin.edit_handlers import EditHandler
class FormSubmissionsPanel(EditHandler):
template = "wagtailforms/edit_handlers/form_responses_panel.html"
... | [
"django.utils.translation.ugettext"
] | [((454, 473), 'django.utils.translation.ugettext', '_', (['"""{} submissions"""'], {}), "('{} submissions')\n", (455, 473), True, 'from django.utils.translation import ugettext as _\n')] |
# -*- coding: utf-8 -*-
"""
Extension that adjust project file tree to include a namespace package.
This extension adds a **namespace** option to
:obj:`~pyscaffold.api.create_project` and provides correct values for the
options **root_pkg** and **namespace_pkg** to the following functions in the
action list.
"""
impo... | [
"os.path.isdir",
"os.path.join"
] | [((4027, 4077), 'os.path.join', 'join_path', (["opts['project']", '"""src"""', "opts['package']"], {}), "(opts['project'], 'src', opts['package'])\n", (4036, 4077), True, 'from os.path import join as join_path\n'), ((4150, 4199), 'os.path.join', 'join_path', (["opts['project']", '"""src"""', 'namespace_path'], {}), "(o... |
# encoding: utf-8
from mock import call, patch
from django.template import Template, Context
from django.test import TestCase
from core.models import MockModel
@patch("haystack.templatetags.more_like_this.SearchQuerySet")
class MoreLikeThisTagTestCase(TestCase):
def render(self, template, context):
# Wh... | [
"core.models.MockModel.objects.get",
"mock.patch",
"django.template.Template",
"django.template.Context",
"mock.call"
] | [((165, 225), 'mock.patch', 'patch', (['"""haystack.templatetags.more_like_this.SearchQuerySet"""'], {}), "('haystack.templatetags.more_like_this.SearchQuerySet')\n", (170, 225), False, 'from mock import call, patch\n'), ((388, 406), 'django.template.Template', 'Template', (['template'], {}), '(template)\n', (396, 406)... |
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
import editdistance
import os
impo... | [
"fairseq.data.Dictionary.load",
"os.path.join",
"torch.scalar_tensor",
"fairseq.data.FileAudioDataset",
"fairseq.data.encoders.build_tokenizer",
"fairseq.data.data_utils.post_process",
"editdistance.eval"
] | [((3879, 4176), 'fairseq.data.FileAudioDataset', 'FileAudioDataset', (['manifest'], {'sample_rate': 'self.args.sample_rate', 'max_sample_size': 'self.args.max_sample_size', 'min_sample_size': 'self.args.max_sample_size', 'min_length': 'self.args.min_sample_size', 'pad': '(self.args.labels is not None or self.args.enabl... |
"""
All your views aka. your template endpoints go here.
There are two ways to create a view.
1. Create a new Subclass inheriting from one of the flask_template_master views
2. Use the view-factory function flask_template_master.views.create_template_endpoint
Each view requires an 1 (and 2 optional) things:
1. An envi... | [
"flask_template_master.compiler.LatexCompiler",
"flask_template_master.views.create_template_endpoint",
"flask_template_master.global_provider.DictGlobalProvider",
"flask_template_master.Api",
"jinja2.DictLoader",
"jinja2.FileSystemLoader"
] | [((1086, 1091), 'flask_template_master.Api', 'Api', ([], {}), '()\n', (1089, 1091), False, 'from flask_template_master import Api\n'), ((2296, 2311), 'flask_template_master.compiler.LatexCompiler', 'LatexCompiler', ([], {}), '()\n', (2309, 2311), False, 'from flask_template_master.compiler import LatexCompiler\n'), ((2... |
import math
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
class SimpleMLP(nn.Module):
"""Simple MLP function approximator for Q-Learning."""
def __init__(self, in_dim, out_dim, hidden_units=256, num_hidden_layers=1):
super().__init__()
self.input_layer... | [
"torch.nn.functional.linear",
"numpy.prod",
"torch.nn.functional.softmax",
"torch.nn.ReLU",
"math.sqrt",
"torch.sum",
"torch.nn.Linear",
"torch.empty",
"torch.randn",
"torch.flatten"
] | [((580, 612), 'torch.nn.Linear', 'nn.Linear', (['hidden_units', 'out_dim'], {}), '(hidden_units, out_dim)\n', (589, 612), False, 'from torch import nn\n'), ((1877, 1894), 'torch.randn', 'torch.randn', (['size'], {}), '(size)\n', (1888, 1894), False, 'import torch\n'), ((3022, 3037), 'numpy.prod', 'np.prod', (['in_dim']... |
import torch
from torch import nn
from torch.nn import functional as F
class BicubicDownSample(nn.Module):
def bicubic_kernel(self, x, a=-0.50):
"""
This equation is exactly copied from the website below:
https://clouard.users.greyc.fr/Pantheon/experiments/rescaling/index-en.html#bicubic
... | [
"torch.nn.functional.conv2d",
"torch.abs",
"torch.pow",
"torch.transpose",
"torch.tensor",
"torch.round",
"torch.sum",
"torch.nn.functional.pad",
"torch.reshape",
"torch.cat"
] | [((347, 359), 'torch.abs', 'torch.abs', (['x'], {}), '(x)\n', (356, 359), False, 'import torch\n'), ((1070, 1109), 'torch.reshape', 'torch.reshape', (['k'], {'shape': '(1, 1, size, 1)'}), '(k, shape=(1, 1, size, 1))\n', (1083, 1109), False, 'import torch\n'), ((1128, 1158), 'torch.cat', 'torch.cat', (['[k1, k1, k1]'], ... |
import nose2.tools
from typing import Union
from app.util import has_attributes
class SampleClass:
pass
class TestUtil:
@nose2.tools.params(
('SET_VALUE', True),
(None, False),
('NO_ATTRIBUTE', False),
(False, True),
('', True),
(0, True),
)
def test... | [
"app.util.has_attributes"
] | [((522, 549), 'app.util.has_attributes', 'has_attributes', (['obj', '"""attr"""'], {}), "(obj, 'attr')\n", (536, 549), False, 'from app.util import has_attributes\n')] |
# Copyright 2022 by Autodesk, Inc.
# Permission to use, copy, modify, and distribute this software in object code form
# for any purpose and without fee is hereby granted, provided that the above copyright
# notice appears in all copies and that both that copyright notice and the limited
# warranty and restricted ... | [
"dataclasses.field"
] | [((1312, 1343), 'dataclasses.field', 'field', ([], {'repr': '(False)', 'default': 'None'}), '(repr=False, default=None)\n', (1317, 1343), False, 'from dataclasses import dataclass, field\n'), ((1473, 1502), 'dataclasses.field', 'field', ([], {'repr': '(False)', 'init': '(False)'}), '(repr=False, init=False)\n', (1478, ... |
"""
This process creates the two kafka topics to be used.
The input-topic with ten partitions and the output-topic with one partition.
Also preloads the kafka cluster with test data (if flag is set to true).
"""
import os
import time
import json
import logging
from confluent_kafka.admin import AdminClient, NewTopic
fro... | [
"logging.getLogger",
"confluent_kafka.admin.AdminClient",
"os.environ.get",
"os.path.join",
"time.sleep",
"confluent_kafka.Producer",
"os.path.dirname",
"json.load",
"confluent_kafka.admin.NewTopic",
"logging.info"
] | [((382, 401), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (399, 401), False, 'import logging\n'), ((516, 573), 'os.environ.get', 'os.environ.get', (['"""KAFKA_CLUSTER_CONNECT"""', '"""localhost:9092"""'], {}), "('KAFKA_CLUSTER_CONNECT', 'localhost:9092')\n", (530, 573), False, 'import os\n'), ((586, 627... |
#!/usr/bin/python3
import os
import time
import sys
os.system("clear")
print('''\033[91m
CREATED BY Hironotori
''')
def slowprint(s):
for c in s + '\n' :
sys.stdout.write(c)
sys.stdout.flush()
slowprint(''' \033[93m
[1] apt-pkg pip-pip3 [2] apt-pkg python
[3] apt-pkg python2 [4] ... | [
"sys.stdout.write",
"os.system",
"sys.stdout.flush",
"sys.exit"
] | [((53, 71), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (62, 71), False, 'import os\n'), ((1214, 1238), 'os.system', 'os.system', (['"""pkg install"""'], {}), "('pkg install')\n", (1223, 1238), False, 'import os\n'), ((1240, 1264), 'os.system', 'os.system', (['"""pkg upgrade"""'], {}), "('pkg upgrad... |
import pygame
import random
pygame.init()
pygame.font.init()
class Card(object):
""" The Card Class """
def __init__(self, left, top, width, height,
back_color, front_color, solved_color,
display,
font_color, text_font, value=None):
self._rect = pyg... | [
"random.choice",
"pygame.mouse.get_pressed",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.time.wait",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.mouse.get_pos",
"pygame.Rect",
"pygame.draw.rect",
"pygame.font.init",
"pygame.display.set_caption",
"pygame.Color",
... | [((29, 42), 'pygame.init', 'pygame.init', ([], {}), '()\n', (40, 42), False, 'import pygame\n'), ((43, 61), 'pygame.font.init', 'pygame.font.init', ([], {}), '()\n', (59, 61), False, 'import pygame\n'), ((2788, 2828), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Comic Sans MS"""', '(48)'], {}), "('Comic Sans MS'... |
import os
class config:
host = 'zhangxuanyang.zhangxuanyang.ws2.hh-c.brainpp.cn'
username = 'admin'
port = 5672
exp_name = os.path.dirname(os.path.abspath(__file__))
exp_name = '-'.join(i for i in exp_name.split(os.path.sep) if i);
test_send_pipe = exp_name + '-test-send_pipe'
test_recv_p... | [
"os.path.abspath"
] | [((157, 182), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (172, 182), False, 'import os\n')] |
#!/usr/bin/env python3.5
from time import sleep, time
from datetime import datetime, timedelta
from pid.decorator import pidfile
#from subprocess import call
from RPi import GPIO
import requests
import json
#import config
import logging
import signal
import sys
#13: grün
#16: braun
#19: orange
#20: grün
#21: braun
#2... | [
"logging.basicConfig",
"RPi.GPIO.cleanup",
"signal.signal",
"RPi.GPIO.output",
"RPi.GPIO.setup",
"json.dumps",
"time.sleep",
"datetime.datetime.now",
"RPi.GPIO.input",
"sys.exit",
"pid.decorator.pidfile",
"logging.info",
"RPi.GPIO.setmode"
] | [((858, 998), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s - %(levelname)s - %(message)s"""', 'filename': '"""/var/log/deConzSensors.log"""'}), "(level=logging.INFO, format=\n '%(asctime)s - %(levelname)s - %(message)s', filename=\n '/var/log/deConzSensors... |
from shop.forms import UserForm
from django.views import generic
from django.urls import reverse_lazy
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import auth
from .models import Product, Contact, Categ... | [
"django.shortcuts.render",
"math.ceil",
"shop.forms.UserForm",
"django.http.HttpResponse",
"django.shortcuts.get_object_or_404",
"json.dumps",
"shop.models.User.objects.get",
"django.shortcuts.redirect",
"django.core.paginator.Paginator"
] | [((1142, 1164), 'django.core.paginator.Paginator', 'Paginator', (['products', '(6)'], {}), '(products, 6)\n', (1151, 1164), False, 'from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger\n'), ((2977, 3020), 'django.shortcuts.render', 'render', (['request', '"""shop/search.html"""', 'params'], {}), "(r... |
import FWCore.ParameterSet.Config as cms
from SimMuon.GEMDigitizer.muonGEMDigis_cfi import *
from SimMuon.GEMDigitizer.muonGEMPadDigis_cfi import *
from SimMuon.GEMDigitizer.muonGEMPadDigiClusters_cfi import *
muonGEMDigiTask = cms.Task(simMuonGEMDigis, simMuonGEMPadDigis, simMuonGEMPadDigiClusters)
muonGEMDigi = cms... | [
"FWCore.ParameterSet.Config.Task",
"FWCore.ParameterSet.Config.Sequence"
] | [((230, 302), 'FWCore.ParameterSet.Config.Task', 'cms.Task', (['simMuonGEMDigis', 'simMuonGEMPadDigis', 'simMuonGEMPadDigiClusters'], {}), '(simMuonGEMDigis, simMuonGEMPadDigis, simMuonGEMPadDigiClusters)\n', (238, 302), True, 'import FWCore.ParameterSet.Config as cms\n'), ((317, 346), 'FWCore.ParameterSet.Config.Seque... |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in co... | [
"logging.getLogger",
"time.time"
] | [((1469, 1494), 'logging.getLogger', 'logging.getLogger', (['"""root"""'], {}), "('root')\n", (1486, 1494), False, 'import logging\n'), ((4501, 4512), 'time.time', 'time.time', ([], {}), '()\n', (4510, 4512), False, 'import time\n'), ((4968, 4979), 'time.time', 'time.time', ([], {}), '()\n', (4977, 4979), False, 'impor... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 <NAME>
# MIT License (https://opensource.org/licenses/MIT)
import logging
import numpy as np
import torch
from parallel_wavegan.layers import Conv1d
from parallel_wavegan.layers import Conv1d1x1
from parallel_wavegan.layers import Conv2d
from parallel... | [
"logging.basicConfig",
"parallel_wavegan.layers.UpsampleNetwork",
"numpy.prod",
"parallel_wavegan.layers.Conv1d1x1",
"parallel_wavegan.layers.Conv1d",
"parallel_wavegan.layers.Conv2d",
"torch.randn"
] | [((418, 536), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s"""'}), "(level=logging.DEBUG, format=\n '%(asctime)s (%(module)s:%(lineno)d) %(levelname)s: %(message)s')\n", (437, 536), False, 'import logging\n')... |
# Copyright 2020 The Tilt Brush Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to ... | [
"numpy.array",
"numpy.linalg.norm",
"sys.exit",
"numpy.cov",
"numpy.mean",
"os.path.exists",
"numpy.histogram",
"argparse.ArgumentParser",
"tiltbrush.export.iter_meshes",
"tiltbrush.export.TiltBrushMesh.from_meshes",
"numpy.dot",
"os.unlink",
"io.StringIO",
"sys.stdout.flush",
"tiltbrush... | [((2232, 2271), 'sys.stdout.write', 'sys.stdout.write', (["('%-79s\\r' % text[:79])"], {}), "('%-79s\\r' % text[:79])\n", (2248, 2271), False, 'import sys\n'), ((2274, 2292), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2290, 2292), False, 'import sys\n'), ((2314, 2353), 'sys.stdout.write', 'sys.stdout.wr... |
from multiprocessing import Pool
from multiprocessing.pool import ThreadPool
from queue import Queue
from .chunks import chunks
__all__ = 'map_parallel', 'map_multicore', 'map_multithread'
def _pool_map_stream(pool_type, pipe, fn, workers):
assert callable(fn), fn
assert isinstance(workers, int), workers
... | [
"random.random"
] | [((2264, 2279), 'random.random', 'random.random', ([], {}), '()\n', (2277, 2279), False, 'import random, time\n')] |
""" SQLAlchemy database models. """
from datetime import datetime
from depot.fields.sqlalchemy import UploadedFileField
from app import db
from app.util.data import many_to_many, foreign_key
from app.config import TOKEN_LEN
class User(db.Model):
""" User model class. """
id = db.Column(db.Integer(), primary_k... | [
"app.util.data.many_to_many",
"app.db.Integer",
"app.db.String",
"app.db.DateTime",
"app.db.Boolean",
"depot.fields.sqlalchemy.UploadedFileField",
"app.db.Binary",
"app.db.Text",
"app.util.data.foreign_key"
] | [((950, 994), 'app.util.data.foreign_key', 'foreign_key', (['"""User"""'], {'backref_name': '"""sessions"""'}), "('User', backref_name='sessions')\n", (961, 994), False, 'from app.util.data import many_to_many, foreign_key\n'), ((1283, 1335), 'app.util.data.many_to_many', 'many_to_many', (['"""Group"""', '"""User"""'],... |
import h5py
import numpy as np
import os, pdb
import tensorflow as tf
from rllab.envs.base import EnvSpec
from rllab.envs.normalized_env import normalize as normalize_env
import rllab.misc.logger as logger
from sandbox.rocky.tf.algos.trpo import TRPO
from sandbox.rocky.tf.policies.gaussian_mlp_policy import Gaussian... | [
"numpy.clip",
"rllab.misc.logger.add_text_output",
"numpy.array",
"hgail.misc.datasets.RecognitionDataset",
"rllab.misc.logger.set_snapshot_mode",
"rllab.misc.logger.set_snapshot_dir",
"numpy.mean",
"numpy.savez",
"os.path.exists",
"hgail.algos.hgail_impl.Level",
"numpy.where",
"hgail.policies... | [((2367, 2398), 'numpy.savez', 'np.savez', (['filepath'], {'trajs': 'trajs'}), '(filepath, trajs=trajs)\n', (2375, 2398), True, 'import numpy as np\n'), ((4357, 4403), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.julia/v0.6/NGSIM/data"""'], {}), "('~/.julia/v0.6/NGSIM/data')\n", (4375, 4403), False, 'import os,... |
import argparse
import os
from glob import glob
import imageio
from tqdm import tqdm
from csbdeep.utils import normalize
from stardist.models import StarDist3D
def get_image_files(root, image_folder, ext):
# get the image and label mask paths and validate them
image_pattern = os.path.join(root, image_folder... | [
"os.makedirs",
"argparse.ArgumentParser",
"imageio.imsave",
"tqdm.tqdm",
"os.path.join",
"csbdeep.utils.normalize",
"os.path.split",
"imageio.volread",
"stardist.models.StarDist3D",
"glob.glob"
] | [((289, 332), 'os.path.join', 'os.path.join', (['root', 'image_folder', 'f"""*{ext}"""'], {}), "(root, image_folder, f'*{ext}')\n", (301, 332), False, 'import os\n'), ((410, 429), 'glob.glob', 'glob', (['image_pattern'], {}), '(image_pattern)\n', (414, 429), False, 'from glob import glob\n'), ((829, 882), 'stardist.mod... |
import os, sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import json
import logging
import yaml
import requests
import time
from actions.migrate_job_action import MigrateJobAction
from actions.send_alert_action import SendAlertAction
from actions.reboot_node_action import RebootNodeAction
from action... | [
"logging.getLogger",
"logging.debug",
"actions.reboot_node_action.RebootNodeAction",
"actions.send_alert_action.SendAlertAction",
"datetime.datetime.strptime",
"datetime.datetime.utcnow",
"logging.warning",
"utils.prometheus_util.format_url_query",
"requests.get",
"utils.k8s_util.get_job_info_inde... | [((582, 611), 'logging.getLogger', 'logging.getLogger', (['"""activity"""'], {}), "('activity')\n", (599, 611), False, 'import logging\n'), ((1163, 1178), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (1176, 1178), False, 'from email.mime.multipart import MIMEMultipart\n'), ((47, 72), 'os.pat... |
from __future__ import print_function, unicode_literals, absolute_import, division
from six.moves import range, zip, map, reduce, filter
from keras.layers import Input, Conv2D, Conv3D, Activation, Lambda
from keras.models import Model
from keras.layers.merge import Add, Concatenate
import tensorflow as tf
from keras i... | [
"keras.layers.merge.Concatenate",
"numpy.abs",
"six.moves.range",
"numpy.float32",
"re.compile",
"keras.layers.merge.Add",
"keras.layers.Lambda",
"numpy.max",
"keras.layers.Input",
"numpy.zeros",
"keras.layers.Activation",
"keras.models.Model",
"numpy.min",
"six.moves.zip"
] | [((11943, 12140), 're.compile', 're.compile', (['"""^(?P<model>resunet|unet)(?P<n_dim>\\\\d)(?P<prob_out>p)?_(?P<n_depth>\\\\d+)_(?P<kern_size>\\\\d+)_(?P<n_first>\\\\d+)(_(?P<n_channel_out>\\\\d+)out)?(_(?P<last_activation>.+)-last)?$"""'], {}), "(\n '^(?P<model>resunet|unet)(?P<n_dim>\\\\d)(?P<prob_out>p)?_(?P<n_d... |
from maxdb import DB
def runtime_on_any_exception(func):
def decorate(*args, **kwargs):
try:
func(*args, **kwargs)
except:
raise RuntimeError
return decorate
class CLIUtils(object):
DEFAULT_PATH = 'storage.json'
def __init__(self):
self._db = None
... | [
"maxdb.DB"
] | [((5976, 5990), 'maxdb.DB', 'DB', (['self._path'], {}), '(self._path)\n', (5978, 5990), False, 'from maxdb import DB\n')] |
from cognibench.models import CNBModel
from cognibench.capabilities import ContinuousAction, ContinuousObservation
from cognibench.continuous import ContinuousSpace
from cognibench.models.wrappers import MatlabWrapperMixin
class PsPMModel(MatlabWrapperMixin, CNBModel, ContinuousAction, ContinuousObservation):
nam... | [
"cognibench.models.CNBModel.__init__",
"cognibench.continuous.ContinuousSpace",
"cognibench.models.wrappers.MatlabWrapperMixin.__init__"
] | [((747, 858), 'cognibench.models.wrappers.MatlabWrapperMixin.__init__', 'MatlabWrapperMixin.__init__', (['self'], {'lib_paths': 'lib_paths', 'import_base_path': 'import_base_path', 'predict_fn': 'pred'}), '(self, lib_paths=lib_paths, import_base_path=\n import_base_path, predict_fn=pred)\n', (774, 858), False, 'from... |
import unittest
from programy.config.file.yaml_file import YamlConfigurationFile
from programy.clients.restful.config import RestConfiguration
from programy.clients.events.console.config import ConsoleConfiguration
class RestConfigurationTests(unittest.TestCase):
def test_init(self):
yaml = YamlConfigura... | [
"programy.config.file.yaml_file.YamlConfigurationFile",
"programy.clients.restful.config.RestConfiguration",
"programy.clients.events.console.config.ConsoleConfiguration"
] | [((307, 330), 'programy.config.file.yaml_file.YamlConfigurationFile', 'YamlConfigurationFile', ([], {}), '()\n', (328, 330), False, 'from programy.config.file.yaml_file import YamlConfigurationFile\n'), ((634, 659), 'programy.clients.restful.config.RestConfiguration', 'RestConfiguration', (['"""rest"""'], {}), "('rest'... |
r"""Train a neural network to predict feedback for a program string."""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
import random
import numpy as np
from tqdm import tqdm
import torch
import torch.optim as optim
import torch.utils.... | [
"torch.manual_seed",
"argparse.ArgumentParser",
"os.makedirs",
"torch.nn.functional.binary_cross_entropy",
"os.path.join",
"numpy.zeros",
"torch.cuda.is_available",
"os.path.isdir",
"numpy.random.seed",
"torch.round",
"torch.utils.data.DataLoader",
"torch.no_grad",
"torch.device"
] | [((677, 702), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (700, 702), False, 'import argparse\n'), ((1216, 1260), 'torch.device', 'torch.device', (["('cuda' if args.cuda else 'cpu')"], {}), "('cuda' if args.cuda else 'cpu')\n", (1228, 1260), False, 'import torch\n'), ((1407, 1435), 'torch.ma... |
"""
Sparse Poisson Recovery (SPoRe) module for solving Multiple Measurement Vector
problem with Poisson signals (MMVP) by batch stochastic gradient ascent and
Monte Carlo integration
Authors: <NAME>, <NAME>
Reference:
[1] <NAME>, <NAME>, <NAME>, and <NAME>, "Extreme Compressed
Sensing of Poisson Rates from Multip... | [
"numpy.mean",
"numpy.random.default_rng",
"numpy.ones",
"numpy.random.choice",
"numpy.size",
"numpy.log",
"numpy.any",
"numpy.max",
"numpy.sum",
"numpy.zeros",
"numpy.array",
"numpy.isnan",
"numpy.random.seed",
"numpy.einsum",
"numpy.linalg.norm",
"numpy.shape",
"time.time"
] | [((5468, 5479), 'numpy.shape', 'np.shape', (['Y'], {}), '(Y)\n', (5476, 5479), True, 'import numpy as np\n'), ((5488, 5508), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (5502, 5508), True, 'import numpy as np\n'), ((5530, 5563), 'numpy.zeros', 'np.zeros', (['(self.N, self.max_iter)'], {}), '((sel... |
"""Use translation table to translate coding sequence to protein."""
from Bio.Data import CodonTable # type: ignore
from Bio.Seq import Seq # type: ignore
def translate_cds(cds: str, translation_table: str) -> str:
"""Translate coding sequence to protein.
:param cds: str: DNA coding sequence (CDS... | [
"Bio.Seq.Seq"
] | [((632, 640), 'Bio.Seq.Seq', 'Seq', (['cds'], {}), '(cds)\n', (635, 640), False, 'from Bio.Seq import Seq\n')] |
"""
preprocess-twitter.py
python preprocess-twitter.py "Some random text with #hashtags, @mentions and http://t.co/kdjfkdjf (links). :)"
Script for preprocessing tweets by <NAME>
with small modifications by <NAME>
with translation to Python by <NAME>
Translation of Ruby script to create features for GloVe vectors for T... | [
"regex.sub",
"regex.split"
] | [((1019, 1059), 'regex.sub', 're.sub', (['pattern', 'repl', 'text'], {'flags': 'FLAGS'}), '(pattern, repl, text, flags=FLAGS)\n', (1025, 1059), True, 'import regex as re\n'), ((671, 719), 'regex.split', 're.split', (['"""(?=[A-Z])"""', 'hashtag_body'], {'flags': 'FLAGS'}), "('(?=[A-Z])', hashtag_body, flags=FLAGS)\n", ... |
#! /usr/bin/env python
# -*- coding:utf8 -*-
#
# pw_classes.py
#
# This file is part of pyplanes, a software distributed under the MIT license.
# For any question, please contact one of the authors cited below.
#
# Copyright (c) 2020
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# Permission is hereby g... | [
"pyPLANES.core.multilayer.MultiLayer.__init__",
"numpy.linalg.solve",
"pyPLANES.core.multilayer.MultiLayer.update_frequency",
"numpy.delete",
"pyPLANES.core.calculus.PwCalculus.update_frequency",
"mediapack.Air",
"numpy.exp",
"numpy.zeros",
"pyPLANES.core.calculus.PwCalculus.__init__",
"pyPLANES.p... | [((1311, 1316), 'mediapack.Air', 'Air', ([], {}), '()\n', (1314, 1316), False, 'from mediapack import Air, PEM, EqFluidJCA\n'), ((2063, 2098), 'pyPLANES.core.calculus.PwCalculus.__init__', 'PwCalculus.__init__', (['self'], {}), '(self, **kwargs)\n', (2082, 2098), False, 'from pyPLANES.core.calculus import PwCalculus\n'... |
import os
import requests
from bs4 import BeautifulSoup
from ekorpkit import eKonf
from ekorpkit.io.download.web import web_download, web_download_unzip
class EDGAR:
def __init__(self, **args):
self.args = eKonf.to_config(args)
self.base_url = self.args.base_url
self.url = self.args.url
... | [
"os.listdir",
"os.makedirs",
"ekorpkit.eKonf.to_config",
"requests.get",
"bs4.BeautifulSoup",
"ekorpkit.io.download.web.web_download"
] | [((220, 241), 'ekorpkit.eKonf.to_config', 'eKonf.to_config', (['args'], {}), '(args)\n', (235, 241), False, 'from ekorpkit import eKonf\n'), ((373, 416), 'os.makedirs', 'os.makedirs', (['self.output_dir'], {'exist_ok': '(True)'}), '(self.output_dir, exist_ok=True)\n', (384, 416), False, 'import os\n'), ((849, 888), 're... |
from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType
import numpy as np
import unittest
class testPerfForesightConsumerType(unittest.TestCase):
def setUp(self):
self.agent = PerfForesightConsumerType()
self.agent_infinite = PerfForesightConsumerType(cycles=0)
PF_... | [
"numpy.mean",
"HARK.ConsumptionSaving.ConsIndShockModel.PerfForesightConsumerType"
] | [((214, 241), 'HARK.ConsumptionSaving.ConsIndShockModel.PerfForesightConsumerType', 'PerfForesightConsumerType', ([], {}), '()\n', (239, 241), False, 'from HARK.ConsumptionSaving.ConsIndShockModel import PerfForesightConsumerType\n'), ((272, 307), 'HARK.ConsumptionSaving.ConsIndShockModel.PerfForesightConsumerType', 'P... |
import os
from os import getcwd
#---------------------------------------------#
# 训练前一定要注意修改classes
# 种类顺序需要和model_data下的txt一样
#---------------------------------------------#
classes = ["cat", "dog"]
sets = ["train", "test"]
wd = getcwd()
for se in sets:
list_file = open('cls_' + se + '.txt', 'w')... | [
"os.listdir",
"os.path.join",
"os.path.splitext",
"os.getcwd"
] | [((246, 254), 'os.getcwd', 'getcwd', ([], {}), '()\n', (252, 254), False, 'from os import getcwd\n'), ((379, 404), 'os.listdir', 'os.listdir', (['datasets_path'], {}), '(datasets_path)\n', (389, 404), False, 'import os\n'), ((575, 613), 'os.path.join', 'os.path.join', (['datasets_path', 'type_name'], {}), '(datasets_pa... |
from PIL import Image
from math import sqrt
import numpy as np
import time
import matplotlib.backends.backend_tkagg
import matplotlib.pyplot as plt
class Point:
x: float
y: float
f: float
h: float
g: float
def __init__(self, x, y, f):
self.x = x
self.y = y
... | [
"PIL.Image.open",
"matplotlib.pyplot.savefig",
"math.sqrt",
"numpy.zeros",
"time.time",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((2189, 2234), 'math.sqrt', 'sqrt', (['((point.x - x) ** 2 + (point.y - y) ** 2)'], {}), '((point.x - x) ** 2 + (point.y - y) ** 2)\n', (2193, 2234), False, 'from math import sqrt\n'), ((6777, 6788), 'time.time', 'time.time', ([], {}), '()\n', (6786, 6788), False, 'import time\n'), ((7223, 7248), 'numpy.zeros', 'np.ze... |
#Author <NAME>
import time
import rnnoise
import numpy as np
def time_rnnoise(rounds=1000):
a = rnnoise.RNNoise()
timer = 0.0
st = time.time()
for i in range(rounds):
inp = np.random.bytes(960)
timer = (time.time() - st)
print(timer)
st = time.time()
for i in range(rounds):
inp = np.random.bytes(960)
va,o... | [
"rnnoise.RNNoise",
"time.time",
"numpy.random.bytes"
] | [((97, 114), 'rnnoise.RNNoise', 'rnnoise.RNNoise', ([], {}), '()\n', (112, 114), False, 'import rnnoise\n'), ((134, 145), 'time.time', 'time.time', ([], {}), '()\n', (143, 145), False, 'import time\n'), ((248, 259), 'time.time', 'time.time', ([], {}), '()\n', (257, 259), False, 'import time\n'), ((179, 199), 'numpy.ran... |
""" shell sort tests module """
import unittest
import random
from sort import shell
from tests import helper
class ShellSortTests(unittest.TestCase):
""" shell sort unit tests class """
max = 100
arr = []
def setUp(self):
""" setting up for the test """
self.arr = random.sample(ran... | [
"tests.helper.is_sorted",
"sort.shell.sort"
] | [((814, 829), 'sort.shell.sort', 'shell.sort', (['inp'], {}), '(inp)\n', (824, 829), False, 'from sort import shell\n'), ((1003, 1026), 'sort.shell.sort', 'shell.sort', (['self.arr[:]'], {}), '(self.arr[:])\n', (1013, 1026), False, 'from sort import shell\n'), ((543, 558), 'sort.shell.sort', 'shell.sort', (['inp'], {})... |
from django_cron import CronJobBase, Schedule
class VerifyLicenceSpeciesJob(CronJobBase):
"""
Verifies LicenceSpecies against TSC server.
"""
RUN_AT_TIMES = ['00:00']
schedule = Schedule(run_at_times=RUN_AT_TIMES)
code = 'applications.verify_licence_species'
def do(self):
pass
| [
"django_cron.Schedule"
] | [((201, 236), 'django_cron.Schedule', 'Schedule', ([], {'run_at_times': 'RUN_AT_TIMES'}), '(run_at_times=RUN_AT_TIMES)\n', (209, 236), False, 'from django_cron import CronJobBase, Schedule\n')] |
# Copyright (c) 2009-2020, quasardb SAS. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice,... | [
"numpy.amax",
"numpy.amin",
"numpy.unique",
"sys.exit",
"quasardb.ColumnInfo",
"quasardb.Cluster",
"numpy.random.randint",
"numpy.random.uniform",
"quasardb.BatchColumnInfo",
"os.getpid",
"numpy.datetime64",
"numpy.timedelta64",
"builtins.int",
"socket.gethostname",
"traceback.print_exc"... | [((2039, 2050), 'time.time', 'time.time', ([], {}), '()\n', (2048, 2050), False, 'import time\n'), ((2085, 2096), 'time.time', 'time.time', ([], {}), '()\n', (2094, 2096), False, 'import time\n'), ((2928, 2973), 'numpy.random.uniform', 'np.random.uniform', (['(-100.0)', '(100.0)', 'price_count'], {}), '(-100.0, 100.0, ... |
from nerwhal.backends.flashtext_backend import FlashtextBackend
from nerwhal.recognizer_bases import FlashtextRecognizer
def test_single_recognizer(embed):
class TestRecognizer(FlashtextRecognizer):
TAG = "XX"
SCORE = 1.0
@property
def keywords(self):
return ["abc", "c... | [
"nerwhal.backends.flashtext_backend.FlashtextBackend"
] | [((340, 358), 'nerwhal.backends.flashtext_backend.FlashtextBackend', 'FlashtextBackend', ([], {}), '()\n', (356, 358), False, 'from nerwhal.backends.flashtext_backend import FlashtextBackend\n'), ((1114, 1132), 'nerwhal.backends.flashtext_backend.FlashtextBackend', 'FlashtextBackend', ([], {}), '()\n', (1130, 1132), Fa... |
import pytest
import cudf
import mock
from cuxfilter.charts.core.non_aggregate.core_non_aggregate import (
BaseNonAggregate,
)
from cuxfilter.dashboard import DashBoard
from cuxfilter import DataFrame
from cuxfilter.layouts import chart_view
class TestCoreNonAggregateChart:
def test_variables(self):
... | [
"cuxfilter.charts.core.non_aggregate.core_non_aggregate.BaseNonAggregate",
"mock.patch",
"cuxfilter.DataFrame.from_dataframe",
"pytest.mark.parametrize",
"cuxfilter.layouts.chart_view",
"cudf.DataFrame"
] | [((1913, 1977), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""chart, _chart"""', '[(None, None), (1, 1)]'], {}), "('chart, _chart', [(None, None), (1, 1)])\n", (1936, 1977), False, 'import pytest\n'), ((4971, 5273), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""x_range, y_range, query, local... |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Outline explorer API.
You need to declare a OutlineExplorerProxy, and a function for handle the
edit_goto Signal.
class OutlineExplorerProxyCustom(OutlineExplore... | [
"spyder.config.base.running_under_pytest",
"re.escape",
"qtpy.QtCore.Signal",
"re.match",
"qtpy.QtGui.QTextBlock",
"spyder.config.base._"
] | [((2665, 2681), 'qtpy.QtCore.Signal', 'Signal', (['int', 'int'], {}), '(int, int)\n', (2671, 2681), False, 'from qtpy.QtCore import Signal, QObject\n'), ((2722, 2730), 'qtpy.QtCore.Signal', 'Signal', ([], {}), '()\n', (2728, 2730), False, 'from qtpy.QtCore import Signal, QObject\n'), ((3973, 3981), 'qtpy.QtCore.Signal'... |
import os
import re
import sys
from oguilem.configuration.fitness import OGUILEMFitnessFunctionConfiguration
from oguilem.configuration.ga import OGUILEMGlobOptConfig
from oguilem.configuration.geometry import OGUILEMGeometryConfig
from oguilem.configuration.utils import ConnectedValue, ConfigFileManager
from oguilem.... | [
"oguilem.configuration.ga.OGUILEMGlobOptConfig",
"oguilem.configuration.fitness.OGUILEMFitnessFunctionConfiguration",
"os.path.join",
"oguilem.configuration.geometry.OGUILEMGeometryConfig",
"os.path.isdir",
"oguilem.configuration.utils.ConnectedValue",
"os.mkdir",
"oguilem.configuration.utils.ConfigFi... | [((451, 473), 'oguilem.configuration.ga.OGUILEMGlobOptConfig', 'OGUILEMGlobOptConfig', ([], {}), '()\n', (471, 473), False, 'from oguilem.configuration.ga import OGUILEMGlobOptConfig\n'), ((544, 567), 'oguilem.configuration.geometry.OGUILEMGeometryConfig', 'OGUILEMGeometryConfig', ([], {}), '()\n', (565, 567), False, '... |
from __future__ import unicode_literals
import copy
import json
from six import string_types
from . import default_operators
from . import sql_prepare
from . import values
from .error import WinnowError
from .templating import SqlFragment
from .templating import WinnowSql
class Winnow(object):
"""
Winnow is... | [
"json.loads",
"copy.deepcopy"
] | [((1905, 1924), 'copy.deepcopy', 'copy.deepcopy', (['filt'], {}), '(filt)\n', (1918, 1924), False, 'import copy\n'), ((3781, 3815), 'json.loads', 'json.loads', (["filter_clause['value']"], {}), "(filter_clause['value'])\n", (3791, 3815), False, 'import json\n')] |
import numpy as np
import ROOT
from dummy_distributions import dummy_pt_eta
counts, test_in1, test_in2 = dummy_pt_eta()
f = ROOT.TFile.Open("samples/testSF2d.root")
sf = f.Get("scalefactors_Tight_Electron")
xmin, xmax = sf.GetXaxis().GetXmin(), sf.GetXaxis().GetXmax()
ymin, ymax = sf.GetYaxis().GetXmin(), sf.GetYax... | [
"numpy.empty_like",
"dummy_distributions.dummy_pt_eta",
"ROOT.TFile.Open"
] | [((107, 121), 'dummy_distributions.dummy_pt_eta', 'dummy_pt_eta', ([], {}), '()\n', (119, 121), False, 'from dummy_distributions import dummy_pt_eta\n'), ((127, 167), 'ROOT.TFile.Open', 'ROOT.TFile.Open', (['"""samples/testSF2d.root"""'], {}), "('samples/testSF2d.root')\n", (142, 167), False, 'import ROOT\n'), ((347, 3... |
#from distutils.core import setup
from setuptools import setup, find_packages
from distutils.extension import Extension
import re
import os
import codecs
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pyp... | [
"os.path.dirname",
"os.path.join",
"distutils.extension.Extension",
"re.search"
] | [((177, 202), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (192, 202), False, 'import os\n'), ((541, 614), 're.search', 're.search', (['"""^__version__ = [\'\\\\"]([^\'\\\\"]*)[\'\\\\"]"""', 'version_file', 're.M'], {}), '(\'^__version__ = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\', version_fi... |
import logging
from flask import Flask
from flask_sqlalchemy import SQLAlchemy as _BaseSQLAlchemy
from flask_migrate import Migrate
from flask_cors import CORS
from flask_talisman import Talisman
from flask_ipban import IpBan
from config import Config, get_logger_handler
# database
class SQLAlchemy(_BaseSQLAlchemy):... | [
"logging.getLogger",
"flask_cors.CORS",
"flask.Flask",
"config.Config",
"flask_talisman.Talisman",
"flask_migrate.Migrate",
"flask_ipban.IpBan",
"config.get_logger_handler"
] | [((504, 513), 'flask_migrate.Migrate', 'Migrate', ([], {}), '()\n', (511, 513), False, 'from flask_migrate import Migrate\n'), ((521, 527), 'flask_cors.CORS', 'CORS', ([], {}), '()\n', (525, 527), False, 'from flask_cors import CORS\n'), ((539, 549), 'flask_talisman.Talisman', 'Talisman', ([], {}), '()\n', (547, 549), ... |
from pathlib import Path
from requests.auth import _basic_auth_str
import pytest
from bravado_core.formatter import SwaggerFormat, NO_OP
from gc3_query.lib.gc3_config import GC3Config, IDMCredential
TEST_BASE_DIR: Path = Path(__file__).parent.joinpath("GC3Config")
config_dir = TEST_BASE_DIR.joinpath("config")
def te... | [
"pytest.fixture",
"requests.auth._basic_auth_str",
"pathlib.Path",
"gc3_query.lib.gc3_config.GC3Config"
] | [((1331, 1347), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1345, 1347), False, 'import pytest\n'), ((4936, 4952), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (4950, 4952), False, 'import pytest\n'), ((432, 443), 'gc3_query.lib.gc3_config.GC3Config', 'GC3Config', ([], {}), '()\n', (441, 443), False... |
import argparse
import matplotlib.pyplot as plt
import torch
from pytorch_warmup import *
def get_rates(warmup_cls, beta2, max_step):
rates = []
p = torch.nn.Parameter(torch.arange(10, dtype=torch.float32))
optimizer = torch.optim.Adam([{'params': p}], lr=1.0, betas=(0.9, beta2))
lr_scheduler = torch.... | [
"torch.optim.Adam",
"torch.optim.lr_scheduler.LambdaLR",
"matplotlib.pyplot.savefig",
"argparse.ArgumentParser",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"torch.arange",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((669, 723), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Warmup schedule"""'}), "(description='Warmup schedule')\n", (692, 723), False, 'import argparse\n'), ((1259, 1271), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (1269, 1271), True, 'import matplotlib.pyplot as pl... |
"""\
PROMORT example.
"""
import argparse
import random
import sys
import pyecvl.ecvl as ecvl
import pyeddl.eddl as eddl
from pyeddl.tensor import Tensor
import models
def VGG16(in_layer, num_classes):
x = in_layer
x = eddl.ReLu(eddl.Conv(x, 64, [3, 3]))
x = eddl.MaxPool(eddl.ReLu(eddl.Conv(x, 64, [3, ... | [
"pyeddl.eddl.Conv",
"pyecvl.ecvl.TensorToView",
"pyeddl.tensor.Tensor",
"pyecvl.ecvl.DLDataset",
"pyeddl.eddl.save",
"pyeddl.eddl.setlogfile",
"pyecvl.ecvl.DatasetAugmentations",
"pyeddl.eddl.getMetric",
"pyeddl.eddl.Input",
"pyeddl.eddl.train_batch",
"argparse.ArgumentParser",
"pyeddl.eddl.Mo... | [((965, 986), 'pyeddl.eddl.Reshape', 'eddl.Reshape', (['x', '[-1]'], {}), '(x, [-1])\n', (977, 986), True, 'import pyeddl.eddl as eddl\n'), ((1176, 1209), 'pyeddl.eddl.Input', 'eddl.Input', (['[3, size[0], size[1]]'], {}), '([3, size[0], size[1]])\n', (1186, 1209), True, 'import pyeddl.eddl as eddl\n'), ((1220, 1258), ... |
from hawc_hal.maptree.map_tree import map_tree_factory
from hawc_hal.response import hawc_response_factory
import os
from conftest import check_map_trees, check_responses
def test_root_to_hdf_response(response):
r = hawc_response_factory(response)
test_filename = "response.hd5"
# Make sure it doesn't e... | [
"conftest.check_responses",
"os.path.exists",
"conftest.check_map_trees",
"hawc_hal.response.hawc_response_factory",
"hawc_hal.maptree.map_tree.map_tree_factory",
"os.remove"
] | [((223, 254), 'hawc_hal.response.hawc_response_factory', 'hawc_response_factory', (['response'], {}), '(response)\n', (244, 254), False, 'from hawc_hal.response import hawc_response_factory\n'), ((358, 387), 'os.path.exists', 'os.path.exists', (['test_filename'], {}), '(test_filename)\n', (372, 387), False, 'import os\... |
from __future__ import absolute_import
from io import BytesIO
import zstd
from .base import BaseCompressor, BaseDecompressor
from ..protocol import CompressionMethod, CompressionMethodByte
from ..reader import read_binary_uint32
from ..writer import write_binary_uint32, write_binary_uint8
class Compressor(BaseCompr... | [
"zstd.compress",
"io.BytesIO",
"zstd.decompress"
] | [((477, 486), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (484, 486), False, 'from io import BytesIO\n'), ((541, 560), 'zstd.compress', 'zstd.compress', (['data'], {}), '(data)\n', (554, 560), False, 'import zstd\n'), ((1237, 1246), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (1244, 1246), False, 'from io import BytesIO\... |
import math
def main():
print("""
\tComsole by MumuNiMochii version beta 1.6.23
\t\"Originally made with C\"
\tMAIN MENU
\tWhat do you want to execute and evaluate?
\t1.) Add two addends
\t2.) Subtract a minuend from its subtrahend
\t3.) Multiply a multiplicand to its multiplier
\t4.) Divide a dividend to its... | [
"math.pow",
"math.sqrt"
] | [((2408, 2420), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (2417, 2420), False, 'import math\n'), ((2225, 2239), 'math.pow', 'math.pow', (['x', 'y'], {}), '(x, y)\n', (2233, 2239), False, 'import math\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import re
from setuptools import setup, find_packages
# classifiers = """\
# Development Status :: 4 - Beta
# Programming Language :: Python
# Programming Language :: Python :: 3
# Programming Language :: Python :: 3.4
# Programming... | [
"os.path.dirname",
"setuptools.find_packages",
"io.open"
] | [((1213, 1228), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1226, 1228), False, 'from setuptools import setup, find_packages\n'), ((544, 569), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (559, 569), False, 'import os\n'), ((635, 671), 'io.open', 'io.open', (['filepath']... |
from .trainer.models import MultiTaskTagger
from .trainer.utils import load_dictionaries,Config
from .trainer.tasks.multitask_tagging import MultiTaskTaggingModule
from fairseq.data.data_utils import collate_tokens
from attacut import tokenize
class HoogBERTaEncoder(object):
def __init__(self,layer=12,cuda=False... | [
"attacut.tokenize"
] | [((856, 870), 'attacut.tokenize', 'tokenize', (['sent'], {}), '(sent)\n', (864, 870), False, 'from attacut import tokenize\n'), ((1404, 1418), 'attacut.tokenize', 'tokenize', (['sent'], {}), '(sent)\n', (1412, 1418), False, 'from attacut import tokenize\n')] |
from pyconductor import load_test_values, calculate_conductance
def conductance_calc():
preloaded_dict = load_test_values()
while preloaded_dict:
print(
"[1] - Show currently available materials in Material Dictionary\n"
"[2] - Add a material (will not be saved upon restart)\n"... | [
"pyconductor.load_test_values",
"pyconductor.calculate_conductance"
] | [((111, 129), 'pyconductor.load_test_values', 'load_test_values', ([], {}), '()\n', (127, 129), False, 'from pyconductor import load_test_values, calculate_conductance\n'), ((864, 914), 'pyconductor.calculate_conductance', 'calculate_conductance', (['preloaded_dict[main_prompt]'], {}), '(preloaded_dict[main_prompt])\n'... |
from fuzzconfig import FuzzConfig
import nonrouting
import fuzzloops
import re
cfgs = [
FuzzConfig(job="SYSCONFIG40", device="LIFCL-40", sv="../shared/empty_40.v",
tiles=["CIB_R0C75:EFB_0", "CIB_R0C72:BANKREF0", "CIB_R0C77:EFB_1_OSC", "CIB_R0C79:EFB_2",
"CIB_R0C81:I2C_EFB_3", "CIB_R0C85:PMU", "CIB_... | [
"fuzzconfig.FuzzConfig"
] | [((93, 379), 'fuzzconfig.FuzzConfig', 'FuzzConfig', ([], {'job': '"""SYSCONFIG40"""', 'device': '"""LIFCL-40"""', 'sv': '"""../shared/empty_40.v"""', 'tiles': "['CIB_R0C75:EFB_0', 'CIB_R0C72:BANKREF0', 'CIB_R0C77:EFB_1_OSC',\n 'CIB_R0C79:EFB_2', 'CIB_R0C81:I2C_EFB_3', 'CIB_R0C85:PMU',\n 'CIB_R0C87:MIB_CNR_32_FAFD... |
from uuid import UUID
import os
import pytest
from notifications_utils.base64_uuid import base64_to_uuid, uuid_to_base64, base64_to_bytes, bytes_to_base64
def test_bytes_to_base64_to_bytes():
b = os.urandom(32)
b64 = bytes_to_base64(b)
assert base64_to_bytes(b64) == b
@pytest.mark.parametrize(
"url... | [
"notifications_utils.base64_uuid.base64_to_uuid",
"uuid.UUID",
"os.urandom",
"pytest.mark.parametrize",
"notifications_utils.base64_uuid.bytes_to_base64",
"pytest.raises",
"notifications_utils.base64_uuid.base64_to_bytes",
"notifications_utils.base64_uuid.uuid_to_base64"
] | [((287, 408), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""url_val"""', "['AAAAAAAAAAAAAAAAAAAAAQ', 'AAAAAAAAAAAAAAAAAAAAAQ=',\n 'AAAAAAAAAAAAAAAAAAAAAQ==']"], {}), "('url_val', ['AAAAAAAAAAAAAAAAAAAAAQ',\n 'AAAAAAAAAAAAAAAAAAAAAQ=', 'AAAAAAAAAAAAAAAAAAAAAQ=='])\n", (310, 408), False, 'import pytes... |
"""The devolo_home_control integration."""
from __future__ import annotations
import asyncio
from functools import partial
from types import MappingProxyType
from typing import Any
from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
from devolo_home_control_api.homecontrol import HomeControl
fr... | [
"homeassistant.components.zeroconf.async_get_instance",
"functools.partial",
"devolo_home_control_api.mydevolo.Mydevolo"
] | [((3395, 3405), 'devolo_home_control_api.mydevolo.Mydevolo', 'Mydevolo', ([], {}), '()\n', (3403, 3405), False, 'from devolo_home_control_api.mydevolo import Mydevolo\n'), ((1617, 1650), 'homeassistant.components.zeroconf.async_get_instance', 'zeroconf.async_get_instance', (['hass'], {}), '(hass)\n', (1644, 1650), Fals... |
## -------------------------------------------------------- ##
# Trab 1 IA 2019-2
#
# <NAME>
#
# hillClimbing.py: implements the hill climbing metaheuristic for the bag problem
#
# Python version: 3.7.4
## -------------------------------------------------------- ##
import bagProblem as bp
from time import time... | [
"bagProblem.state_Verify",
"bagProblem.state_Value",
"time.time",
"bagProblem.state_Expansion"
] | [((888, 894), 'time.time', 'time', ([], {}), '()\n', (892, 894), False, 'from time import time\n'), ((542, 569), 'bagProblem.state_Value', 'bp.state_Value', (['si[i]', 'OBJs'], {}), '(si[i], OBJs)\n', (556, 569), True, 'import bagProblem as bp\n'), ((597, 628), 'bagProblem.state_Verify', 'bp.state_Verify', (['si[i]', '... |
from bs4 import BeautifulSoup
from optimizers.AdvancedJSOptimizer import AdvancedJSOptimizer
from optimizers.CSSOptimizer import CSSOptimizer
class HTMLParser(object):
def __init__(self, html):
self.soup = BeautifulSoup(html, 'lxml')
def js_parser(self):
for script in self.soup.find_all('scri... | [
"bs4.BeautifulSoup",
"optimizers.AdvancedJSOptimizer.AdvancedJSOptimizer",
"optimizers.CSSOptimizer.CSSOptimizer"
] | [((220, 247), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""lxml"""'], {}), "(html, 'lxml')\n", (233, 247), False, 'from bs4 import BeautifulSoup\n'), ((344, 365), 'optimizers.AdvancedJSOptimizer.AdvancedJSOptimizer', 'AdvancedJSOptimizer', ([], {}), '()\n', (363, 365), False, 'from optimizers.AdvancedJSOptimizer... |
'''Provide interface for game.'''
from typing import Any, Dict, List, Optional, Union
import flask
from flask import Blueprint, url_for
from flask_login import current_user, login_required
from flask_wtf import FlaskForm
from flask_sse import sse
from werkzeug.wrappers import Response
from wtforms import IntegerField... | [
"flask.render_template",
"wtforms.validators.NumberRange",
"wtforms.validators.DataRequired",
"wtforms.SubmitField",
"flask.url_for",
"spades.game.GameState",
"flask_sse.sse.publish",
"flask.Blueprint",
"spades.game.models.player.Player"
] | [((511, 538), 'flask.Blueprint', 'Blueprint', (['"""main"""', '__name__'], {}), "('main', __name__)\n", (520, 538), False, 'from flask import Blueprint, url_for\n'), ((593, 604), 'spades.game.GameState', 'GameState', ([], {}), '()\n', (602, 604), False, 'from spades.game import GameState\n'), ((665, 690), 'wtforms.Subm... |
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#... | [
"inspect.isclass",
"inspect.getmembers"
] | [((1812, 1846), 'inspect.getmembers', 'inspect.getmembers', (['current_module'], {}), '(current_module)\n', (1830, 1846), False, 'import inspect\n'), ((2445, 2479), 'inspect.getmembers', 'inspect.getmembers', (['current_module'], {}), '(current_module)\n', (2463, 2479), False, 'import inspect\n'), ((3043, 3077), 'inspe... |
# Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not u... | [
"pyarrow.feather.read_feather"
] | [((2189, 2207), 'pyarrow.feather.read_feather', 'read_feather', (['path'], {}), '(path)\n', (2201, 2207), False, 'from pyarrow.feather import read_feather\n')] |
"""A script is a series of operations."""
import json
import os
from .ops import create
class Script(object):
"""A script is a series of operations."""
def __init__(self, s=None):
"""Parse a script from a JSON string."""
if s is not None:
self.parsed_script = json.loads(s)
... | [
"json.loads"
] | [((301, 314), 'json.loads', 'json.loads', (['s'], {}), '(s)\n', (311, 314), False, 'import json\n')] |
"""
Totally untested file. Will be removed in subsequent commits
"""
import tensorflow as tf
import matplotlib.image as mpimg
import numpy as np
from math import ceil, floor
import os
IMAGE_SIZE = 720
def central_scale_images(X_imgs, scales):
# Various settings needed for Tensorflow operation
boxes = np.zeros... | [
"tensorflow.image.transpose_image",
"os.listdir",
"tensorflow.reset_default_graph",
"tensorflow.image.rot90",
"math.ceil",
"math.floor",
"tensorflow.placeholder",
"tensorflow.Session",
"tensorflow.image.flip_up_down",
"tensorflow.global_variables_initializer",
"numpy.array",
"tensorflow.image.... | [((4951, 5006), 'os.listdir', 'os.listdir', (['"""/home/pallab/gestures-cnn/images/resized/"""'], {}), "('/home/pallab/gestures-cnn/images/resized/')\n", (4961, 5006), False, 'import os\n'), ((637, 687), 'numpy.array', 'np.array', (['[IMAGE_SIZE, IMAGE_SIZE]'], {'dtype': 'np.int32'}), '([IMAGE_SIZE, IMAGE_SIZE], dtype=... |
#!/usr/bin/env python3
# encoding: utf-8
#
# (C) 2012-2016 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: BSD-3-Clause
"""\
Link To The Past - a backup tool
Hash functions and commands.
"""
import hashlib
import zlib
class CRC32(object):
"""\
CRC32 API compatible to the hashlib functions (subset used by t... | [
"doctest.testmod",
"zlib.crc32"
] | [((1687, 1704), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (1702, 1704), False, 'import doctest\n'), ((531, 559), 'zlib.crc32', 'zlib.crc32', (['data', 'self.value'], {}), '(data, self.value)\n', (541, 559), False, 'import zlib\n')] |
import unittest
import numpy as np
from astroNN.lamost import wavelength_solution, pseudo_continuum
class LamostToolsTestCase(unittest.TestCase):
def test_wavelength_solution(self):
wavelength_solution()
wavelength_solution(dr=5)
self.assertRaises(ValueError, wavelength_solution, dr=1)
... | [
"unittest.main",
"astroNN.lamost.wavelength_solution",
"numpy.ones"
] | [((432, 447), 'unittest.main', 'unittest.main', ([], {}), '()\n', (445, 447), False, 'import unittest\n'), ((197, 218), 'astroNN.lamost.wavelength_solution', 'wavelength_solution', ([], {}), '()\n', (216, 218), False, 'from astroNN.lamost import wavelength_solution, pseudo_continuum\n'), ((227, 252), 'astroNN.lamost.wa... |
from django.http import HttpResponse
from rest_framework.decorators import api_view
from rest_framework.decorators import parser_classes
from rest_framework.parsers import JSONParser
import numpy as np
import json
import os
from .utils.spectrogram_utils import SpectrogramUtils
from .utils.feature_extraction_utils impor... | [
"numpy.mean",
"os.listdir",
"numpy.ones",
"numpy.std",
"json.dumps",
"numpy.max",
"numpy.array",
"numpy.empty",
"rest_framework.decorators.parser_classes",
"numpy.min",
"json.load",
"rest_framework.decorators.api_view",
"json.dump"
] | [((626, 643), 'rest_framework.decorators.api_view', 'api_view', (["['GET']"], {}), "(['GET'])\n", (634, 643), False, 'from rest_framework.decorators import api_view\n'), ((645, 674), 'rest_framework.decorators.parser_classes', 'parser_classes', (['(JSONParser,)'], {}), '((JSONParser,))\n', (659, 674), False, 'from rest... |
# coding=utf-8
# Filename: h5tree.py
"""
Print the ROOT file structure.
Usage:
rtree FILE
rtree (-h | --help)
rtree --version
Options:
FILE Input file.
-h --help Show this screen.
"""
from __future__ import division, absolute_import, print_function
from km3pipe.io.root import open_rfile
_... | [
"docopt.docopt",
"km3pipe.io.root.open_rfile"
] | [((556, 573), 'km3pipe.io.root.open_rfile', 'open_rfile', (['rfile'], {}), '(rfile)\n', (566, 573), False, 'from km3pipe.io.root import open_rfile\n'), ((696, 711), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (702, 711), False, 'from docopt import docopt\n')] |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from psychopy.visual import Window, TextStim
from psychopy.core import wait, Clock, quit
from psychopy.event import clearEvents, waitKeys, Mouse
from psychopy.gui import Dlg
from time import... | [
"psychopy.event.waitKeys",
"psychopy.core.quit",
"random.shuffle",
"psychopy.visual.TextStim",
"psychopy.gui.Dlg",
"datetime.datetime.now",
"psychopy.event.clearEvents",
"codecs.open",
"time.gmtime",
"psychopy.visual.Window",
"psychopy.core.wait"
] | [((2414, 2437), 'psychopy.event.waitKeys', 'waitKeys', ([], {'keyList': "['b']"}), "(keyList=['b'])\n", (2422, 2437), False, 'from psychopy.event import clearEvents, waitKeys, Mouse\n'), ((2508, 2514), 'psychopy.core.quit', 'quit', ([], {}), '()\n', (2512, 2514), False, 'from psychopy.core import wait, Clock, quit\n'),... |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
... | [
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.network.f5.bigip.F5RestClient",
"ansible.module_utils.network.f5.icontrol.upload_file",
"ansible.module_utils.network.f5.common.F5ModuleError",
"time.sleep",
"os.path.split",
"ansible.module_utils.network.f5.icontrol.module_provisioned",
... | [((13811, 13953), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'spec.argument_spec', 'supports_check_mode': 'spec.supports_check_mode', 'mutually_exclusive': 'spec.mutually_exclusive'}), '(argument_spec=spec.argument_spec, supports_check_mode=spec.\n supports_check_mode, mutual... |
from tweepy import OAuthHandler, Stream, API
from tweepy.streaming import StreamListener
import json
import logging
import pymongo
import config
client = pymongo.MongoClient(host='mongo_container', port=27018)
db = client.tweets_db
auth = OAuthHandler(config.CONSUMER_API_KEY, config.CONSUMER_API_SECRET)
auth.set_acc... | [
"json.loads",
"tweepy.streaming.StreamListener",
"tweepy.API",
"logging.critical",
"pymongo.MongoClient",
"tweepy.OAuthHandler"
] | [((156, 211), 'pymongo.MongoClient', 'pymongo.MongoClient', ([], {'host': '"""mongo_container"""', 'port': '(27018)'}), "(host='mongo_container', port=27018)\n", (175, 211), False, 'import pymongo\n'), ((242, 307), 'tweepy.OAuthHandler', 'OAuthHandler', (['config.CONSUMER_API_KEY', 'config.CONSUMER_API_SECRET'], {}), '... |
"""
ASGI config for op_trans project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
from op_trans.websocket import webs... | [
"os.environ.setdefault",
"django.core.asgi.get_asgi_application",
"op_trans.websocket.websocket_application",
"op_trans.redis_cli.RedisCli.get"
] | [((379, 447), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""op_trans.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'op_trans.settings')\n", (400, 447), False, 'import os\n'), ((470, 492), 'django.core.asgi.get_asgi_application', 'get_asgi_application', ([], {}), '()\n', (490,... |
# -*- coding: utf-8 -*-
"""API routes config for notifai_recruitment project.
REST framework adds support for automatic URL routing to Django, and provides simple, quick and consistent
way of wiring view logic to a set of URLs.
For more information on this file, see
https://www.django-rest-framework.org/api-guide/rou... | [
"rest_framework.routers.DefaultRouter"
] | [((419, 442), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (440, 442), False, 'from rest_framework import routers\n')] |
import torch
import torch.nn as nn
class TorchModel(nn.ModuleList):
def __init__(self):
super(TorchModel, self).__init__()
self.linear_1 = nn.Linear(2, 12)
self.linear_2 = nn.Linear(12, 1)
def forward(self, x):
out = self.linear_1(x)
out = torch.tanh(out)
out = self.linear_2(out)
out = torch.sigmo... | [
"torch.tanh",
"torch.sigmoid",
"torch.nn.Linear"
] | [((145, 161), 'torch.nn.Linear', 'nn.Linear', (['(2)', '(12)'], {}), '(2, 12)\n', (154, 161), True, 'import torch.nn as nn\n'), ((180, 196), 'torch.nn.Linear', 'nn.Linear', (['(12)', '(1)'], {}), '(12, 1)\n', (189, 196), True, 'import torch.nn as nn\n'), ((258, 273), 'torch.tanh', 'torch.tanh', (['out'], {}), '(out)\n'... |
#!/usr/bin/env python3
#
# base.py
"""
Base functionality.
"""
#
# Copyright (c) 2020 <NAME> <<EMAIL>>
#
# Based on cyberpandas
# https://github.com/ContinuumIO/cyberpandas
# Copyright (c) 2018, Anaconda, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted pr... | [
"numpy.unique",
"numpy.sort",
"numpy.asarray",
"numpy.isnan",
"numpy.concatenate",
"typing.TypeVar"
] | [((5872, 5885), 'typing.TypeVar', 'TypeVar', (['"""_A"""'], {}), "('_A')\n", (5879, 5885), False, 'from typing import Dict, Iterable, List, Optional, Sequence, SupportsFloat, Tuple, Type, TypeVar, Union, overload\n'), ((9643, 9675), 'typing.TypeVar', 'TypeVar', (['"""_F"""'], {'bound': '"""UserFloat"""'}), "('_F', boun... |
"""
view predication for point cloud,
Run valid_one_point_cloud first
"""
import torch
import numpy as np
import sys
import os
import pptk
# ------ Configurations ------
# path to pth file
pth_file = "../tmp/scene0015_00_vh_clean_2.pth.Random.100"
show_gt = False # show groundtruth or not; groudtruth draw ... | [
"numpy.array",
"torch.load",
"pptk.viewer"
] | [((2278, 2299), 'numpy.array', 'np.array', (['CLASS_COLOR'], {}), '(CLASS_COLOR)\n', (2286, 2299), True, 'import numpy as np\n'), ((2370, 2390), 'torch.load', 'torch.load', (['pth_file'], {}), '(pth_file)\n', (2380, 2390), False, 'import torch\n'), ((2987, 3018), 'pptk.viewer', 'pptk.viewer', (['coords', 'pred_color'],... |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0008_grow_owner'),
]
operations = [
migrations.CreateModel(
name='Measurement',
fields=[
... | [
"django.db.models.DateTimeField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.IntegerField"
] | [((340, 433), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (356, 433), False, 'from django.db import models, migrations\... |
import datetime
from django.conf import settings
from django.db import models
from django.utils import translation
import tower
from babel import Locale, numbers
from jingo import env
from jinja2.filters import do_dictsort
from tower import ugettext as _
import amo
from amo.fields import DecimalCharField
from amo.he... | [
"amo.utils.get_locale_from_lang",
"django.db.models.ForeignKey",
"babel.numbers.format_currency",
"django.utils.translation.to_locale",
"jingo.env.get_template",
"datetime.datetime.now",
"jinja2.filters.do_dictsort",
"tower.ugettext",
"mkt.prices.models.Refund.objects.safer_get_or_create",
"dateti... | [((635, 692), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""webapps.Addon"""'], {'blank': '(True)', 'null': '(True)'}), "('webapps.Addon', blank=True, null=True)\n", (652, 692), False, 'from django.db import models\n'), ((767, 829), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""inapp.InAppProduc... |
import datetime
from pymongo import MongoClient
import pymongo
import pprint
try:
db = MongoClient("mongodb://localhost:27017")["hkust"]
f=0.05
try:
print("Querying Documents...")
listOfCourseWithWaitingListSize = db.course.aggregate([
{ "$unwind": "$sections" },
# { "$project": { "newProduct": {"$multi... | [
"datetime.datetime.strptime",
"keras.models.Sequential",
"keras.layers.Dense",
"pymongo.MongoClient",
"numpy.loadtxt",
"time.time",
"pprint.pprint"
] | [((4443, 4493), 'numpy.loadtxt', 'numpy.loadtxt', (['trainingDataFilename'], {'delimiter': '""","""'}), "(trainingDataFilename, delimiter=',')\n", (4456, 4493), False, 'import numpy\n'), ((4702, 4714), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (4712, 4714), False, 'from keras.models import Sequential\n... |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_all_tables():
db.create_all()
def initialize_db(app: Flask):
db.init_app(app)
db.app = app
from investing_algorithm_framework.core.models.order_status import OrderStatus
from investing_algorithm_framework.core... | [
"flask_sqlalchemy.SQLAlchemy"
] | [((70, 82), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (80, 82), False, 'from flask_sqlalchemy import SQLAlchemy\n')] |
from sqlalchemy import Column, Integer
from sqlalchemy import ForeignKey
from sqlalchemy.orm import declarative_base
from .base import Base
class RelSaleSizeProject(Base):
__tablename__ = 'rel_salesizes_projects'
id = Column(Integer, primary_key=True)
project_id = Column(Integer, ForeignKey('projects.id')... | [
"sqlalchemy.ForeignKey",
"sqlalchemy.Column"
] | [((228, 261), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (234, 261), False, 'from sqlalchemy import Column, Integer\n'), ((295, 320), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""projects.id"""'], {}), "('projects.id')\n", (305, 320), False, 'from sqlalchem... |
"""
jinja2content.py
----------------
DO NOT EDIT THIS FILE
Pelican plugin that processes Markdown files as jinja templates.
"""
from jinja2 import Environment, FileSystemLoader, ChoiceLoader
import os
from pelican import signals
from pelican.readers import MarkdownReader, HTMLReader, RstReader
from pelican.utils imp... | [
"pelican.utils.pelican_open",
"pelican.signals.readers_init.connect",
"os.path.join",
"jinja2.ChoiceLoader",
"os.unlink",
"tempfile.NamedTemporaryFile",
"jinja2.FileSystemLoader"
] | [((2193, 2233), 'pelican.signals.readers_init.connect', 'signals.readers_init.connect', (['add_reader'], {}), '(add_reader)\n', (2221, 2233), False, 'from pelican import signals\n'), ((824, 873), 'os.path.join', 'os.path.join', (["self.settings['THEME']", '"""templates"""'], {}), "(self.settings['THEME'], 'templates')\... |
from blacklist import BLACKLIST
from flask import Flask, jsonify
from flask_restful import Api
from resources.hotel import Hoteis, Hotel
from resources.user import User, UserLogin, UserLogout, UserRegister, Users
from resources.site import Site, Sites
from flask_jwt_extended import JWTManager
app = Flask(__name__)
ap... | [
"flask_jwt_extended.JWTManager",
"flask_restful.Api",
"flask.Flask",
"database.sql_alchemy.db.init_app",
"database.sql_alchemy.db.create_all",
"flask.jsonify"
] | [((302, 317), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (307, 317), False, 'from flask import Flask, jsonify\n'), ((542, 550), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (545, 550), False, 'from flask_restful import Api\n'), ((557, 572), 'flask_jwt_extended.JWTManager', 'JWTManager', (['app... |
import struct
import pycom
import time
from network import LoRa
def blink(seconds, rgb):
    """Show the colour *rgb* on the onboard LED for *seconds*, then turn it off."""
    led_off = 0x000000
    pycom.rgbled(rgb)
    time.sleep(seconds)
    pycom.rgbled(led_off)
def setUSFrequencyPlan(lora):
""" Sets the frequency plan that matches the TTN gateway in the USA """
# remove all US915 channels
for c... | [
"struct.calcsize",
"time.sleep",
"struct.pack",
"pycom.rgbled",
"network.LoRa"
] | [((95, 112), 'pycom.rgbled', 'pycom.rgbled', (['rgb'], {}), '(rgb)\n', (107, 112), False, 'import pycom\n'), ((117, 136), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (127, 136), False, 'import time\n'), ((141, 156), 'pycom.rgbled', 'pycom.rgbled', (['(0)'], {}), '(0)\n', (153, 156), False, 'import pyc... |
import os
import json

# Absolute path to the bundled stopword corpus, resolved relative to this
# module so the load works regardless of the current working directory.
# Each path component is passed separately so os.path.join inserts the
# platform-correct separator (the original embedded a literal "/").
STOPWORDS_JSON_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    os.pardir,
    "corpora",
    "stopwords.json",
)

# Load once at import time; STOPWORD is the list under the "stopwords" key.
with open(STOPWORDS_JSON_PATH, "r", encoding="utf-8") as f:
    STOPWORD = json.load(f)["stopwords"]
| [
"os.path.abspath",
"json.load"
] | [((79, 104), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n'), ((221, 233), 'json.load', 'json.load', (['f'], {}), '(f)\n', (230, 233), False, 'import json\n')] |
import argparse
import os
import shutil
from tqdm import tqdm
import logging
from src.utils.common import read_yaml, create_directories
import random
from src.utils.model import log_model_summary
import tensorflow as tf
STAGE= "Base Model Creation"
logging.basicConfig(
filename=os.path.join("logs",'running_logs.... | [
"tensorflow.keras.layers.Conv2D",
"argparse.ArgumentParser",
"tensorflow.keras.Sequential",
"os.path.join",
"logging.exception",
"tensorflow.keras.optimizers.Adam",
"src.utils.model.log_model_summary",
"tensorflow.keras.layers.Dense",
"src.utils.common.create_directories",
"tensorflow.keras.layers... | [((471, 493), 'src.utils.common.read_yaml', 'read_yaml', (['config_path'], {}), '(config_path)\n', (480, 493), False, 'from src.utils.common import read_yaml, create_directories\n'), ((526, 555), 'logging.info', 'logging.info', (['"""Layer Defined"""'], {}), "('Layer Defined')\n", (538, 555), False, 'import logging\n')... |
# coding: utf-8
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Un... | [
"paddle.metric.Accuracy",
"paddle.Model",
"visualdl.LogWriter",
"importlib.import_module",
"paddle.nn.CrossEntropyLoss",
"os.path.join",
"config.parser_args",
"models.utils.validate",
"models.utils.seed_paddle",
"paddle.load",
"models.utils.train_per_epoch",
"paddle.callbacks.VisualDL",
"pad... | [((1497, 1528), 'visualdl.LogWriter', 'LogWriter', ([], {'logdir': 'args.save_dir'}), '(logdir=args.save_dir)\n', (1506, 1528), False, 'from visualdl import LogWriter\n'), ((1546, 1567), 'paddle.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1565, 1567), True, 'import paddle.nn as nn\n'), ((3438, 3483)... |
import os
from . import common
import cv2
import numpy as np
import imageio
import torch
import torch.utils.data as data
class Video(data.Dataset):
def __init__(self, args, name='Video', train=False, benchmark=False):
self.args = args
self.name = name
self.scale = args.scale
self... | [
"os.path.basename",
"cv2.VideoCapture"
] | [((526, 557), 'cv2.VideoCapture', 'cv2.VideoCapture', (['args.dir_demo'], {}), '(args.dir_demo)\n', (542, 557), False, 'import cv2\n'), ((471, 502), 'os.path.basename', 'os.path.basename', (['args.dir_demo'], {}), '(args.dir_demo)\n', (487, 502), False, 'import os\n')] |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in... | [
"mogan.tests.unit.db.utils.get_test_server",
"mock.patch",
"mogan.objects.base.MoganObjectRegistry.register_notification_objects",
"oslo_versionedobjects.fixture.ObjectVersionChecker",
"mock.MagicMock",
"mogan.objects.fields.StringField",
"mock.Mock",
"mogan.objects.fields.IntegerField",
"mogan.noti... | [((1067, 1110), 'mogan.objects.base.MoganObjectRegistry.register_if', 'base.MoganObjectRegistry.register_if', (['(False)'], {}), '(False)\n', (1103, 1110), False, 'from mogan.objects import base\n'), ((1359, 1402), 'mogan.objects.base.MoganObjectRegistry.register_if', 'base.MoganObjectRegistry.register_if', (['(False)'... |
from plash.eval import eval, register_macro, shell_escape_args
@register_macro()
def defpm(name, *lines):
'define a new package manager'
@register_macro(name, group='package managers')
@shell_escape_args
def package_manager(*packages):
if not packages:
return
sh_packages =... | [
"plash.eval.eval",
"plash.eval.register_macro"
] | [((66, 82), 'plash.eval.register_macro', 'register_macro', ([], {}), '()\n', (80, 82), False, 'from plash.eval import eval, register_macro, shell_escape_args\n'), ((546, 1074), 'plash.eval.eval', 'eval', (["[['defpm', 'apt', 'apt-get update', 'apt-get install -y {}'], ['defpm',\n 'add-apt-repository', 'apt-get insta... |
import os
import re
import shutil
import unittest
from pathlib import Path
from dianna.visualization.text import highlight_text
class Example1:
original_text = 'Doloremque aliquam totam ut. Aspernatur repellendus autem quia deleniti. Natus accusamus ' \
'doloribus et in quam officiis veniam et... | [
"re.split",
"dianna.visualization.text.highlight_text",
"pathlib.Path",
"os.mkdir",
"shutil.rmtree"
] | [((3723, 3744), 're.split', 're.split', (['regex', 'text'], {}), '(regex, text)\n', (3731, 3744), False, 'import re\n'), ((1716, 1790), 'dianna.visualization.text.highlight_text', 'highlight_text', (['Example1.explanation'], {'original_text': 'Example1.original_text'}), '(Example1.explanation, original_text=Example1.or... |
import os
import sys
import builtins
import versioneer
# Refuse to run the build on interpreters below the minimum supported version.
if sys.version_info[:2] < (3, 7):
    raise RuntimeError("Python version >= 3.7 required.")
# Setup-time guard flag — presumably checked by the package's own import
# machinery to avoid heavy imports while setup.py runs; confirm in __init__.
builtins.__RBC_SETUP__ = True
# Drop a stale distutils MANIFEST so it is regenerated for this build.
if os.path.exists('MANIFEST'):
    os.remove('MANIFEST')
# Non-zero when executing inside a conda-build environment.
CONDA_BUILD = int(os.environ.get('CONDA_BUILD', '0'))
CONDA_ENV = os.environ.get('... | [
"os.path.exists",
"sys.path.insert",
"setuptools.find_packages",
"os.environ.get",
"setuptools.setup",
"versioneer.get_version",
"os.getcwd",
"os.chdir",
"os.path.abspath",
"versioneer.get_cmdclass",
"os.remove"
] | [((183, 209), 'os.path.exists', 'os.path.exists', (['"""MANIFEST"""'], {}), "('MANIFEST')\n", (197, 209), False, 'import os\n'), ((215, 236), 'os.remove', 'os.remove', (['"""MANIFEST"""'], {}), "('MANIFEST')\n", (224, 236), False, 'import os\n'), ((256, 290), 'os.environ.get', 'os.environ.get', (['"""CONDA_BUILD"""', '... |
"""
Schedule adjustments are functions that accept a `datetime` and modify it in some way.
Adjustments have the signature `Callable[[datetime], datetime]`.
"""
from datetime import datetime, timedelta
from typing import Callable
import pendulum
import prefect.schedules.filters
def add(interval: timedelta) -> Calla... | [
"pendulum.instance"
] | [((1002, 1023), 'pendulum.instance', 'pendulum.instance', (['dt'], {}), '(dt)\n', (1019, 1023), False, 'import pendulum\n'), ((633, 654), 'pendulum.instance', 'pendulum.instance', (['dt'], {}), '(dt)\n', (650, 654), False, 'import pendulum\n')] |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import math
def f(x):
    """Integrand e**x / x**3 used by the numerical integration below."""
    numerator = math.exp(x)
    return numerator / x ** 3
def int(a,b):
h = (b-a)/104
x_par = a+h
x_impar = a+2*h
soma_par = 0
soma_impar = 0
for i in range(52):
soma_par += f(x_par)
x_par += 2*h
for i in range(51):
soma_impar... | [
"math.exp"
] | [((82, 93), 'math.exp', 'math.exp', (['x'], {}), '(x)\n', (90, 93), False, 'import math\n')] |