| code (string, 22-1.05M chars) | apis (list, 1-3.31k items) | extract_api (string, 75-3.25M chars) |
|---|---|---|
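Each row below pairs a `code` cell with the list of fully qualified APIs it calls (`apis`) and a Python-literal dump of the extracted call sites (`extract_api`). As a minimal sketch of how a cell could be consumed, the `extract_api` column parses with `ast.literal_eval`; the field labels below are assumptions inferred from the rows themselves, not a documented schema:

```python
import ast

# One extract_api record, copied from the first row below. Every field is a
# plain Python literal, so ast.literal_eval can parse the whole cell.
cell = ("[((727, 758), 'django.shortcuts.get_object_or_404', "
        "'get_object_or_404', (['Event'], {'pk': 'id'}), '(Event, pk=id)\\n', "
        "(744, 758), False, 'from django.shortcuts import get_object_or_404\\n')]")

for record in ast.literal_eval(cell):
    # Inferred layout: call span, qualified API name, local callee name,
    # (positional args, keyword args), argument text, argument span,
    # aliased-import flag, and the import statement for the callee.
    call_span, api_name, import_stmt = record[0], record[1], record[-1]
    print(api_name, call_span, import_stmt.strip())
```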
from django.shortcuts import render, redirect, reverse
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.core.paginator import Paginator
from hknweb.utils import markdownify
from hknweb.utils import allow_public_access
from hknweb.events.constants import (
ACCESSLEVEL_... | [
"django.shortcuts.render",
"hknweb.utils.get_access_level",
"hknweb.utils.markdownify",
"hknweb.events.models.AttendanceForm.objects.filter",
"hknweb.events.models.Rsvp.objects.filter",
"django.contrib.messages.warning",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect",
"django.shor... | [((727, 758), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Event'], {'pk': 'id'}), '(Event, pk=id)\n', (744, 758), False, 'from django.shortcuts import get_object_or_404\n'), ((1495, 1527), 'hknweb.events.models.Rsvp.objects.filter', 'Rsvp.objects.filter', ([], {'event': 'event'}), '(event=event)\n', (... |
import sys
import numpy as np
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # NOQA
import seaborn # NOQA
from spherecluster import sample_vMF
plt.ion()
n_clusters = 3
mus = np.random.randn(3, n_clusters)
mus, r = np.linalg.qr(mus, mode='reduced')
kappas = [15, 15, 15]
num_points_per... | [
"numpy.linalg.qr",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.axis",
"numpy.random.randn",
"spherecluster.sample_vMF",
"matplotlib.pyplot.show"
] | [((177, 186), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (184, 186), True, 'from matplotlib import pyplot as plt\n'), ((209, 239), 'numpy.random.randn', 'np.random.randn', (['(3)', 'n_clusters'], {}), '(3, n_clusters)\n', (224, 239), True, 'import numpy as np\n'), ((249, 282), 'numpy.linalg.qr', 'np.linalg.q... |
from ariadne import MutationType
from datetime import datetime as dt
from models.scope import Scope
from schemas.helpers.normalize import change_keys
from schemas.scope import ScopeCreate
mutations_resolvers = MutationType()
@mutations_resolvers.field("scopeCreate")
async def resolve_scope_create(_, info, scope) ->... | [
"schemas.scope.ScopeCreate",
"models.scope.Scope.get_instance",
"ariadne.MutationType"
] | [((212, 226), 'ariadne.MutationType', 'MutationType', ([], {}), '()\n', (224, 226), False, 'from ariadne import MutationType\n'), ((344, 364), 'models.scope.Scope.get_instance', 'Scope.get_instance', ([], {}), '()\n', (362, 364), False, 'from models.scope import Scope\n'), ((376, 443), 'schemas.scope.ScopeCreate', 'Sco... |
from typing import Optional
import napari
import napari.layers
import numpy as np
from napari.utils.geometry import project_point_onto_plane
def point_in_bounding_box(point: np.ndarray, bounding_box: np.ndarray) -> bool:
"""Determine whether an nD point is inside an nD bounding box.
Parameters
---------... | [
"numpy.atleast_2d",
"numpy.cross",
"numpy.asarray",
"numpy.any",
"numpy.squeeze",
"numpy.array",
"numpy.zeros",
"numpy.einsum",
"numpy.empty",
"numpy.cos",
"numpy.sin",
"numpy.all",
"napari.utils.geometry.project_point_onto_plane"
] | [((1901, 1922), 'numpy.atleast_2d', 'np.atleast_2d', (['vector'], {}), '(vector)\n', (1914, 1922), True, 'import numpy as np\n'), ((2006, 2030), 'numpy.array', 'np.array', (['start_position'], {}), '(start_position)\n', (2014, 2030), True, 'import numpy as np\n'), ((2050, 2072), 'numpy.array', 'np.array', (['end_positi... |
# Analyze the Defense Against the Dark Arts class screen
import cv2
import sys
sys.path.append(r"C:\\Users\\SAT") # 添加自定义包的路径
from UniversalAutomaticAnswer.conf.confImp import get_yaml_file
from UniversalAutomaticAnswer.screen.screenImp import ScreenImp # import the custom package
from UniversalAutomaticAnswer.ocr.ocrImp import OCRImp
from UniversalAutomaticAnswer.util.filter imp... | [
"UniversalAutomaticAnswer.ocr.ocrImp.OCRImp",
"UniversalAutomaticAnswer.conf.confImp.get_yaml_file",
"win32api.SetCursorPos",
"time.sleep",
"win32api.mouse_event",
"UniversalAutomaticAnswer.screen.screenImp.ScreenImp",
"UniversalAutomaticAnswer.util.filter.filterLine",
"sys.path.append",
"random.ran... | [((36, 73), 'sys.path.append', 'sys.path.append', (['"""C:\\\\\\\\Users\\\\\\\\SAT"""'], {}), "('C:\\\\\\\\Users\\\\\\\\SAT')\n", (51, 73), False, 'import sys\n'), ((452, 476), 'UniversalAutomaticAnswer.conf.confImp.get_yaml_file', 'get_yaml_file', (['conf_path'], {}), '(conf_path)\n', (465, 476), False, 'from Universa... |
import datetime
import uuid
from typing import Optional
from models.base import CustomBaseModel
class ConvertVideoIn(CustomBaseModel):
source_path: str
destination_path: str
resolution: str
codec_name: Optional[str] = None
display_aspect_ratio: Optional[str] = None
fps: Optional[int] = None
... | [
"datetime.datetime.now",
"uuid.uuid4"
] | [((383, 395), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (393, 395), False, 'import uuid\n'), ((432, 455), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (453, 455), False, 'import datetime\n')] |
import logging
import uuid
from typing import Iterable
import numpy as np
import pyaudio
from cltl.backend.api.util import raw_frames_to_np
from cltl.backend.spi.audio import AudioSource
logger = logging.getLogger(__name__)
class PyAudioSource(AudioSource):
BUFFER = 8
def __init__(self, rate, channels, fr... | [
"logging.getLogger",
"cltl.backend.api.util.raw_frames_to_np",
"pyaudio.PyAudio",
"uuid.uuid4"
] | [((199, 226), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (216, 226), False, 'import logging\n'), ((495, 512), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (510, 512), False, 'import pyaudio\n'), ((673, 739), 'cltl.backend.api.util.raw_frames_to_np', 'raw_frames_to_np', (['s... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
# Copyright 2017 ROBOTIS CO., LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a cop... | [
"sys.stdin.fileno",
"termios.tcsetattr",
"msvcrt.getch",
"termios.tcgetattr",
"sys.stdin.read"
] | [((1293, 1311), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (1309, 1311), False, 'import sys, tty, termios\n'), ((1331, 1352), 'termios.tcgetattr', 'termios.tcgetattr', (['fd'], {}), '(fd)\n', (1348, 1352), False, 'import sys, tty, termios\n'), ((1443, 1460), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], ... |
import os
import argparse
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
def plot_1d(X_train, Y_train, X_test, Y_test, mean=None, std=None, str_figure=None, show_fig=True):
plt.rc('text', usetex=True)
fig = plt.figure(figsize=(8, 6))
ax = fig.gca()
ax.plot(X_test, Y_... | [
"os.path.exists",
"numpy.abs",
"argparse.ArgumentParser",
"numpy.log",
"os.path.join",
"numpy.max",
"matplotlib.pyplot.close",
"matplotlib.pyplot.figure",
"scipy.stats.norm.logpdf",
"matplotlib.pyplot.tight_layout",
"numpy.min",
"os.mkdir",
"matplotlib.pyplot.rc",
"matplotlib.pyplot.show"
... | [((212, 239), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (218, 239), True, 'import matplotlib.pyplot as plt\n'), ((251, 277), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (261, 277), True, 'import matplotlib.pyplot... |
import random
import cocos
from cocos.tiles import TileSet, RectCell, RectMapLayer
from cocos.director import director
from cocos.layer.scrolling import ScrollingManager
import pyglet
from game import Game
from views import WorldMap, CharacterView2
class MainLayer(cocos.layer.Layer):
is_event_handler = True
... | [
"cocos.scene.Scene",
"random.Random",
"cocos.director.director.run",
"views.CharacterView2",
"cocos.director.director.init",
"views.WorldMap",
"game.Game",
"cocos.layer.scrolling.ScrollingManager",
"cocos.director.director.set_show_FPS"
] | [((977, 1047), 'cocos.director.director.init', 'director.init', ([], {'width': '(800)', 'height': '(600)', 'resizable': '(False)', 'autoscale': '(False)'}), '(width=800, height=600, resizable=False, autoscale=False)\n', (990, 1047), False, 'from cocos.director import director\n'), ((1052, 1079), 'cocos.director.directo... |
from flask import Blueprint
blueprint = Blueprint('board', __name__)
from rboard.board import routes
| [
"flask.Blueprint"
] | [((41, 69), 'flask.Blueprint', 'Blueprint', (['"""board"""', '__name__'], {}), "('board', __name__)\n", (50, 69), False, 'from flask import Blueprint\n')] |
from src.commons.big_query.copy_job_async.result_check.result_check_request import \
ResultCheckRequest
from src.commons.big_query.copy_job_async.task_creator import TaskCreator
class BigQueryJobReference(object):
def __init__(self, project_id, job_id, location):
self.project_id = project_id
s... | [
"src.commons.big_query.copy_job_async.result_check.result_check_request.ResultCheckRequest"
] | [((1021, 1286), 'src.commons.big_query.copy_job_async.result_check.result_check_request.ResultCheckRequest', 'ResultCheckRequest', ([], {'task_name_suffix': 'copy_job_request.task_name_suffix', 'copy_job_type_id': 'copy_job_request.copy_job_type_id', 'job_reference': 'self', 'retry_count': 'copy_job_request.retry_count... |
from django.contrib import admin, messages
from django.shortcuts import render
from django.utils.translation import gettext_lazy as _
from inline_actions.actions import DefaultActionsMixin, ViewAction
from inline_actions.admin import InlineActionsMixin, InlineActionsModelAdminMixin
from . import forms
from .models im... | [
"django.shortcuts.render",
"django.contrib.admin.register",
"django.utils.translation.gettext_lazy"
] | [((3830, 3857), 'django.contrib.admin.register', 'admin.register', (['AuthorProxy'], {}), '(AuthorProxy)\n', (3844, 3857), False, 'from django.contrib import admin, messages\n'), ((4047, 4069), 'django.contrib.admin.register', 'admin.register', (['Author'], {}), '(Author)\n', (4061, 4069), False, 'from django.contrib i... |
"""Commands module common setup."""
from importlib import import_module
from typing import Sequence
def available_commands():
"""Index available commands."""
return [
{"name": "help", "summary": "Print available commands"},
{"name": "provision", "summary": "Provision an agent"},
{"nam... | [
"importlib.import_module"
] | [((802, 828), 'importlib.import_module', 'import_module', (['module_path'], {}), '(module_path)\n', (815, 828), False, 'from importlib import import_module\n')] |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.15.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x02\x05\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
... | [
"PyQt5.QtCore.qVersion",
"PyQt5.QtCore.qUnregisterResourceData",
"PyQt5.QtCore.qRegisterResourceData"
] | [((3776, 3877), 'PyQt5.QtCore.qRegisterResourceData', 'QtCore.qRegisterResourceData', (['rcc_version', 'qt_resource_struct', 'qt_resource_name', 'qt_resource_data'], {}), '(rcc_version, qt_resource_struct,\n qt_resource_name, qt_resource_data)\n', (3804, 3877), False, 'from PyQt5 import QtCore\n'), ((3907, 4010), 'P... |
#!/usr/bin/env python
"""
Given one or more DCC experiment IDs, looks at all read2s that were submitted and updates each r2 file
object such that its paired_with property points to the correct r1. This works by looking at the aliases
in the r2 file object to see if there is one with _R2_001 in it. If so, it sets pair... | [
"encode_utils.connection.Connection",
"argparse.ArgumentParser",
"re.compile"
] | [((615, 659), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (638, 659), False, 'import argparse\n'), ((829, 851), 'encode_utils.connection.Connection', 'euc.Connection', (['"""prod"""'], {}), "('prod')\n", (843, 851), True, 'import encode_utils.connec... |
import copy
import pytest
from river import utils
from river import ensemble
estimator = ensemble.SRPClassifier(
n_models=3, # Smaller ensemble than the default to avoid bottlenecks
seed=42)
@pytest.mark.parametrize('estimator, check', [
pytest.param(
estimator,
check,
id=f'{... | [
"river.utils.estimator_checks.yield_checks",
"river.ensemble.SRPClassifier",
"pytest.param",
"copy.deepcopy"
] | [((93, 136), 'river.ensemble.SRPClassifier', 'ensemble.SRPClassifier', ([], {'n_models': '(3)', 'seed': '(42)'}), '(n_models=3, seed=42)\n', (115, 136), False, 'from river import ensemble\n'), ((618, 642), 'copy.deepcopy', 'copy.deepcopy', (['estimator'], {}), '(estimator)\n', (631, 642), False, 'import copy\n'), ((258... |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"The core logic of how plugins integrate with `popen_nspawn`"
import functools
import subprocess
from contextlib impor... | [
"functools.partial"
] | [((1594, 1646), 'functools.partial', 'functools.partial', (['p.wrap_setup_subvol', 'setup_subvol'], {}), '(p.wrap_setup_subvol, setup_subvol)\n', (1611, 1646), False, 'import functools\n'), ((1704, 1742), 'functools.partial', 'functools.partial', (['p.wrap_setup', 'setup'], {}), '(p.wrap_setup, setup)\n', (1721, 1742),... |
# -*- coding: utf-8 -*-
import time
import pandas as pd
# self-made
import manage_mysql
def horseDB(table,search):
con = manage_mysql.connect()
c = con.cursor()
column=[]
value=[]
for i in range(len(search)):
if i%2 == 0:
column.append(search[i])
else:
va... | [
"pandas.read_sql",
"manage_mysql.connect",
"time.time"
] | [((129, 151), 'manage_mysql.connect', 'manage_mysql.connect', ([], {}), '()\n', (149, 151), False, 'import manage_mysql\n'), ((524, 545), 'pandas.read_sql', 'pd.read_sql', (['sql', 'con'], {}), '(sql, con)\n', (535, 545), True, 'import pandas as pd\n'), ((4468, 4490), 'manage_mysql.connect', 'manage_mysql.connect', ([]... |
"""
Code for working with data.
In-memory format (as a list):
- board: Tensor (8, 8, 2) [bool; one-hot]
- move: Tensor (64,) [bool; one-hot]
- value: Tensor () [float32]
On-disk format (to save space and quicken loading):
- board: int64
- move: int64
- value: float32
"""
from typing import Dict, Tuple
import ... | [
"tensorflow.one_hot",
"tensorflow.io.parse_single_example",
"tensorflow.train.Int64List",
"tensorflow.range",
"tensorflow.train.Features",
"tensorflow.io.FixedLenFeature",
"tensorflow.train.FloatList",
"tensorflow.reshape",
"tensorflow.expand_dims",
"tensorflow.stack"
] | [((441, 477), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[2]', 'tf.int64'], {}), '([2], tf.int64)\n', (462, 477), True, 'import tensorflow as tf\n'), ((491, 526), 'tensorflow.io.FixedLenFeature', 'tf.io.FixedLenFeature', (['[]', 'tf.int64'], {}), '([], tf.int64)\n', (512, 526), True, 'import tensorflo... |
from typing import List
import requests
from pathlib import Path
from datetime import date, datetime
from bs4 import BeautifulSoup
from helper.classes import Channel, Program
from helper.utils import get_channel_by_name, get_epg_datetime
TIMEZONE_OFFSET = "+0800"
PROGRAM_URL = "https://epg.beinsports.com/utctime_id.ph... | [
"helper.classes.Channel",
"pathlib.Path",
"helper.utils.get_epg_datetime",
"requests.get",
"bs4.BeautifulSoup",
"datetime.datetime.now",
"datetime.date.today"
] | [((833, 845), 'datetime.date.today', 'date.today', ([], {}), '()\n', (843, 845), False, 'from datetime import date, datetime\n'), ((1278, 1323), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text'], {'features': '"""html.parser"""'}), "(r.text, features='html.parser')\n", (1291, 1323), False, 'from bs4 import BeautifulSou... |
#! /usr/bin/env python3
"""Converts cpplint output to JUnit XML format."""
import argparse
import collections
import os
import re
import sys
from typing import Dict, List
from xml.etree import ElementTree
from exitstatus import ExitStatus
class CpplintError(object):
def __init__(self, file: str, line: int, mes... | [
"argparse.ArgumentParser",
"xml.etree.ElementTree.Element",
"xml.etree.ElementTree.ElementTree",
"collections.defaultdict",
"os.path.relpath",
"re.search"
] | [((656, 744), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Converts cpplint output to JUnit XML format."""'}), "(description=\n 'Converts cpplint output to JUnit XML format.')\n", (679, 744), False, 'import argparse\n'), ((2053, 2085), 'xml.etree.ElementTree.Element', 'ElementTree.E... |
import unittest
import requests_mock
from werkzeug.test import EnvironBuilder
from werkzeug.wrappers import Request
from perimeterx import px_constants
from perimeterx.px_config import PxConfig
from perimeterx.px_context import PxContext
from perimeterx.px_proxy import PxProxy
class Test_PXProxy(unittest.TestCase):... | [
"perimeterx.px_context.PxContext",
"requests_mock.mock",
"perimeterx.px_config.PxConfig",
"werkzeug.test.EnvironBuilder",
"perimeterx.px_proxy.PxProxy",
"werkzeug.wrappers.Request"
] | [((1201, 1221), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (1219, 1221), False, 'import requests_mock\n'), ((2137, 2157), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (2155, 2157), False, 'import requests_mock\n'), ((3304, 3324), 'requests_mock.mock', 'requests_mock.mock', ([], {}), ... |
import collections
import functools
import json
import logging
import multiprocessing
import os
import time
from collections import OrderedDict
from queue import PriorityQueue, Empty
from typing import List, Tuple, Any
from itertools import cycle, islice
import minerl.herobraine.env_spec
from minerl.herobraine.hero imp... | [
"logging.getLogger",
"numpy.clip",
"numpy.asanyarray",
"numpy.array",
"copy.deepcopy",
"minerl.data.version.assert_prefix",
"os.listdir",
"os.path.isdir",
"minerl.data.util.forever",
"collections.OrderedDict",
"os.path.isfile",
"cv2.cvtColor",
"itertools.islice",
"gym.envs.registration.spe... | [((393, 420), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (410, 420), False, 'import logging\n'), ((1962, 2006), 'multiprocessing.Pool', 'multiprocessing.Pool', (['self.number_of_workers'], {}), '(self.number_of_workers)\n', (1982, 2006), False, 'import multiprocessing\n'), ((4271, 431... |
# Copyright 2021 MosaicML. All Rights Reserved.
"""The CPU device used for training."""
from __future__ import annotations
import logging
from contextlib import contextmanager
from typing import Any, Dict, Generator, TypeVar, Union
import torch
from composer.core import Precision
from composer.trainer.devices.devi... | [
"logging.getLogger",
"composer.core.Precision",
"torch.device",
"typing.TypeVar"
] | [((359, 386), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (376, 386), False, 'import logging\n'), ((426, 470), 'typing.TypeVar', 'TypeVar', (['"""T_nnModule"""'], {'bound': 'torch.nn.Module'}), "('T_nnModule', bound=torch.nn.Module)\n", (433, 470), False, 'from typing import Any, Dict,... |
import os
import re
import json
import ast
import csv
import sys
import shutil
# ["allreduce-lambf16", "reducescatter-lamb-allgatherf16", "test-lambf16"] + \
# ["allreduce-adamf16", "reducescatter-adam-allgatherf16", "test-adamf16"] +\
all_binaries = ["adam-ar-c", "adam-rs-c-ag", "adam-fuse-rs-c-ag"] + \
["lamb-... | [
"os.listdir",
"os.path.join",
"ast.literal_eval",
"os.path.isdir",
"re.findall"
] | [((1630, 1660), 're.findall', 're.findall', (['"""{.+}"""', 'stdout_txt'], {}), "('{.+}', stdout_txt)\n", (1640, 1660), False, 'import re\n'), ((2585, 2616), 'os.path.join', 'os.path.join', (['_dir', '"""json.json"""'], {}), "(_dir, 'json.json')\n", (2597, 2616), False, 'import os\n'), ((4062, 4085), 'os.listdir', 'os.... |
from typing import Type, TypeVar, MutableMapping, Any, Iterable
from datapipelines import DataSource, DataSink, PipelineContext, Query, validate_query
from cassiopeia_championgg.dto import ChampionGGStatsListDto, ChampionGGStatsDto
from cassiopeia.datastores.uniquekeys import convert_region_to_platform
from .common i... | [
"typing.TypeVar",
"cassiopeia_championgg.dto.ChampionGGStatsListDto",
"cassiopeia_championgg.dto.ChampionGGStatsDto",
"datapipelines.validate_query",
"datapipelines.Query.has"
] | [((351, 363), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (358, 363), False, 'from typing import Type, TypeVar, MutableMapping, Any, Iterable\n'), ((1234, 1319), 'datapipelines.validate_query', 'validate_query', (['_validate_get_gg_champion_list_query', 'convert_region_to_platform'], {}), '(_validate_get... |
"""
Tests for pika.frame
"""
try:
import unittest2 as unittest
except ImportError:
import unittest
from pika import exceptions
from pika import frame
from pika import spec
class FrameTests(unittest.TestCase):
BASIC_ACK = ('\x01\x00\x01\x00\x00\x00\r\x00<\x00P\x00\x00\x00\x00\x00\x00'
'... | [
"pika.frame.ProtocolHeader",
"pika.spec.Basic.Ack",
"pika.spec.BasicProperties",
"pika.frame.Body",
"pika.frame.Frame",
"pika.frame.decode_frame",
"pika.frame.Heartbeat"
] | [((756, 778), 'pika.frame.Frame', 'frame.Frame', (['(655371)', '(1)'], {}), '(655371, 1)\n', (767, 778), False, 'from pika import frame\n'), ((1271, 1308), 'pika.frame.Body', 'frame.Body', (['(1)', '"""I like it that sound"""'], {}), "(1, 'I like it that sound')\n", (1281, 1308), False, 'from pika import frame\n'), ((1... |
import logging
from datetime import datetime, timedelta
import requests
from core.utils.customClasses import UserFilter
from core.utils.default_responses import (api_accepted_202,
api_bad_request_400,
api_block_by_policy_451,
... | [
"django.contrib.auth.authenticate",
"core.utils.default_responses.api_bad_request_400",
"requests.post",
"apps.blog.serializers.PostGetShortSerializers",
"core.utils.func.create_ref_link",
"core.utils.default_responses.api_block_by_policy_451",
"core.utils.default_responses.api_payment_required_402",
... | [((1350, 1389), 'requests.post', 'requests.post', (['post_url'], {'data': 'post_data'}), '(post_url, data=post_data)\n', (1363, 1389), False, 'import requests\n'), ((1435, 1452), 'rest_framework.response.Response', 'Response', (['content'], {}), '(content)\n', (1443, 1452), False, 'from rest_framework.response import R... |
"""
This system trains RoBERTa on the AGB dataset with a softmax loss function.
Every 1000 training steps, the model is evaluated on the AGB dev set.
"""
from torch.utils.data import DataLoader
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, Sentence... | [
"os.listdir",
"sentence_transformers.SentenceTransformer",
"sentence_transformers.SentencesDataset",
"os.path.join",
"sentence_transformers.LoggingHandler",
"os.path.isfile",
"torch.utils.data.DataLoader",
"sentence_transformers.evaluation.LabelGenerationEvaluator"
] | [((914, 947), 'os.path.join', 'os.path.join', (['root_dir', 'f"""run{i}"""'], {}), "(root_dir, f'run{i}')\n", (926, 947), False, 'import os\n'), ((976, 995), 'os.listdir', 'os.listdir', (['run_dir'], {}), '(run_dir)\n', (986, 995), False, 'import os\n'), ((1017, 1049), 'os.path.join', 'os.path.join', (['run_dir', 'mode... |
import ipywidgets as widgets
from traitlets import Unicode, Int, validate
import os
import json
from datetime import datetime,timedelta
from IPython.display import Javascript
from IPython.display import HTML
from cognipy.ontology import Ontology
from IPython.display import clear_output
_JS_initialized = False
def _In... | [
"os.path.exists",
"ipywidgets.VBox",
"functools.reduce",
"ipywidgets.Output",
"IPython.display.clear_output",
"traitlets.Int",
"ipywidgets.Layout",
"os.path.abspath",
"IPython.display.HTML",
"traitlets.Unicode",
"cognipy.ontology.Ontology"
] | [((1833, 1849), 'ipywidgets.Output', 'widgets.Output', ([], {}), '()\n', (1847, 1849), True, 'import ipywidgets as widgets\n'), ((4196, 4212), 'ipywidgets.Output', 'widgets.Output', ([], {}), '()\n', (4210, 4212), True, 'import ipywidgets as widgets\n'), ((4277, 4327), 'ipywidgets.VBox', 'widgets.VBox', (['[e, w, o]'],... |
import subprocess
import os
from glim.core import Facade
from glim import Log
from glim import paths
DEFAULT_CONFIG = {
'source': os.path.join(paths.APP_PATH, 'assets/js'),
}
class JSLint(object):
def __init__(self, config):
self.config = DEFAULT_CONFIG
for key, value in config.items():
self.config[key] = ... | [
"glim.Log.write",
"subprocess.Popen",
"os.path.join",
"glim.Log.debug",
"glim.Log.error",
"glim.Log.info"
] | [((134, 175), 'os.path.join', 'os.path.join', (['paths.APP_PATH', '"""assets/js"""'], {}), "(paths.APP_PATH, 'assets/js')\n", (146, 175), False, 'import os\n'), ((328, 347), 'glim.Log.debug', 'Log.debug', (['"""config"""'], {}), "('config')\n", (337, 347), False, 'from glim import Log\n'), ((454, 488), 'glim.Log.debug'... |
import os
import pandas as pd
import numpy as np
import torch
from torchvision import transforms
from torch.utils.data import Dataset
import matplotlib.pyplot as plt
from skimage import io
import pdb
class FrameDataset(Dataset):
def __init__(self, csv_file, train_dir):
self.labels = pd.read_csv(csv_file)
... | [
"torchvision.transforms.ToPILImage",
"pandas.read_csv",
"os.path.join",
"skimage.io.imread",
"torchvision.transforms.Normalize",
"torchvision.transforms.Resize",
"torchvision.transforms.ToTensor",
"matplotlib.pyplot.show"
] | [((297, 318), 'pandas.read_csv', 'pd.read_csv', (['csv_file'], {}), '(csv_file)\n', (308, 318), True, 'import pandas as pd\n'), ((695, 813), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[-0.485 / 0.229, -0.456 / 0.224, -0.406 / 0.225]', 'std': '[1 / 0.229, 1 / 0.224, 1 / 0.225]'}), '(mean=... |
# Authors: <NAME>
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
name = 'pairwise_theano_broadcast_' + dtype
rval = theano.fu... | [
"theano.tensor.sqr",
"theano.Out",
"theano.tensor.dot"
] | [((359, 389), 'theano.Out', 'theano.Out', (['dists'], {'borrow': '(True)'}), '(dists, borrow=True)\n', (369, 389), False, 'import theano\n'), ((765, 795), 'theano.Out', 'theano.Out', (['dists'], {'borrow': '(True)'}), '(dists, borrow=True)\n', (775, 795), False, 'import theano\n'), ((204, 229), 'theano.tensor.sqr', 'TT... |
# -*- coding: utf-8 -*-
# Author: <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
import pytest
from renormalizer.mps import Mps, Mpo, MpDm, ThermalProp
from renormalizer.mps.backend import np
from renormalizer.tests.parameter import holstein_model
from renormalizer.utils import Quantity
creation_operator = Mpo.onsite(... | [
"pytest.approx",
"renormalizer.mps.backend.np.allclose",
"renormalizer.mps.backend.np.zeros",
"renormalizer.mps.Mps.ground_state",
"renormalizer.utils.Quantity",
"renormalizer.mps.Mpo.onsite",
"renormalizer.mps.MpDm.max_entangled_gs",
"renormalizer.mps.ThermalProp"
] | [((309, 388), 'renormalizer.mps.Mpo.onsite', 'Mpo.onsite', (['holstein_model', '"""a^\\\\dagger"""'], {'dof_set': '{holstein_model.mol_num // 2}'}), "(holstein_model, 'a^\\\\dagger', dof_set={holstein_model.mol_num // 2})\n", (319, 388), False, 'from renormalizer.mps import Mps, Mpo, MpDm, ThermalProp\n'), ((447, 479),... |
# -*- coding: utf-8 -*-
import sys
import numpy as np
import torch
from torch.autograd import Variable
from pytorch2keras.converter import pytorch_to_keras
import torchvision
import os.path as osp
import os
os.environ['KERAS_BACKEND'] = 'tensorflow'
from keras import backend as K
K.clear_session()
K.set_image_dim_or... | [
"tensorflow.gfile.GFile",
"os.path.exists",
"os.listdir",
"tensorflow.Session",
"numpy.asarray",
"numpy.subtract",
"numpy.max",
"numpy.exp",
"tensorflow.python.keras.backend.get_session",
"tensorflow.GraphDef",
"keras.backend.clear_session",
"tensorflow.python.keras.models.load_model",
"nump... | [((284, 301), 'keras.backend.clear_session', 'K.clear_session', ([], {}), '()\n', (299, 301), True, 'from keras import backend as K\n'), ((302, 332), 'keras.backend.set_image_dim_ordering', 'K.set_image_dim_ordering', (['"""tf"""'], {}), "('tf')\n", (326, 332), True, 'from keras import backend as K\n'), ((828, 837), 'n... |
from typing import List, Union
import json
class Product:
def __init__(self, name: str, code: str, price: float):
self.name = name
self.code = code
self.price = price
# Breakdown coupon's description into quantifiable attributes
# For example: BOGO on coffee can be translated as an objec... | [
"json.dumps"
] | [((1689, 1748), 'json.dumps', 'json.dumps', (['self.basket_items'], {'default': '(lambda x: x.__dict__)'}), '(self.basket_items, default=lambda x: x.__dict__)\n', (1699, 1748), False, 'import json\n')] |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------... | [
"asyncio.get_running_loop",
"asyncio._get_running_loop"
] | [((402, 428), 'asyncio.get_running_loop', 'asyncio.get_running_loop', ([], {}), '()\n', (426, 428), False, 'import asyncio\n'), ((484, 511), 'asyncio._get_running_loop', 'asyncio._get_running_loop', ([], {}), '()\n', (509, 511), False, 'import asyncio\n')] |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by app... | [
"metagym.liftsim.environment.env.LiftSim",
"rule_benchmark.dispatcher.Rule_dispatcher",
"traceback.print_tb",
"sys.exc_info",
"copy.deepcopy",
"metagym.liftsim.environment.mansion.utils.ElevatorAction",
"traceback.extract_tb"
] | [((12809, 12828), 'metagym.liftsim.environment.env.LiftSim', 'LiftSim', (['configfile'], {}), '(configfile)\n', (12816, 12828), False, 'from metagym.liftsim.environment.env import LiftSim\n'), ((12438, 12458), 'copy.deepcopy', 'copy.deepcopy', (['state'], {}), '(state)\n', (12451, 12458), False, 'import copy\n'), ((129... |
import os
import wave
from array import array
from struct import pack
from sys import byteorder
import pyaudio
import soundfile
from .emotion_recognition import EmotionRecognizer
from .utils import get_best_estimators
THRESHOLD = 500
CHUNK_SIZE = 1024
FORMAT = pyaudio.paInt16
RATE = 16000
SILENCE = 30
def is_silen... | [
"wave.open",
"pyaudio.PyAudio",
"array.array",
"argparse.ArgumentParser"
] | [((564, 574), 'array.array', 'array', (['"""h"""'], {}), "('h')\n", (569, 574), False, 'from array import array\n'), ((1774, 1791), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (1789, 1791), False, 'import pyaudio\n'), ((1974, 1984), 'array.array', 'array', (['"""h"""'], {}), "('h')\n", (1979, 1984), False, ... |
# coding=utf-8
import os
import json
# Get the directory of the latest model prediction data
def get_latest_model_predict_data_dir(new_epochs_ckpt_dir=None):
    # Get the path of the newest file under a directory
    def new_report(test_report):
        lists = os.listdir(test_report) # list every file and folder under the directory into lists
        lists.sort(key=lambda fn: os.path.getmtime(test_report + "/" + fn)) # sort by modification time... | [
"os.path.exists",
"os.listdir",
"os.makedirs",
"json.dumps",
"os.path.join",
"os.path.dirname",
"os.path.getmtime"
] | [((186, 209), 'os.listdir', 'os.listdir', (['test_report'], {}), '(test_report)\n', (196, 209), False, 'import os\n'), ((340, 376), 'os.path.join', 'os.path.join', (['test_report', 'lists[-1]'], {}), '(test_report, lists[-1])\n', (352, 376), False, 'import os\n'), ((733, 777), 'os.path.join', 'os.path.join', (['input_n... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: InstructionTestGen.py $
"""
Instruction Test Generator.
"""
from __future__ import print_function;
__copyright__ = \
"""
Copyright (C) 2012-2017 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.... | [
"random.Random",
"os.urandom",
"io.open",
"sys.stderr.write",
"os.path.basename"
] | [((5119, 5147), 'random.Random', 'random.Random', (['g_iMyRandSeed'], {}), '(g_iMyRandSeed)\n', (5132, 5147), False, 'import random\n'), ((94670, 94743), 'sys.stderr.write', 'sys.stderr.write', (["('InstructionTestGen.py: Seed = %s\\n' % (g_iMyRandSeed,))"], {}), "('InstructionTestGen.py: Seed = %s\\n' % (g_iMyRandSeed... |
# -*- coding: utf-8 -*-
""".. moduleauthor:: <NAME>"""
import abc
from copy import copy
from dataclasses import dataclass
from multiprocessing.managers import SharedMemoryManager
from multiprocessing.shared_memory import SharedMemory
from typing import Tuple, List, Optional, final, TypeVar, Generic
from torch.utils.da... | [
"bann.b_data_functions.errors.custom_erors.KnownErrorBannData",
"copy.copy",
"numpy.array",
"multiprocessing.managers.SharedMemoryManager",
"numpy.ndarray",
"numpy.dtype",
"typing.TypeVar"
] | [((1243, 1259), 'typing.TypeVar', 'TypeVar', (['"""_TypD"""'], {}), "('_TypD')\n", (1250, 1259), False, 'from typing import Tuple, List, Optional, final, TypeVar, Generic\n'), ((509, 526), 'numpy.dtype', 'np.dtype', (['"""float"""'], {}), "('float')\n", (517, 526), True, 'import numpy as np\n'), ((3903, 3967), 'numpy.n... |
#!/bin/python3
import exploit
import ui_setup
from time import sleep
checkrain = exploit.Checkrain()
checkrain.REMOTE_SSH_CC = '<EMAIL>'
window = ui_setup.UI.window
keep_printing=True
while True:
if window['-OUTPUT-'].DisplayText.count('\n') >= 14:
window['-OUTPUT-'].DisplayText = window['-OUTPUT-']... | [
"exploit.Checkrain"
] | [((86, 105), 'exploit.Checkrain', 'exploit.Checkrain', ([], {}), '()\n', (103, 105), False, 'import exploit\n')] |
import os.path
import sys
import types
import typing
import unittest
from datetime import datetime, date
from functools import wraps
from io import BytesIO, StringIO
from typing import List, Tuple, Callable, Any, Optional, Union, Dict, Set, FrozenSet, NewType, TypeVar, Sequence, \
AbstractSet, Iterator, NamedTuple,... | [
"datetime.datetime",
"io.BytesIO",
"functools.wraps",
"typing.NewType",
"datetime.date",
"io.StringIO",
"typing.NamedTuple",
"typing.TypeVar"
] | [((24263, 24285), 'typing.NewType', 'NewType', (['"""UserId"""', 'int'], {}), "('UserId', int)\n", (24270, 24285), False, 'from typing import List, Tuple, Callable, Any, Optional, Union, Dict, Set, FrozenSet, NewType, TypeVar, Sequence, AbstractSet, Iterator, NamedTuple, Collection, Type, Generator, Generic, BinaryIO, ... |
# Generated by Django 2.1.7 on 2019-07-06 04:48
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('jobs', '0004_auto_20190706_0012'),
]
operations = [
migrations.AddField(
model_name='job',
... | [
"django.db.models.DateTimeField"
] | [((355, 431), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'django.utils.timezone.now', 'verbose_name': '"""Date"""'}), "(default=django.utils.timezone.now, verbose_name='Date')\n", (375, 431), False, 'from django.db import migrations, models\n')] |
import os
from app import create_app
from dotenv import load_dotenv
# .env
dotenv_path = os.path.join(os.path.dirname(__file__), ".env")
if os.path.exists(dotenv_path):
load_dotenv(dotenv_path)
app = create_app(os.environ.get("FLASK_CONFIG") or "default")
if __name__ == "__main__":
app.run() | [
"os.path.dirname",
"os.path.exists",
"os.environ.get",
"dotenv.load_dotenv"
] | [((142, 169), 'os.path.exists', 'os.path.exists', (['dotenv_path'], {}), '(dotenv_path)\n', (156, 169), False, 'import os\n'), ((104, 129), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (119, 129), False, 'import os\n'), ((175, 199), 'dotenv.load_dotenv', 'load_dotenv', (['dotenv_path'], {})... |
import os
import tensorflow as tf
import numpy as np
import mcubes
from ops import *
class ZGenerator:
def __init__(self, sess, z_dim=128, ef_dim=32, gf_dim=128, dataset_name=None):
self.sess = sess
self.input_size = 64
self.z_dim = z_dim
self.ef_dim = ef_dim
self... | [
"tensorflow.tile",
"numpy.reshape",
"tensorflow.variable_scope",
"tensorflow.reshape",
"tensorflow.placeholder",
"tensorflow.train.Saver",
"os.path.join",
"mcubes.marching_cubes",
"tensorflow.train.get_checkpoint_state",
"tensorflow.concat",
"numpy.zeros",
"os.path.basename",
"re.finditer",
... | [((587, 642), 'tensorflow.placeholder', 'tf.placeholder', ([], {'shape': '[1, self.z_dim]', 'dtype': 'tf.float32'}), '(shape=[1, self.z_dim], dtype=tf.float32)\n', (601, 642), True, 'import tensorflow as tf\n'), ((669, 729), 'tensorflow.placeholder', 'tf.placeholder', ([], {'shape': '[self.batch_size, 3]', 'dtype': 'tf... |
import pytest
from faker import Faker
from fastapi.encoders import jsonable_encoder
from pydantic.types import SecretStr
from sqlalchemy.orm import Session
from app import crud, schemas
from app.core import security
def test_create_user(db: Session) -> None:
faker = Faker()
profile = faker.profile()
emai... | [
"app.crud.user.create",
"pydantic.types.SecretStr",
"faker.Faker",
"pytest.mark.parametrize",
"fastapi.encoders.jsonable_encoder"
] | [((1726, 1791), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""search_by"""', "('email', 'username', 'id')"], {}), "('search_by', ('email', 'username', 'id'))\n", (1749, 1791), False, 'import pytest\n'), ((274, 281), 'faker.Faker', 'Faker', ([], {}), '()\n', (279, 281), False, 'from faker import Faker\n'),... |
import os
os.environ["PYRO_LOGFILE"] = "pyro.log"
os.environ["PYRO_LOGLEVEL"] = "DEBUG"
import Pyro4
import Pyro4.util
import Pyro4.naming
import sys
import pprint
"""
Front end controller for the 2017/18 Networks and Distributed Systems
Summative Assignment.
Author: Z0954757
"""
sys.excepthook = Pyro4.util.excepth... | [
"Pyro4.Daemon",
"Pyro4.locateNS",
"Pyro4.Proxy",
"pprint.PrettyPrinter"
] | [((329, 351), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {}), '()\n', (349, 351), False, 'import pprint\n'), ((538, 552), 'Pyro4.Daemon', 'Pyro4.Daemon', ([], {}), '()\n', (550, 552), False, 'import Pyro4\n'), ((626, 642), 'Pyro4.locateNS', 'Pyro4.locateNS', ([], {}), '()\n', (640, 642), False, 'import Pyro4\... |
import unittest
from kleat.hexamer.search import plus_search, minus_search, search
from kleat.hexamer.hexamer import extract_seq
class TestSearchHexamer(unittest.TestCase):
def test_plus_search(self):
self.assertEqual(plus_search('GGGAATAAAG', 9), ('AATAAA', 16, 3))
self.assertEqual(plus_search('... | [
"kleat.hexamer.search.search",
"kleat.hexamer.search.minus_search",
"kleat.hexamer.search.plus_search"
] | [((233, 261), 'kleat.hexamer.search.plus_search', 'plus_search', (['"""GGGAATAAAG"""', '(9)'], {}), "('GGGAATAAAG', 9)\n", (244, 261), False, 'from kleat.hexamer.search import plus_search, minus_search, search\n'), ((307, 334), 'kleat.hexamer.search.plus_search', 'plus_search', (['"""GGGAATAAA"""', '(9)'], {}), "('GGGA... |
import nuke
def delete_pt():
max_pts = int(nuke.thisNode().knob('Max PTS').value()) - 1
if max_pts < 2:
nuke.message('Minimum 2 points')
return
pt_num = int(nuke.thisKnob().name()[6:])
node = nuke.thisNode()
for pt in xrange(pt_num, max_pts):
knob_name = 'pt' + str(pt)
... | [
"nuke.message",
"nuke.Text_Knob",
"nuke.PyScript_Knob",
"nuke.thisNode",
"nuke.thisKnob",
"nuke.Tab_Knob"
] | [((228, 243), 'nuke.thisNode', 'nuke.thisNode', ([], {}), '()\n', (241, 243), False, 'import nuke\n'), ((962, 977), 'nuke.thisNode', 'nuke.thisNode', ([], {}), '()\n', (975, 977), False, 'import nuke\n'), ((1991, 2006), 'nuke.thisNode', 'nuke.thisNode', ([], {}), '()\n', (2004, 2006), False, 'import nuke\n'), ((3559, 3... |
import json
from django.core.management.base import BaseCommand
from 臺灣言語平臺.正規化團隊模型 import 正規化sheet表
from django.conf import settings
class Command(BaseCommand):
    help = 'add the sheet JSON'
def add_arguments(self, parser):
parser.add_argument(
'服務帳戶json',
type=str,
hel... | [
"json.load",
"臺灣言語平臺.正規化團隊模型.正規化sheet表.加sheet"
] | [((605, 698), '臺灣言語平臺.正規化團隊模型.正規化sheet表.加sheet', '正規化sheet表.加sheet', ([], {'語言腔口': 'settings.MOTHER_TONGUE', 'key_file_name': "參數['服務帳戶json']", 'url': "參數['網址']"}), "(語言腔口=settings.MOTHER_TONGUE, key_file_name=參數['服務帳戶json'],\n url=參數['網址'])\n", (621, 698), False, 'from 臺灣言語平臺.正規化團隊模型 import 正規化sheet表\n'), ((595, 60... |
"""
Flask-GoogleLogin
"""
from base64 import (urlsafe_b64encode as b64encode,
urlsafe_b64decode as b64decode)
from urllib import urlencode
from urlparse import parse_qsl
from functools import wraps
from flask import request, redirect, abort, current_app, url_for
from flask_login import LoginManage... | [
"flask_login.LoginManager",
"flask.request.args.get",
"flask_login.make_secure_token",
"flask.request.args.items",
"functools.wraps",
"flask.url_for",
"flask.abort"
] | [((5317, 5333), 'functools.wraps', 'wraps', (['view_func'], {}), '(view_func)\n', (5322, 5333), False, 'from functools import wraps\n'), ((970, 984), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (982, 984), False, 'from flask_login import LoginManager, make_secure_token\n'), ((4365, 4375), 'flask.abort... |
"""Test agrirouter/environments/environments.py"""
from agrirouter.environments.environments import ProductionEnvironment as PE
from agrirouter.environments.environments import QAEnvironment as QAE
from tests.constants import application_id
class TestPE:
def test_get_base_url(self):
assert PE().get_base_... | [
"agrirouter.environments.environments.ProductionEnvironment._MQTT_URL_TEMPLATE.format",
"agrirouter.environments.environments.QAEnvironment",
"agrirouter.environments.environments.QAEnvironment._MQTT_URL_TEMPLATE.format",
"agrirouter.environments.environments.ProductionEnvironment",
"agrirouter.environments... | [((2200, 2259), 'agrirouter.environments.environments.ProductionEnvironment._MQTT_URL_TEMPLATE.format', 'PE._MQTT_URL_TEMPLATE.format', ([], {'host': '"""localhost"""', 'port': '"""5000"""'}), "(host='localhost', port='5000')\n", (2228, 2259), True, 'from agrirouter.environments.environments import ProductionEnvironmen... |
#!/usr/bin/env python3
import numpy as np
from scipy import signal as sig
class pySparSDRCompress():
'''
Implementation of the SparSDR Compressor based on
<NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>. and <NAME>., 2019, June. Sparsdr: Sparsity-proportional backhaul and compute for sdrs. In Proceedings of ... | [
"numpy.abs",
"numpy.fft.fft",
"scipy.signal.windows.hann",
"numpy.zeros",
"numpy.empty",
"numpy.concatenate",
"numpy.expand_dims"
] | [((718, 756), 'scipy.signal.windows.hann', 'sig.windows.hann', (['self.nfft'], {'sym': '(False)'}), '(self.nfft, sym=False)\n', (734, 756), True, 'from scipy import signal as sig\n'), ((782, 820), 'numpy.expand_dims', 'np.expand_dims', (['self.windowVec'], {'axis': '(1)'}), '(self.windowVec, axis=1)\n', (796, 820), Tru... |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not u... | [
"json.loads",
"json.dumps"
] | [((4021, 4052), 'json.dumps', 'json.dumps', (['((func_name,) + args)'], {}), '((func_name,) + args)\n', (4031, 4052), False, 'import json\n'), ((4497, 4521), 'json.loads', 'json.loads', (['workload_key'], {}), '(workload_key)\n', (4507, 4521), False, 'import json\n')] |
import pickle
import pandas as pd
import yaml
from sklearn.linear_model import ElasticNet, LogisticRegression
from sklearn.ensemble import RandomForestRegressor
from config import Config
Config.MODELS_PATH.mkdir(parents=True, exist_ok=True)
with open("params.yaml", "r") as fd:
params = yaml.safe_load(fd)
model... | [
"sklearn.ensemble.RandomForestRegressor",
"sklearn.linear_model.ElasticNet",
"config.Config.MODELS_PATH.mkdir",
"sklearn.linear_model.LogisticRegression",
"yaml.safe_load"
] | [((189, 242), 'config.Config.MODELS_PATH.mkdir', 'Config.MODELS_PATH.mkdir', ([], {'parents': '(True)', 'exist_ok': '(True)'}), '(parents=True, exist_ok=True)\n', (213, 242), False, 'from config import Config\n'), ((295, 313), 'yaml.safe_load', 'yaml.safe_load', (['fd'], {}), '(fd)\n', (309, 313), False, 'import yaml\n... |
# Copyright (c) 2016 The OpenTracing Authors.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, pub... | [
"opentracing.follows_from",
"time.time",
"pytest.raises",
"opentracing.start_child_span"
] | [((3082, 3147), 'opentracing.start_child_span', 'opentracing.start_child_span', (['parent_span'], {'operation_name': '"""Leela"""'}), "(parent_span, operation_name='Leela')\n", (3110, 3147), False, 'import opentracing\n'), ((5053, 5064), 'time.time', 'time.time', ([], {}), '()\n', (5062, 5064), False, 'import time\n'),... |
from typing import TypeVar, Generic, Optional, Type, Any, Union, Dict, TYPE_CHECKING
from unipipeline.errors.uni_payload_error import UniPayloadParsingError, UniAnswerPayloadParsingError
from unipipeline.errors.uni_sending_to_worker_error import UniSendingToWorkerError
from unipipeline.answer.uni_answer_message import... | [
"unipipeline.errors.uni_work_flow_error.UniWorkFlowError",
"unipipeline.message_meta.uni_message_meta.UniAnswerParams",
"unipipeline.errors.uni_sending_to_worker_error.UniSendingToWorkerError",
"unipipeline.worker.uni_worker_consumer_manager.UniWorkerConsumerManager",
"typing.TypeVar"
] | [((1052, 1097), 'typing.TypeVar', 'TypeVar', (['"""TInputMsgPayload"""'], {'bound': 'UniMessage'}), "('TInputMsgPayload', bound=UniMessage)\n", (1059, 1097), False, 'from typing import TypeVar, Generic, Optional, Type, Any, Union, Dict, TYPE_CHECKING\n'), ((1118, 1174), 'typing.TypeVar', 'TypeVar', (['"""TAnswerMsgPayl... |
import math
from oscontainer.constants import CGROUP_TYPE_V2, PER_CPU_SHARES, NO_LIMIT
from oscontainer.cgroup_subsystem import CgroupController, CgroupSubsystem
from oscontainer.utils import limit_from_str
CPU_WEIGHT = "cpu.weight"
CPU_MAX = "cpu.max"
CPU_CPUSET_CPUS = "cpuset.cpus"
CPU_CPUSET_CPUS_EFFECTIVE = "cpus... | [
"oscontainer.utils.limit_from_str",
"math.ceil",
"math.floor"
] | [((2893, 2925), 'oscontainer.utils.limit_from_str', 'limit_from_str', (['cpu_quota_res[0]'], {}), '(cpu_quota_res[0])\n', (2907, 2925), False, 'from oscontainer.utils import limit_from_str\n'), ((3733, 3759), 'oscontainer.utils.limit_from_str', 'limit_from_str', (['memory_str'], {}), '(memory_str)\n', (3747, 3759), Fal... |
"""
Enum Assembler-Directives
"""
from enum import Enum, auto
class AssemblerDirectives(Enum):
START = auto()
END = auto()
ORG = auto()
DEFINE = auto()
@classmethod
def to_string(cls):
return "{START},{END},{ORG},{DEFINE}".format(
START=cls.START.name,
END=cls... | [
"enum.auto"
] | [((110, 116), 'enum.auto', 'auto', ([], {}), '()\n', (114, 116), False, 'from enum import Enum, auto\n'), ((127, 133), 'enum.auto', 'auto', ([], {}), '()\n', (131, 133), False, 'from enum import Enum, auto\n'), ((144, 150), 'enum.auto', 'auto', ([], {}), '()\n', (148, 150), False, 'from enum import Enum, auto\n'), ((16... |
from dataclasses import dataclass
import pele_platform.Checker.main as ck
import pele_platform.Frag.simulation as fr
import pele_platform.Adaptive.simulation as ad
from pele_platform.Allosteric.main import run_allosteric
import pele_platform.gpcr.main as gpcr
import pele_platform.out_in.main as outin
from pele_platform... | [
"pele_platform.out_in.main.OutInLauncher",
"pele_platform.gpcr.main.GpcrLauncher",
"pele_platform.Checker.main.check_executable_and_env_variables",
"pele_platform.PPI.main.run_ppi",
"pele_platform.Allosteric.main.run_allosteric",
"pele_platform.Adaptive.simulation.run_adaptive",
"pele_platform.Frag.simu... | [((1038, 1087), 'pele_platform.Checker.main.check_executable_and_env_variables', 'ck.check_executable_and_env_variables', (['self._args'], {}), '(self._args)\n', (1075, 1087), True, 'import pele_platform.Checker.main as ck\n'), ((1153, 1180), 'pele_platform.Adaptive.simulation.run_adaptive', 'ad.run_adaptive', (['self.... |
"""
all subsets of a given subset
"""
def subsets_of_subset(subset):
s = subset
superset = subset
while True:
yield s
s = (s - 1) & superset
if s == superset:
break
# --- end of library ---
def debugprint(g):
for x in g:
print(f"{x:06b}")
TEST_1 = """
>>> ... | [
"doctest.testmod",
"doctest.run_docstring_examples",
"sys.exit"
] | [((458, 475), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (473, 475), False, 'import doctest\n'), ((801, 811), 'sys.exit', 'sys.exit', ([], {}), '()\n', (809, 811), False, 'import sys\n'), ((585, 632), 'doctest.run_docstring_examples', 'doctest.run_docstring_examples', (['g[k]', 'g'], {'name': 'k'}), '(g[k]... |
#from https://www.assemblyai.com/blog/end-to-end-speech-recognition-pytorch/
from torch import nn
import torch.nn.functional as F
from hw_asr.base import BaseModel
class CNNLayerNorm(nn.Module):
def __init__(self, n_feats):
super().__init__()
self.layer_norm = nn.LayerNorm(n_feats)
def forw... | [
"torch.nn.Dropout",
"torch.nn.GELU",
"torch.nn.Sequential",
"torch.nn.LayerNorm",
"torch.nn.functional.gelu",
"torch.nn.Conv2d",
"torch.nn.Linear",
"torch.nn.GRU"
] | [((285, 306), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['n_feats'], {}), '(n_feats)\n', (297, 306), False, 'from torch import nn\n'), ((1768, 1882), 'torch.nn.GRU', 'nn.GRU', ([], {'input_size': 'rnn_dim', 'hidden_size': 'hidden_size', 'num_layers': '(1)', 'batch_first': 'batch_first', 'bidirectional': '(True)'}), '(inpu... |
import datetime
from typing import List
from reminders.events import Buttons, Alerts
from reminders.screen import Screen
# highest level, things that can be in a list menu
class ListMenuItem:
def __init__(self, name):
self._name = str(name)
@property
def name(self):
return self._name
... | [
"reminders.screen.Screen.menu_screen",
"reminders.screen.Screen.off",
"datetime.datetime.now",
"reminders.screen.Screen.toggle_backlight",
"reminders.screen.Screen.TextLine",
"datetime.timedelta",
"reminders.events.Alerts.sort_alerts"
] | [((2412, 2435), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2433, 2435), False, 'import datetime\n'), ((4075, 4106), 'reminders.screen.Screen.menu_screen', 'Screen.menu_screen', (['title', 'text'], {}), '(title, text)\n', (4093, 4106), False, 'from reminders.screen import Screen\n'), ((5944, 59... |
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_NAME
from .const import (
CONF_CHILD_LOCK,
CONF_CLIMATE,
CONF_DEVICE_ID,
CONF_DISPLAY_LIGHT,
CONF_LOCAL_KEY,
CONF_TYPE,
CONF_TYPE_AUTO,
CONF_TYPE_DEHUMIDIFIER,
CONF_TYPE_FAN,
CONF_TYPE_GECO_HEATER,
CONF... | [
"voluptuous.Required",
"voluptuous.Optional",
"voluptuous.In"
] | [((751, 887), 'voluptuous.In', 'vol.In', (['[CONF_TYPE_AUTO, CONF_TYPE_GPPH_HEATER, CONF_TYPE_DEHUMIDIFIER,\n CONF_TYPE_FAN, CONF_TYPE_GECO_HEATER, CONF_TYPE_GPCV_HEATER]'], {}), '([CONF_TYPE_AUTO, CONF_TYPE_GPPH_HEATER, CONF_TYPE_DEHUMIDIFIER,\n CONF_TYPE_FAN, CONF_TYPE_GECO_HEATER, CONF_TYPE_GPCV_HEATER])\n', (... |
from nlp20 import get_england
import re
text = get_england()
lines = text.split('\n')
p = re.compile(r'^(=+)\s*(.+?)\s*=+')
for l in lines:
m = re.search(p, l)
if m is not None:
level = len(m.group(1)) - 1
print(m.group(2), level)
| [
"re.search",
"nlp20.get_england",
"re.compile"
] | [((47, 60), 'nlp20.get_england', 'get_england', ([], {}), '()\n', (58, 60), False, 'from nlp20 import get_england\n'), ((89, 123), 're.compile', 're.compile', (['"""^(=+)\\\\s*(.+?)\\\\s*=+"""'], {}), "('^(=+)\\\\s*(.+?)\\\\s*=+')\n", (99, 123), False, 'import re\n'), ((148, 163), 're.search', 're.search', (['p', 'l'],... |
"""This file contain the model for the usermanagement app."""
from django.contrib.auth.models import AbstractUser, Group, Permission
from django.db import models
class UserProfile(AbstractUser):
"""
Define a user.
    Here, we inherit from AbstractUser and add the field nb_tries
to detect if ... | [
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((384, 414), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (403, 414), False, 'from django.db import models\n'), ((1127, 1159), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1143, 1159), False, 'from django.d... |
# -*- coding: utf-8 -*-
from django import template
import urllib
import hashlib
register = template.Library()
def gravatar(email, size=80, username=None):
gravatar_url = "http://www.gravatar.com/avatar.php?"
gravatar_url += urllib.urlencode({
'gravatar_id': hashlib.md5(email).hexdigest(),
... | [
"hashlib.md5",
"django.template.Library"
] | [((95, 113), 'django.template.Library', 'template.Library', ([], {}), '()\n', (111, 113), False, 'from django import template\n'), ((280, 298), 'hashlib.md5', 'hashlib.md5', (['email'], {}), '(email)\n', (291, 298), False, 'import hashlib\n')] |
# encoding=utf-8
"""
Misc PyTorch utils
Author: <EMAIL>
update 12.7
Usage:
`from torch_utils import *`
`func_name()` # to call functions in this file
"""
from datetime import datetime
import math
import os
import torch
import torch.nn as nn
from tensorboardX import SummaryWriter
#########################... | [
"os.path.exists",
"tensorboardX.SummaryWriter",
"os.makedirs",
"torch.load",
"os.path.join",
"math.cos",
"datetime.datetime.now",
"os.mkdir",
"utils.misc_utils.format_num",
"torch.clamp"
] | [((648, 672), 'torch.clamp', 'torch.clamp', (['x', 'min', 'max'], {}), '(x, min, max)\n', (659, 672), False, 'import torch\n'), ((9263, 9313), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['log_dir'], {'max_queue': '(0)', 'flush_secs': '(10)'}), '(log_dir, max_queue=0, flush_secs=10)\n', (9276, 9313), False, 'from t... |
#!/usr/bin/python
# Copyright (c) 2017 <NAME>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: route_vpn
short_description: Crea... | [
"ansible.module_utils.stonesoft_util.Cache",
"traceback.format_exc",
"smc.vpn.route.RouteVPN.create_ipsec_tunnel",
"smc.vpn.route.TunnelEndpoint.create_ipsec_endpoint",
"smc.vpn.elements.ExternalGateway.update_or_create"
] | [((10225, 10232), 'ansible.module_utils.stonesoft_util.Cache', 'Cache', ([], {}), '()\n', (10230, 10232), False, 'from ansible.module_utils.stonesoft_util import StonesoftModuleBase, Cache\n'), ((13173, 13272), 'smc.vpn.route.TunnelEndpoint.create_ipsec_endpoint', 'TunnelEndpoint.create_ipsec_endpoint', (['local_engine... |
# Lint as: python3
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by ... | [
"tfx.orchestration.metadata.Metadata",
"tfx.utils.io_utils.copy_dir",
"os.path.join",
"absl.logging.info",
"tfx.orchestration.metadata.sqlite_metadata_connection_config",
"collections.defaultdict",
"ml_metadata.proto.metadata_store_pb2.MetadataStoreClientConfig",
"tensorflow.io.gfile.exists"
] | [((2681, 2710), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (2704, 2710), False, 'import collections\n'), ((2224, 2268), 'os.path.join', 'os.path.join', (['output_dir', 'component_id', 'name'], {}), '(output_dir, component_id, name)\n', (2236, 2268), False, 'import os\n'), ((5014, ... |
import argparse
from distutils.util import strtobool
def str2bool(x):
return bool(strtobool(x))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--num_epochs', type=int, default=1000)
parser.add_argument('--learning_rate', type=float, default=0.0005)
parser.add_argument... | [
"distutils.util.strtobool",
"argparse.ArgumentParser"
] | [((135, 160), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (158, 160), False, 'import argparse\n'), ((88, 100), 'distutils.util.strtobool', 'strtobool', (['x'], {}), '(x)\n', (97, 100), False, 'from distutils.util import strtobool\n')] |
# This a training script launched with py_config_runner
# It must contain a `run(config, **kwargs)` method
import sys
from collections.abc import Mapping
from pathlib import Path
import torch
from apex import amp
from dataflow.datasets import VOCSegmentationOpencv
from py_config_runner.config_utils import ... | [
"ignite.engine.create_supervised_evaluator",
"apex.amp.scale_loss",
"ignite.contrib.engines.common.add_early_stopping_by_val_score",
"ignite.engine.Engine",
"torch.cuda.is_available",
"ignite.distributed.get_local_rank",
"ignite.contrib.engines.common.ProgressBar",
"utils.exp_tracking.get_output_path"... | [((1180, 1203), 'ignite.distributed.auto_model', 'idist.auto_model', (['model'], {}), '(model)\n', (1196, 1203), True, 'import ignite.distributed as idist\n'), ((2849, 2878), 'ignite.engine.Engine', 'Engine', (['train_update_function'], {}), '(train_update_function)\n', (2855, 2878), False, 'from ignite.engine import E... |
#! usr/bin/dev python
from stages import Stages #Reads the stages
from code import tanks #Responsible for the player's tanks
from images import imagens #game images
import pygame
import random
screen_Dimension=[32*20,32*20]
pygame.init()
screen = pygame.display.set_mode(screen_Dimension)
pygame.display.... | [
"pygame.display.set_caption",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"code.tanks.PlayerTank",
"pygame.time.Clock",
"stages.Stages.Stages",
"pygame.display.update"
] | [((238, 251), 'pygame.init', 'pygame.init', ([], {}), '()\n', (249, 251), False, 'import pygame\n'), ((262, 303), 'pygame.display.set_mode', 'pygame.display.set_mode', (['screen_Dimension'], {}), '(screen_Dimension)\n', (285, 303), False, 'import pygame\n'), ((305, 358), 'pygame.display.set_caption', 'pygame.display.se... |
from typing import Dict
from numba import njit
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams['image.cmap'] = 'binary'
def read_parameters(filename: str) -> Dict[str, float]:
"""Read parameters from a file to a dictionary and return it."""
parameters = {}
with open(filename, "r") as file:... | [
"numpy.random.choice",
"numpy.random.permutation",
"numpy.random.random",
"matplotlib.pyplot.tick_params",
"numpy.argmax",
"numpy.any",
"numpy.max",
"numpy.argsort",
"numpy.sum",
"numpy.zeros",
"numpy.random.randint",
"numpy.empty_like",
"numpy.empty",
"numpy.min",
"matplotlib.pyplot.sub... | [((1140, 1187), 'numpy.empty', 'np.empty', (['population.shape[0]'], {'dtype': 'np.float32'}), '(population.shape[0], dtype=np.float32)\n', (1148, 1187), True, 'import numpy as np\n'), ((2136, 2189), 'numpy.empty', 'np.empty', (['(population.shape[0] // 2,)'], {'dtype': 'np.int32'}), '((population.shape[0] // 2,), dtyp... |
#!/usr/bin/env python
u"""
radial_basis.py
Written by <NAME> (01/2022)
Interpolates data using radial basis functions
CALLING SEQUENCE:
ZI = radial_basis(xs, ys, zs, XI, YI, polynomial=0,
smooth=smooth, epsilon=epsilon, method='inverse')
INPUTS:
xs: scaled input X data
ys: scaled input Y data
... | [
"numpy.mean",
"numpy.eye",
"numpy.sqrt",
"numpy.ones",
"numpy.log",
"numpy.ndim",
"numpy.squeeze",
"numpy.exp",
"numpy.array",
"numpy.zeros",
"numpy.dot",
"numpy.linalg.lstsq",
"numpy.concatenate",
"numpy.shape",
"numpy.tri"
] | [((3345, 3359), 'numpy.squeeze', 'np.squeeze', (['xs'], {}), '(xs)\n', (3355, 3359), True, 'import numpy as np\n'), ((3369, 3383), 'numpy.squeeze', 'np.squeeze', (['ys'], {}), '(ys)\n', (3379, 3383), True, 'import numpy as np\n'), ((3393, 3407), 'numpy.squeeze', 'np.squeeze', (['zs'], {}), '(zs)\n', (3403, 3407), True,... |
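The docstring above gives the calling sequence but the solver itself is truncated; the record's API list shows it relies on numpy.linalg.lstsq. A minimal hedged sketch of the same idea with a Gaussian kernel and np.linalg.solve (the kernel choice and the smoothing handling are assumptions, not the module's actual implementation):

import numpy as np

def rbf_interpolate_sketch(xs, ys, zs, XI, YI, epsilon=1.0, smooth=0.0):
    pts = np.column_stack((xs, ys))  # (n, 2) scattered data sites
    # Pairwise distances between the data sites.
    D = np.sqrt(((pts[:, None, :] - pts[None, :, :]) ** 2).sum(-1))
    A = np.exp(-(epsilon * D) ** 2) + smooth * np.eye(len(xs))
    w = np.linalg.solve(A, zs)  # RBF weights
    # Distances from the query points to the data sites.
    qry = np.column_stack((XI, YI))
    E = np.sqrt(((qry[:, None, :] - pts[None, :, :]) ** 2).sum(-1))
    return np.exp(-(epsilon * E) ** 2) @ w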
from flask import Flask, jsonify, request
app = Flask(__name__)
@app.route('/', methods =['GET', 'POST'])
def index():
if (request.method == 'POST'):
some_json = request.get_json()
return jsonify({'you sent': some_json}),201
else:
return jsonify({"about" : "Hello World!"})
@app.route('... | [
"flask.jsonify",
"flask.request.get_json",
"flask.Flask"
] | [((48, 63), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (53, 63), False, 'from flask import Flask, jsonify, request\n'), ((385, 413), 'flask.jsonify', 'jsonify', (["{'result': n1 * 10}"], {}), "({'result': n1 * 10})\n", (392, 413), False, 'from flask import Flask, jsonify, request\n'), ((175, 193), 'fla... |
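The record's extract column also captures jsonify({'result': n1 * 10}) from a route cut off by the truncation. A hedged reconstruction as a parameterised route (the path /multi/<int:n1> and the function name are assumptions; only the jsonify call itself comes from the record):

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/multi/<int:n1>', methods=['GET'])  # hypothetical route path
def multiply_ten(n1):
    return jsonify({'result': n1 * 10})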
from django.conf.urls import url
from rest_framework.urlpatterns import format_suffix_patterns
from . import views
urlpatterns = [
url(r'^risks/$', views.RiskTypeList.as_view(), name='risks_list'),
url(r'^risks/(?P<pk>[0-9]+)/$', views.RiskTypeDetail.as_view(), name='risk_details'),
url(r'^fields/$', views... | [
"rest_framework.urlpatterns.format_suffix_patterns"
] | [((381, 416), 'rest_framework.urlpatterns.format_suffix_patterns', 'format_suffix_patterns', (['urlpatterns'], {}), '(urlpatterns)\n', (403, 416), False, 'from rest_framework.urlpatterns import format_suffix_patterns\n')] |
"""Test for certbot_nginx.nginxparser."""
import copy
import operator
import tempfile
import unittest
from pyparsing import ParseException
from certbot_nginx.nginxparser import (
RawNginxParser, loads, load, dumps, dump, UnspacedList)
from certbot_nginx.tests import util
FIRST = operator.itemgetter(0)
class T... | [
"certbot_nginx.nginxparser.RawNginxParser.block.parseString",
"certbot_nginx.nginxparser.RawNginxParser.assignment.parseString",
"tempfile.TemporaryFile",
"certbot_nginx.nginxparser.dump",
"certbot_nginx.nginxparser.load",
"certbot_nginx.tests.util.get_data_filename",
"certbot_nginx.nginxparser.loads",
... | [((288, 310), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (307, 310), False, 'import operator\n'), ((16666, 16681), 'unittest.main', 'unittest.main', ([], {}), '()\n', (16679, 16681), False, 'import unittest\n'), ((7556, 7614), 'certbot_nginx.nginxparser.loads', 'loads', (['"""if ($http_accept... |
import pytest
from ..model_base_test import ModelBaseTest
from tests.sampleresponse.cardless_credit import cardless_credit_payment_response
from xendit.models import CardlessCredit, CardlessCreditType
# fmt: off
class TestCreateCardlessCreditPayment(ModelBaseTest):
@pytest.fixture
def default_cardles... | [
"xendit.models.CardlessCredit.helper_create_shipping_address",
"xendit.models.CardlessCredit.helper_create_customer_details",
"xendit.models.CardlessCredit.helper_create_item",
"tests.sampleresponse.cardless_credit.cardless_credit_payment_response"
] | [((897, 1101), 'xendit.models.CardlessCredit.helper_create_shipping_address', 'CardlessCredit.helper_create_shipping_address', ([], {'first_name': '"""<NAME>"""', 'last_name': '"""<NAME>"""', 'address': '"""Jl Teknologi No. 12"""', 'city': '"""Jakarta"""', 'postal_code': '"""12345"""', 'phone': '"""081513114262"""', 'c... |
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
PROXY_URL_NO_AUTH_1 = 'http://<proxy hostname 1>:<proxy port 1>'
cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord',
http_proxy=PROXY_URL_NO_AUTH_1)
| [
"libcloud.compute.providers.get_driver"
] | [((167, 197), 'libcloud.compute.providers.get_driver', 'get_driver', (['Provider.RACKSPACE'], {}), '(Provider.RACKSPACE)\n', (177, 197), False, 'from libcloud.compute.providers import get_driver\n')] |
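The same http_proxy keyword should also accept a proxy URL with basic-auth credentials embedded, following the standard http://user:password@host:port convention; a hedged variant of the snippet above:

from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

PROXY_URL_WITH_AUTH = 'http://<user>:<password>@<proxy hostname>:<proxy port>'

cls = get_driver(Provider.RACKSPACE)
driver = cls('username', 'api key', region='ord',
             http_proxy=PROXY_URL_WITH_AUTH)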
import os
from cakechat.config import BASE_CORPUS_NAME, S3_MODELS_BUCKET_NAME, S3_TOKENS_IDX_REMOTE_DIR, \
S3_NN_MODEL_REMOTE_DIR, S3_CONDITIONS_IDX_REMOTE_DIR
from cakechat.dialog_model.model import get_nn_model
from cakechat.utils.s3 import S3FileResolver
from cakechat.utils.text_processing import get_index_to_t... | [
"os.path.exists",
"cakechat.utils.s3.S3FileResolver",
"cakechat.utils.text_processing.get_index_to_condition_path",
"cakechat.utils.s3.S3FileResolver.init_resolver",
"cakechat.utils.text_processing.get_index_to_token_path",
"cakechat.utils.text_processing.load_index_to_item"
] | [((447, 488), 'cakechat.utils.text_processing.get_index_to_token_path', 'get_index_to_token_path', (['BASE_CORPUS_NAME'], {}), '(BASE_CORPUS_NAME)\n', (470, 488), False, 'from cakechat.utils.text_processing import get_index_to_token_path, load_index_to_item, get_index_to_condition_path\n'), ((1043, 1082), 'cakechat.uti... |
import os
import sys
import glob
import time
import copy
import random
import numpy as np
import utils
import logging
import argparse
import tensorflow as tf
import tensorflow.keras as keras
from model import NASNetworkCIFAR
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# os.environ['CUDA_VISIBLE_DEVICES'] = '1'
# Basic m... | [
"tensorflow.train.Checkpoint",
"model.NASNetworkCIFAR",
"tensorflow.GradientTape",
"tensorflow.nn.softmax",
"tensorflow.keras.losses.CategoricalCrossentropy",
"utils.AvgMeter",
"utils.count_parameters_in_MB",
"logging.info",
"tensorflow.clip_by_global_norm",
"argparse.ArgumentParser",
"tensorflo... | [((346, 371), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (369, 371), False, 'import argparse\n'), ((1616, 1652), 'utils.create_exp_dir', 'utils.create_exp_dir', (['args.model_dir'], {}), '(args.model_dir)\n', (1636, 1652), False, 'import utils\n'), ((1692, 1803), 'logging.basicConfig', 'log... |
from ethronsoft.gcspypi.package.package_manager import PackageManager
from ethronsoft.gcspypi.utilities.console import Console
from ethronsoft.gcspypi.parsers.commons import init_repository
def handle_(config, data):
with Console(verbose=config.get("verbose", False), exit_on_error=True) as c:
repo = init_r... | [
"ethronsoft.gcspypi.parsers.commons.init_repository"
] | [((314, 354), 'ethronsoft.gcspypi.parsers.commons.init_repository', 'init_repository', (['c', "config['repository']"], {}), "(c, config['repository'])\n", (329, 354), False, 'from ethronsoft.gcspypi.parsers.commons import init_repository\n')] |
from elasticsearch_dsl import *
import os
from glob import glob
import json
import re
from . import to_zh_cn
class Poet(Document):
dynasty = Text()
author = Text()
title = Text(analyzer='jieba_index', search_analyzer='jieba_search')
paragraphs = Text(analyzer='jieba_index', search_analyzer='jieba_sear... | [
"os.path.abspath",
"json.load",
"os.path.basename",
"re.compile"
] | [((849, 890), 're.compile', 're.compile', (['"""^[a-zA-Z]+\\\\.([a-zA-Z]+)\\\\."""'], {}), "('^[a-zA-Z]+\\\\.([a-zA-Z]+)\\\\.')\n", (859, 890), False, 'import re\n'), ((921, 946), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (936, 946), False, 'import os\n'), ((1092, 1104), 'json.load', 'js... |
from PIL import ImageGrab, Image
import cv2 as cv
import numpy as np
import match.template_matching as tm
import match.bilging as b
from mss import mss
def grab_screen():
img_src = ImageGrab.grab()
return cv.cvtColor(np.array(img_src.convert('RGB')), cv.COLOR_RGB2BGR)
class ScreenGrabber(object):
def gr... | [
"mss.mss",
"PIL.ImageGrab.grab",
"match.template_matching.build_pattern",
"PIL.Image.frombytes",
"cv2.imread"
] | [((187, 203), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {}), '()\n', (201, 203), False, 'from PIL import ImageGrab, Image\n'), ((1392, 1458), 'match.template_matching.build_pattern', 'tm.build_pattern', (['p', 'n'], {'shape': '(45, 45)', 'circle_mask': 'c', 'threshold': 't'}), '(p, n, shape=(45, 45), circle_mask=c, ... |
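The record's API list includes mss.mss and PIL.Image.frombytes, suggesting an mss-based capture path alongside ImageGrab. A hedged sketch of that common pattern (monitor index 1 is the primary display in mss):

from PIL import Image
from mss import mss

def grab_screen_mss():
    with mss() as sct:
        shot = sct.grab(sct.monitors[1])
        # mss returns raw BGRA bytes; decode them into an RGB PIL image.
        return Image.frombytes('RGB', shot.size, shot.bgra, 'raw', 'BGRX')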
"""
---
title: Deep Convolutional Generative Adversarial Networks (DCGAN)
summary: A simple PyTorch implementation/tutorial of Deep Convolutional Generative Adversarial Networks (DCGAN).
---
# Deep Convolutional Generative Adversarial Networks (DCGAN)
This is a [PyTorch](https://pytorch.org) implementation of paper
[... | [
"torch.nn.ConvTranspose2d",
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"labml.experiment.start",
"torch.nn.Tanh",
"torch.nn.LeakyReLU",
"torch.nn.init.constant_",
"torch.nn.Conv2d",
"labml_nn.gan.original.experiment.Configs",
"labml.experiment.configs",
"labml.experiment.create",
"torch.nn.init.... | [((3588, 3597), 'labml_nn.gan.original.experiment.Configs', 'Configs', ([], {}), '()\n', (3595, 3597), False, 'from labml_nn.gan.original.experiment import Configs\n'), ((3602, 3639), 'labml.experiment.create', 'experiment.create', ([], {'name': '"""mnist_dcgan"""'}), "(name='mnist_dcgan')\n", (3619, 3639), False, 'fro... |
import os
import sys
import pandas as pd
import numpy as np
import scipy.stats as st
import matplotlib.pyplot as plt
def read_udp(file_path):
with open(file_path, "r") as f:
data_dict = {'send':{}, 'rec':{}}
data = pd.read_csv(file_path, sep=",", engine='python', error_bad_lines=False, skiprows=1)
... | [
"matplotlib.pyplot.savefig",
"pandas.read_csv",
"os.path.join",
"matplotlib.pyplot.subplots",
"os.walk"
] | [((914, 932), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {}), '(1, 2)\n', (926, 932), True, 'import matplotlib.pyplot as plt\n'), ((2051, 2090), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""udp-latency-nofilter.png"""'], {}), "('udp-latency-nofilter.png')\n", (2062, 2090), True, 'import matplot... |
import re
from collections.abc import MutableMapping
from typing import Dict, List
import markovify
import nltk
class RangeDict(MutableMapping):
"""Enables a dictionary whose keys are ranges."""
def __init__(self, iterable: Dict):
if not isinstance(iterable, dict):
raise TypeError("You m... | [
"re.split",
"nltk.pos_tag"
] | [((1311, 1354), 're.split', 're.split', (['self.word_split_pattern', 'sentence'], {}), '(self.word_split_pattern, sentence)\n', (1319, 1354), False, 'import re\n'), ((1398, 1417), 'nltk.pos_tag', 'nltk.pos_tag', (['words'], {}), '(words)\n', (1410, 1417), False, 'import nltk\n')] |
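The RangeDict docstring promises range-keyed lookup, but the class body is truncated just after the type check. A hedged sketch of how such a mapping can resolve a point to the range containing it (the storage layout, method details, and error message are assumptions):

from collections.abc import MutableMapping

class RangeDictSketch(MutableMapping):
    def __init__(self, iterable):
        if not isinstance(iterable, dict):
            raise TypeError("expected a dict")
        self._store = dict(iterable)  # keys assumed to be range objects

    def __getitem__(self, key):
        # Return the value whose range key contains `key`.
        for rng, value in self._store.items():
            if key in rng:
                return value
        raise KeyError(key)

    def __setitem__(self, key, value):
        self._store[key] = value

    def __delitem__(self, key):
        del self._store[key]

    def __iter__(self):
        return iter(self._store)

    def __len__(self):
        return len(self._store)

assert RangeDictSketch({range(0, 10): "low", range(10, 100): "high"})[42] == "high"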
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File : live_visualisation.py
# Author : <NAME> <<EMAIL>>
# Date : 10.04.2020
# Last Modified By: <NAME> <<EMAIL>>
from djitellopy.realtime_plot.RealtimePlotter import *
import redis
import numpy as np
import traceback
import matplotlib
#... | [
"numpy.array",
"redis.StrictRedis"
] | [((484, 536), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': '"""localhost"""', 'port': '(6379)', 'db': '(0)'}), "(host='localhost', port=6379, db=0)\n", (501, 536), False, 'import redis\n'), ((2214, 2253), 'numpy.array', 'np.array', (['new_data[:-1]'], {'dtype': 'np.float'}), '(new_data[:-1], dtype=np.float)\... |
import unittest
from conans.errors import ConanException
from conans.model.username import Username
class UsernameTest(unittest.TestCase):
def username_test(self):
Username("userwith-hypens")
self.assertRaises(ConanException, Username, "")
self.assertRaises(ConanException, Username, "A"*... | [
"conans.model.username.Username"
] | [((180, 207), 'conans.model.username.Username', 'Username', (['"""userwith-hypens"""'], {}), "('userwith-hypens')\n", (188, 207), False, 'from conans.model.username import Username\n'), ((332, 350), 'conans.model.username.Username', 'Username', (["('A' * 30)"], {}), "('A' * 30)\n", (340, 350), False, 'from conans.model... |
import torch
import hcat.lib.functional
from hcat.lib.functional import IntensityCellReject
from hcat.backends.backend import Backend
from hcat.models.r_unet import embed_model as RUnet
from hcat.train.transforms import median_filter, erosion
import hcat.lib.utils
from hcat.lib.utils import graceful_exit
import os.pat... | [
"hcat.lib.functional.IntensityCellReject",
"hcat.lib.utils.graceful_exit",
"hcat.models.r_unet.embed_model",
"hcat.train.transforms.erosion",
"torch.tensor",
"hcat.train.transforms.median_filter",
"torch.zeros"
] | [((2732, 2827), 'hcat.lib.utils.graceful_exit', 'graceful_exit', (["('\\x1b[1;31;40m' + 'ERROR: Spatial Embedding Failed. Aborting...' + '\\x1b[0m')"], {}), "('\\x1b[1;31;40m' +\n 'ERROR: Spatial Embedding Failed. Aborting...' + '\\x1b[0m')\n", (2745, 2827), False, 'from hcat.lib.utils import graceful_exit\n'), ((47... |
from mortar_rdb import register_session, get_session
from mortar_rdb.interfaces import ISession
from testfixtures.components import TestComponents
from sqlalchemy.exc import OperationalError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm.session import Session
from sqlalchemy.schema import ... | [
"testfixtures.ShouldRaise",
"sqlalchemy.types.String",
"testfixtures.components.TestComponents",
"mortar_rdb.get_session",
"testfixtures.Comparison",
"zope.component.interfaces.ComponentLookupError",
"mortar_rdb.register_session",
"sqlalchemy.ext.declarative.declarative_base",
"testfixtures.LogCaptu... | [((725, 741), 'testfixtures.components.TestComponents', 'TestComponents', ([], {}), '()\n', (739, 741), False, 'from testfixtures.components import TestComponents\n'), ((762, 780), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (778, 780), False, 'from sqlalchemy.ext.declarative im... |
"""GageRnR.
The input data should be structured
in a 3d array n[i,j,k] where
i = operator, j = part, k = measurement
Stored to file this data would look:
m1 m2 m3
3.29; 3.41; 3.64 # p1 | o1
2.44; 2.32; 2.42 # p2
3.08; 3.25; 3.07 # p1 | o2
2.53; 1.78; 2.32 # p2
3.04; 2.89; 2.85 # p1 | o3
1.62; 1.87; 2.04 # ... | [
"GageRnR.Statistics",
"GageRnR.GageRnR",
"GageRnR.DataLoader",
"GageRnR.Linearity",
"GageRnR.Normality",
"docopt.docopt"
] | [((1960, 2010), 'docopt.docopt', 'docopt', (['__doc__', 'argv'], {'version': 'GageRnR.__version__'}), '(__doc__, argv, version=GageRnR.__version__)\n', (1966, 2010), False, 'from docopt import docopt\n'), ((2677, 2697), 'GageRnR.DataLoader', 'GageRnR.DataLoader', ([], {}), '()\n', (2695, 2697), False, 'import GageRnR\n... |
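The docstring's example measurements map directly onto the n[i,j,k] layout it describes; built explicitly (values taken verbatim from the docstring):

import numpy as np

data = np.array([
    [[3.29, 3.41, 3.64],   # operator 1, part 1
     [2.44, 2.32, 2.42]],  # operator 1, part 2
    [[3.08, 3.25, 3.07],   # operator 2, part 1
     [2.53, 1.78, 2.32]],  # operator 2, part 2
    [[3.04, 2.89, 2.85],   # operator 3, part 1
     [1.62, 1.87, 2.04]],  # operator 3, part 2
])
assert data.shape == (3, 2, 3)  # (operators, parts, measurements)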
import pytest
from wikidict.render import parse_word
from wikidict.utils import process_templates
@pytest.mark.parametrize(
"word, pronunciations, gender, etymology, definitions",
[
("ababalhar", [], "", ["De baba."], ["<i>(popular)</i> babar; conspurcar"]),
(
"alguém",
... | [
"pytest.mark.parametrize",
"wikidict.render.parse_word",
"wikidict.utils.process_templates"
] | [((102, 2951), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""word, pronunciations, gender, etymology, definitions"""', '[(\'ababalhar\', [], \'\', [\'De baba.\'], [\'<i>(popular)</i> babar; conspurcar\']\n ), (\'alguém\', [\'aw.ˈgẽj\'], \'\', [\n \'Do latim <i>alĭquem</i> <sup>(la)</sup>.\'], [\'pes... |
"""
These tests require an AWS account to be set up, but don't require any manual
intervention beyond some initial setup. Also, these tests create instances (which cost
money!). Either `meadowrun-manage install` needs to be set up, or `meadowrun-manage
clean` needs to be run periodically
"""
import asyncio
import date... | [
"meadowrun.aws_integration.ec2_pricing._get_ec2_instance_types",
"meadowrun.meadowrun_pb2.ProcessState",
"meadowrun.aws_integration.grid_tasks_sqs.worker_loop",
"meadowrun.aws_integration.ec2_ssh_keys.ensure_meadowrun_key_pair",
"meadowrun.aws_integration.management_lambdas.adjust_ec2_instances._get_running... | [((5127, 5163), 'pprint.pprint', 'pprint.pprint', (['chosen_instance_types'], {}), '(chosen_instance_types)\n', (5140, 5163), False, 'import pprint\n'), ((1683, 1726), 'meadowrun.run_job.AllocCloudInstance', 'AllocCloudInstance', (['(1)', '(2)', '(80)', '"""EC2"""', 'REGION'], {}), "(1, 2, 80, 'EC2', REGION)\n", (1701,... |
# SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import os
import shutil
import subprocess
from termcolor import cprint
from trace_for_guess.skip import skip
def rescale_file(in_file, out_file, template_file, alg):
"""Regrid a NetCDF file using NCO (i.e. the ncremap command).
... | [
"trace_for_guess.skip.skip",
"subprocess.run",
"shutil.which",
"os.path.isfile",
"termcolor.cprint",
"os.remove"
] | [((1280, 1320), 'trace_for_guess.skip.skip', 'skip', (['[in_file, template_file]', 'out_file'], {}), '([in_file, template_file], out_file)\n', (1284, 1320), False, 'from trace_for_guess.skip import skip\n'), ((1452, 1500), 'termcolor.cprint', 'cprint', (['("Regridding \'%s\'..." % in_file)', '"""yellow"""'], {}), '("Re... |
from typing import Union
import yaml
class ConfigReader:
def __init__(self):
with open("config.yml", "r") as f:
data = yaml.safe_load(f)
self.data = data
def __getattr__(self, __name: str):
s = __name.split("_")
data = self.data
try:
for i in s... | [
"yaml.safe_load"
] | [((146, 163), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (160, 163), False, 'import yaml\n'), ((556, 573), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (570, 573), False, 'import yaml\n')] |
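ConfigReader.__getattr__ splits the attribute name on underscores and walks the nested YAML mapping one key at a time. A hedged sketch of that resolution with a hypothetical two-level config:

import yaml

cfg = yaml.safe_load("database:\n  host: localhost\n")  # hypothetical config.yml contents

# `reader.database_host` would resolve like this:
data = cfg
for part in "database_host".split("_"):
    data = data[part]
assert data == "localhost"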