| code (string, 22 to 1.05M chars) | apis (list, 1 to 3.31k items) | extract_api (string, 75 to 3.25M chars) |
|---|---|---|
from enum import Enum
from typing import Dict, Any
from jwt.algorithms import get_default_algorithms
from cryptography.hazmat._types import (
_PRIVATE_KEY_TYPES,
_PUBLIC_KEY_TYPES,
)
# custom types
PrivateKey = _PRIVATE_KEY_TYPES
PublicKey = _PUBLIC_KEY_TYPES
JWTClaims = Dict[str, Any]
class EncryptionKeyFo... | [
"jwt.algorithms.get_default_algorithms"
] | [((910, 934), 'jwt.algorithms.get_default_algorithms', 'get_default_algorithms', ([], {}), '()\n', (932, 934), False, 'from jwt.algorithms import get_default_algorithms\n')] |
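Each `extract_api` record appears to pair one call site in `code` with the API it resolves to. Judging from the visible rows, the fields are: the call's character span in `code`, the fully qualified API name, the name as written at the call site, the parsed positional and keyword arguments, the argument source text and its span, a flag for whether the import used an alias (`import x as y`), and the import statement itself. A minimal Python sketch of unpacking the record above; the field names are assumptions inferred from the rows, not a documented schema:

```python
# One extract_api record, copied from the row above. All field names are
# inferred from the visible data; the dataset itself does not name them.
entry = (
    (910, 934),                                   # span of the call inside `code`
    'jwt.algorithms.get_default_algorithms',      # fully qualified API name
    'get_default_algorithms',                     # name as written at the call site
    ([], {}),                                     # positional args, keyword args
    '()\n',                                       # argument list source text
    (932, 934),                                   # span of the argument list
    False,                                        # True when imported via `import x as y`
    'from jwt.algorithms import get_default_algorithms\n',  # import statement
)

call_span, qual_name, callsite_name, (args, kwargs), arg_text, arg_span, aliased, import_stmt = entry
print(qual_name)   # jwt.algorithms.get_default_algorithms
print(call_span)   # (910, 934): code[910:934] would recover the call text
```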
# coding=utf8  WeChat official account code
from werobot import WeRoBot
from .models import *
import time
robot = WeRoBot(enable_session=False,
token='<PASSWORD>',
app_id='wx87d17a791d346b6b',
app_secret='8d2d3270d9c8c564056dbe43110a7dce', )
# @robot.handler
# def hello(message):
# retur... | [
"time.localtime",
"werobot.WeRoBot"
] | [((92, 222), 'werobot.WeRoBot', 'WeRoBot', ([], {'enable_session': '(False)', 'token': '"""<PASSWORD>"""', 'app_id': '"""wx87d17a791d346b6b"""', 'app_secret': '"""8d2d3270d9c8c564056dbe43110a7dce"""'}), "(enable_session=False, token='<PASSWORD>', app_id=\n 'wx87d17a791d346b6b', app_secret='8d2d3270d9c8c564056dbe4311... |
import board
import busio
import digitalio
import time
import adafruit_requests as requests
from adafruit_wiznet5k.adafruit_wiznet5k import *
import adafruit_wiznet5k.adafruit_wiznet5k_socket as socket
from adafruit_wiznet5k.adafruit_wiznet5k_ntp import NTP
import adafruit_wiznet5k.adafruit_wiznet5k_dns as dns
... | [
"digitalio.DigitalInOut",
"busio.SPI",
"adafruit_wiznet5k.adafruit_wiznet5k_ntp.NTP",
"time.sleep"
] | [((924, 958), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['board.GP25'], {}), '(board.GP25)\n', (946, 958), False, 'import digitalio\n'), ((1020, 1054), 'digitalio.DigitalInOut', 'digitalio.DigitalInOut', (['W5x00_RSTn'], {}), '(W5x00_RSTn)\n', (1042, 1054), False, 'import digitalio\n'), ((1152, 1184), 'digit... |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.db.models import Max
def set_order(sender, instance, **kwargs):
"""If not set, determine and set the instance's order value."""
from .models import OrderMixin
is_order_subclass = issubclass(instance.__class__, Or... | [
"django.db.models.Max"
] | [((512, 524), 'django.db.models.Max', 'Max', (['"""order"""'], {}), "('order')\n", (515, 524), False, 'from django.db.models import Max\n')] |
import datetime
import logging
import random
import re
import time
from typing import Iterator, List, Union, Dict
from urllib.parse import quote
import pandas as pd
import requests
from bs4 import BeautifulSoup
from .conn_postgresql import ConnPostgreSQL
log = logging.getLogger(__name__)
class HhParser:
"""Пар... | [
"logging.getLogger",
"pandas.isnull",
"pandas.Series",
"requests.Session",
"time.monotonic",
"pandas.merge",
"urllib.parse.quote",
"pandas.DataFrame.from_dict",
"bs4.BeautifulSoup",
"datetime.datetime.now",
"pandas.DataFrame",
"random.random",
"pandas.to_datetime",
"re.search"
] | [((264, 291), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (281, 291), False, 'import logging\n'), ((1178, 1196), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1194, 1196), False, 'import requests\n'), ((3593, 3609), 'time.monotonic', 'time.monotonic', ([], {}), '()\n', (36... |
from bluesky.plan_patterns import spiral_square_pattern
import time as ttime
import numpy as np
import bluesky.plans as bp
from bluesky.plans import rel_spiral_square
from ophyd.sim import NullStatus
# def sample_spiral_scan():
# detectors = [apb_ave]
#
# return general_spiral_scan(detectors, giantxy.x, giant... | [
"numpy.abs",
"time.ctime",
"bluesky.plans.rel_spiral_square",
"numpy.log",
"ophyd.sim.NullStatus",
"bluesky.plans.rel_grid_scan",
"numpy.sum",
"matplotlib.pyplot.figure",
"time.time"
] | [((1561, 1710), 'bluesky.plans.rel_spiral_square', 'rel_spiral_square', (['detectors_list', 'motor1', 'motor2', 'motor1_range', 'motor2_range', 'motor1_nsteps', 'motor2_nsteps'], {'md': "{'plan_name': 'spiral scan'}"}), "(detectors_list, motor1, motor2, motor1_range,\n motor2_range, motor1_nsteps, motor2_nsteps, md=... |
#-*- coding: UTF-8 -*-
from passlib.hash import sha256_crypt
from marshmallow_sqlalchemy import SQLAlchemyAutoSchema
from sqlalchemy import *
from sqlalchemy.types import *
from sqlalchemy.orm import *
from ..engine.db import Base
from .group import Group
class Company(Base):
__table_args__ = { 'schema': 'company... | [
"passlib.hash.sha256_crypt.verify"
] | [((2452, 2526), 'passlib.hash.sha256_crypt.verify', 'sha256_crypt.verify', (['pw', "('$5$rounds=10000' + result.company_basic_password)"], {}), "(pw, '$5$rounds=10000' + result.company_basic_password)\n", (2471, 2526), False, 'from passlib.hash import sha256_crypt\n')] |
import logging
import os
import tempfile
import warnings
from configparser import ConfigParser
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence, Type, Union
from articat.utils.class_or_instance_method import class_or_instance_method
logger = logging.get... | [
"logging.getLogger",
"configparser.ConfigParser",
"pathlib.Path.cwd",
"pathlib.Path.home",
"os.environ.get",
"tempfile.mkdtemp",
"warnings.warn"
] | [((309, 336), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (326, 336), False, 'import logging\n'), ((1171, 1185), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (1183, 1185), False, 'from configparser import ConfigParser\n'), ((1613, 1627), 'configparser.ConfigParser', '... |
import cv2
import time
import os
import matplotlib.pyplot as plt
import torch
from torch import nn
import torchvision.models as models
import torchvision.transforms as transforms
import numpy as np
savepath='./color_heatmap'
if not os.path.exists(savepath):
os.mkdir(savepath)
def draw_features(width, height, x, ... | [
"numpy.argsort",
"matplotlib.pyplot.imshow",
"os.path.exists",
"numpy.max",
"matplotlib.pyplot.close",
"numpy.linspace",
"os.mkdir",
"numpy.min",
"matplotlib.pyplot.axis",
"torchvision.transforms.ToTensor",
"torchvision.models.resnet101",
"cv2.cvtColor",
"torchvision.transforms.Normalize",
... | [((3632, 3657), 'cv2.imread', 'cv2.imread', (['"""example.jpg"""'], {}), "('example.jpg')\n", (3642, 3657), False, 'import cv2\n'), ((3664, 3691), 'cv2.resize', 'cv2.resize', (['img', '(224, 224)'], {}), '(img, (224, 224))\n', (3674, 3691), False, 'import cv2\n'), ((3698, 3734), 'cv2.cvtColor', 'cv2.cvtColor', (['img',... |
import torch
from torch import nn, distributed as dist
from torch.nn import functional as F
class LabelSmoothingLoss(nn.Module):
def __init__(self, ignore_index, eps=0.1, reduction="mean"):
super().__init__()
self.ignore_index = ignore_index
self.eps = eps
self.reduction = reducti... | [
"torch.as_tensor",
"torch.ones",
"torch.log_softmax",
"torch.distributed.all_reduce",
"torch.full_like",
"torch.softmax",
"torch.nonzero",
"torch.sum",
"torch.nn.functional.log_softmax",
"torch.no_grad",
"torch.zeros",
"torch.linspace",
"torch.distributed.get_world_size"
] | [((4219, 4234), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4232, 4234), False, 'import torch\n'), ((415, 440), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['output', '(-1)'], {}), '(output, -1)\n', (428, 440), True, 'from torch.nn import functional as F\n'), ((523, 566), 'torch.full_like', 'torch.ful... |
import random
import torch
import time
import os
import numpy as np
from torch.utils.data import Dataset
from functools import partial
from .utils import dataset_to_dataloader, max_io_workers
from pytorch_transformers.tokenization_bert import BertTokenizer
# the following will be shared on other datasets too if not, ... | [
"pytorch_transformers.tokenization_bert.BertTokenizer.from_pretrained",
"os.path.isfile",
"torch.tensor",
"numpy.zeros",
"numpy.sum",
"functools.partial",
"numpy.concatenate",
"numpy.load",
"torch.zeros",
"numpy.random.shuffle"
] | [((9564, 9658), 'functools.partial', 'partial', (['mean_rgb_unit_norm_transform'], {'mean_rgb': 'mean_rgb', 'unit_norm': 'args.unit_sphere_norm'}), '(mean_rgb_unit_norm_transform, mean_rgb=mean_rgb, unit_norm=args.\n unit_sphere_norm)\n', (9571, 9658), False, 'from functools import partial\n'), ((1616, 1666), 'pytor... |
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: parser_funs
Description :
Author : <NAME>
date:
-------------------------------------------------
Change Activity:
2019/7/28:
-------------------------------------------------
"""
import ... | [
"numpy.where",
"numpy.sum",
"numpy.zeros",
"numpy.argmax"
] | [((520, 556), 'numpy.where', 'np.where', (['(semhead_probs >= 0.5)', '(1)', '(0)'], {}), '(semhead_probs >= 0.5, 1, 0)\n', (528, 556), True, 'import numpy as np\n'), ((584, 629), 'numpy.zeros', 'np.zeros', (['semhead_preds.shape'], {'dtype': 'np.int32'}), '(semhead_preds.shape, dtype=np.int32)\n', (592, 629), True, 'im... |
import functools
import json
import re
from collections import Counter
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
import seaborn as sns
from statics import STRUCTURE_TYPES
sns.set_style("whitegrid")
plt.rcParams["figure.figsize"] = (18, 12)
plt.rcParams["font.size"] ... | [
"networkx.layout.fruchterman_reingold_layout",
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"networkx.traversal.bfs_tree",
"seaborn.set_style",
"networkx.draw_networkx_labels",
"matplotlib.pyplot.xlabel",
"networkx.MultiGraph",
"matplotlib.pyplot.close",
"numpy.random.seed",
"networkx.dfs_post... | [((225, 251), 'seaborn.set_style', 'sns.set_style', (['"""whitegrid"""'], {}), "('whitegrid')\n", (238, 251), True, 'import seaborn as sns\n'), ((325, 345), 'numpy.random.seed', 'np.random.seed', (['(1234)'], {}), '(1234)\n', (339, 345), True, 'import numpy as np\n'), ((3269, 3356), 'pandas.DataFrame', 'pd.DataFrame', ... |
import sys
sys.path.append("..")
from osc_sdk_python import Gateway
gw = Gateway()
res = gw.CreateNet(IpRange='192.168.127.12/32')
error = [error for error in res['Errors'] if error.get('Code') == '4014' and error.get('Details') == 'invalid-block-size']
assert len(error) == 1
| [
"sys.path.append",
"osc_sdk_python.Gateway"
] | [((11, 32), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (26, 32), False, 'import sys\n'), ((73, 82), 'osc_sdk_python.Gateway', 'Gateway', ([], {}), '()\n', (80, 82), False, 'from osc_sdk_python import Gateway\n')] |
""" Classification
Main script for the simulation described in Hyvarinen and Morioka, NIPS 2016.
Perform time-contrastive learning from artificial data.
Source signals are generated based on segment-wise-modulated Laplace distribution (q = |.|).
"""
import os
import pickle
import shutil
from subfunc.... | [
"os.path.exists",
"tcl.tcl_train.train",
"pickle.dump",
"os.makedirs",
"subfunc.generate_artificial_data.generate_artificial_data",
"os.path.join",
"subfunc.preprocessing.pca",
"shutil.rmtree"
] | [((2002, 2037), 'os.path.join', 'os.path.join', (['train_dir', '"""parm.pkl"""'], {}), "(train_dir, 'parm.pkl')\n", (2014, 2037), False, 'import os\n'), ((2688, 2844), 'subfunc.generate_artificial_data.generate_artificial_data', 'generate_artificial_data', ([], {'num_comp': 'num_comp', 'num_segment': 'num_segment', 'nu... |
#! /usr/bin/env python
# _*_ coding: utf-8 _*_
# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
# Filename: EmControllerStatusGetManager.py
import GlobalModule
from EmControllerStatusGetExecutor import EmControllerStatusGetExecutor
from EmControllerStatusGetTimeKeep import EmControllerStatusGetTim... | [
"EmControllerStatusGetTimeKeep.EmControllerStatusGetTimeKeep",
"GlobalModule.EM_CONFIG.read_sys_common_conf"
] | [((1166, 1215), 'EmControllerStatusGetTimeKeep.EmControllerStatusGetTimeKeep', 'EmControllerStatusGetTimeKeep', (['self.roop_interval'], {}), '(self.roop_interval)\n', (1195, 1215), False, 'from EmControllerStatusGetTimeKeep import EmControllerStatusGetTimeKeep\n'), ((1606, 1654), 'GlobalModule.EM_CONFIG.read_sys_commo... |
import sqlalchemy as db
class Queries:
def __init__(self, session):
self.session = session
self.queries = db.Table('AIRAVAT_QUERY_PLAN_INFO',
session.dbEngine.metadata,
autoload=True,
autoload_with=sess... | [
"sqlalchemy.Table",
"sqlalchemy.select"
] | [((128, 249), 'sqlalchemy.Table', 'db.Table', (['"""AIRAVAT_QUERY_PLAN_INFO"""', 'session.dbEngine.metadata'], {'autoload': '(True)', 'autoload_with': 'session.dbEngine.engine'}), "('AIRAVAT_QUERY_PLAN_INFO', session.dbEngine.metadata, autoload=\n True, autoload_with=session.dbEngine.engine)\n", (136, 249), True, 'i... |
# Copyright 2020–2021 Cirq on IQM developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed... | [
"cirq.GridQubit",
"cirq.Circuit",
"cirq.PhasedXZGate",
"cirq.ISwapPowGate",
"pytest.fixture",
"cirq.HPowGate",
"cirq.YPowGate",
"cirq.ZZPowGate",
"cirq_iqm.adonis.Adonis",
"cirq.CZPowGate",
"cirq.ZPowGate",
"cirq.NamedQubit",
"pytest.raises",
"pytest.approx",
"cirq.PhasedXPowGate",
"ci... | [((1247, 1277), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (1261, 1277), False, 'import pytest\n'), ((1336, 1347), 'cirq_iqm.adonis.Adonis', 'ad.Adonis', ([], {}), '()\n', (1345, 1347), True, 'import cirq_iqm.adonis as ad\n'), ((1440, 1468), 'cirq.XPowGate', 'cirq.XPowGat... |
#https://github.com/ec500-software-engineering/exercise-1-modularity-mmark9/blob/master/heart_monitor/main_app.py
import display
import sensor_readers
import prediction_engine
import notification_manager
import notifications_sender
import realtime_data_processor
from multiprocessing import Queue
from common_types impo... | [
"sensor_readers.BloodPulseSensorReader",
"notifications_sender.MockTelegramSender",
"database.InMemorySimpleDatabase",
"common_types.Contact",
"realtime_data_processor.RealTimeDataProcessor",
"notifications_sender.MockEmailSender",
"sensor_readers.BloodOxygenSensorReader",
"prediction_engine.Predictio... | [((704, 711), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (709, 711), False, 'from multiprocessing import Queue\n'), ((722, 751), 'display.TextTerminalDisplay', 'display.TextTerminalDisplay', ([], {}), '()\n', (749, 751), False, 'import display\n'), ((1035, 1059), 'database.InMemorySimpleDatabase', 'InMemorySim... |
import tensorflow as tf
class Module(object):
def __init__(self, l2_reg=False, l2_reg_scale=0.0001, trainable=False):
self._l2_reg = l2_reg
if self._l2_reg:
self._l2_reg_scale = l2_reg_scale
self._trainable = trainable
def Residual(self, x, args):
input = x
... | [
"tensorflow.layers.dense",
"tensorflow.layers.average_pooling2d",
"tensorflow.nn.tanh",
"tensorflow.variable_scope",
"tensorflow.contrib.layers.l2_regularizer",
"tensorflow.layers.max_pooling2d",
"tensorflow.nn.relu",
"tensorflow.layers.flatten",
"tensorflow.nn.leaky_relu",
"tensorflow.layers.conv... | [((2575, 2795), 'tensorflow.layers.conv1d', 'tf.layers.conv1d', ([], {'inputs': 'x', 'filters': 'args[1]', 'kernel_size': 'args[0]', 'strides': 'args[2]', 'padding': 'args[5]', 'activation': 'args[3]', 'dilation_rate': 'args[4]', 'kernel_regularizer': 'regularizer', 'trainable': 'self._trainable', 'name': 'name'}), '(i... |
#! /usr/bin/python3
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed... | [
"logging.basicConfig",
"paths_dag.is_graph_SNI",
"utils.draw_graph",
"paths_dag.is_graph_NI",
"opt_sni.opt_sni",
"graph_tools.add_split_nodes",
"graph_tools.simplified",
"graph_tools.without_edges",
"graph_tools.without_unncessary_splits",
"matplotlib.pyplot.show"
] | [((684, 724), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (703, 724), False, 'import logging\n'), ((1021, 1034), 'utils.draw_graph', 'draw_graph', (['g'], {}), '(g)\n', (1031, 1034), False, 'from utils import draw_graph\n'), ((1035, 1045), 'matplotlib.pyplo... |
# Generated by Django 2.2.5 on 2019-09-16 17:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0005_board_photo'),
]
operations = [
migrations.AlterField(
model_name='board',
name='photo',
fie... | [
"django.db.models.ImageField"
] | [((323, 384), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""images/"""'}), "(blank=True, null=True, upload_to='images/')\n", (340, 384), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/python
#-*-coding:utf-8-*-
#By <NAME>
from numpy import *
from lattice import Lattice
from bzone import BZone
from group import C6vGroup,C4vGroup,C3vGroup
__all__=['Honeycomb_Lattice','Square_Lattice','Triangular_Lattice','Chain','construct_lattice','resize_lattice']
class Honeycomb_Lattice(Lat... | [
"group.C6vGroup",
"group.C4vGroup",
"group.C3vGroup",
"lattice.Lattice"
] | [((1338, 1348), 'group.C6vGroup', 'C6vGroup', ([], {}), '()\n', (1346, 1348), False, 'from group import C6vGroup, C4vGroup, C3vGroup\n'), ((1948, 1958), 'group.C4vGroup', 'C4vGroup', ([], {}), '()\n', (1956, 1958), False, 'from group import C6vGroup, C4vGroup, C3vGroup\n'), ((2208, 2218), 'group.C4vGroup', 'C4vGroup', ... |
import os
# os.environ['CUDA_VISIBLE_DEVICES'] = '7'
import torch
from PIL import Image
from torchvision import transforms
from train_crnn.config import opt
from train_crnn.crnn import crnn
class resizeNormalize(object):
def __init__(self, size, interpolation=Image.BILINEAR):
self.size = size
self.interpolation ... | [
"train_crnn.crnn.crnn.CRNN",
"os.path.exists",
"PIL.Image.open",
"torch.load",
"torchvision.transforms.ToTensor"
] | [((1052, 1085), 'train_crnn.crnn.crnn.CRNN', 'crnn.CRNN', (['img_h', '(1)', 'n_class', '(256)'], {}), '(img_h, 1, n_class, 256)\n', (1061, 1085), False, 'from train_crnn.crnn import crnn\n'), ((1147, 1172), 'os.path.exists', 'os.path.exists', (['modelpath'], {}), '(modelpath)\n', (1161, 1172), False, 'import os\n'), ((... |
import logging
class GpioException(Exception):
def __init__(self,errors):
self.errors = errors
def toString(self):
return ' '.join(self.errors) # str(self.errors).replace('\'','').replace('[','').replace(']','')
class GpioDummy(object):
def __init__(self):
self.args = {'mode':'BCM'... | [
"logging.info",
"logging.debug",
"Process.decorate"
] | [((1116, 1164), 'logging.info', 'logging.info', (["('dummy gpio set warnings %s' % val)"], {}), "('dummy gpio set warnings %s' % val)\n", (1128, 1164), False, 'import logging\n'), ((1319, 1372), 'logging.info', 'logging.info', (["('dummy gpio set %s to %s.' % (key, val))"], {}), "('dummy gpio set %s to %s.' % (key, val... |
#!/usr/bin/env python
# coding: utf-8
import copy
testInstructionString = '''nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6'''
def parseInstructions(instructionsString):
instructions = []
for i in instructionsString.split("\n"):
thisInstruction = {}
thisInstruction["comman... | [
"copy.deepcopy"
] | [((2337, 2364), 'copy.deepcopy', 'copy.deepcopy', (['instructions'], {}), '(instructions)\n', (2350, 2364), False, 'import copy\n')] |
from __future__ import with_statement # this is to work with python2.5
import terapyps
from pyps import workspace
workspace.delete("convol3x3")
with terapyps.workspace("convol3x3.c", name="convol3x3", deleteOnClose=False,recoverInclude=False) as w:
for f in w.fun:
f.terapix_code_generation(debug=True)
# ... | [
"terapyps.workspace",
"pyps.workspace.delete"
] | [((114, 143), 'pyps.workspace.delete', 'workspace.delete', (['"""convol3x3"""'], {}), "('convol3x3')\n", (130, 143), False, 'from pyps import workspace\n'), ((149, 247), 'terapyps.workspace', 'terapyps.workspace', (['"""convol3x3.c"""'], {'name': '"""convol3x3"""', 'deleteOnClose': '(False)', 'recoverInclude': '(False)... |
from pymapper.layer import LayerType, GeoPandasLayer
def test_layer_types():
"""Test the LayerType enum."""
assert list(LayerType.__members__.keys()) == [GeoPandasLayer.LAYER_TYPE]
assert LayerType[GeoPandasLayer.LAYER_TYPE].value == GeoPandasLayer
| [
"pymapper.layer.LayerType.__members__.keys"
] | [((130, 158), 'pymapper.layer.LayerType.__members__.keys', 'LayerType.__members__.keys', ([], {}), '()\n', (156, 158), False, 'from pymapper.layer import LayerType, GeoPandasLayer\n')] |
#
# Copyright (c) 2018-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable la... | [
"tensorflow_serving.apis.predict_pb2.PredictResponse",
"tensorflow.python.framework.dtypes.as_dtype",
"numpy.asarray",
"tensorflow.python.framework.tensor_shape.as_shape",
"ie_serving.logger.get_logger",
"tensorflow.contrib.util.make_ndarray",
"ie_serving.server.constants.INVALID_BATCHSIZE.format"
] | [((1302, 1322), 'ie_serving.logger.get_logger', 'get_logger', (['__name__'], {}), '(__name__)\n', (1312, 1322), False, 'from ie_serving.logger import get_logger\n'), ((4355, 4384), 'tensorflow_serving.apis.predict_pb2.PredictResponse', 'predict_pb2.PredictResponse', ([], {}), '()\n', (4382, 4384), False, 'from tensorfl... |
import numpy as np
import matplotlib.pyplot as plt
# plt.style.use('ggplot')
''' Shot Accuracy Plot
ticks = [5,6,7,8,9,10,11,12,13,14,15]#[1,2,3,4,5]
data_lists = [
[92.35,92.52,93.2,93.71,93.85,94.15,94.22,94.37,94.68,94.73,94.82],
[89.15,89.74,90.41,90.88,91.31,91.47,91.84,92.03,92.2,92.3,92.48],
[86.13... | [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((1704, 1727), 'numpy.arange', 'np.arange', (['(50)', '(1050)', '(50)'], {}), '(50, 1050, 50)\n', (1713, 1727), True, 'import numpy as np\n'), ((2020, 2057), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'fig_size', 'dpi': 'dpi'}), '(figsize=fig_size, dpi=dpi)\n', (2030, 2057), True, 'import matplotlib.py... |
# ---------------------------------------------------------------------------------------------
# MIT License
# Copyright (c) 2020, Solace Corporation, <NAME> (<EMAIL>)
# Copyright (c) 2020, Solace Corporation, <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------------------... | [
"os.path.exists"
] | [((1860, 1899), 'os.path.exists', 'path.exists', (['self._location.root_folder'], {}), '(self._location.root_folder)\n', (1871, 1899), False, 'from os import path\n')] |
from Treap import Treap
from math import log
class IKS:
def __init__(self):
self.treap = None
self.n = [0, 0]
@staticmethod
def KSThresholdForPValue(pvalue, N):
'''Threshold for KS Test given a p-value
Args:
pval (float): p-value.
N (int): the size of the samples.
... | [
"Treap.Treap.Merge",
"Treap.Treap.SplitGreatest",
"math.log",
"Treap.Treap.SplitSmallest",
"Treap.Treap.SumAll",
"Treap.Treap",
"Treap.Treap.SplitKeepRight"
] | [((1986, 2023), 'Treap.Treap.SplitKeepRight', 'Treap.SplitKeepRight', (['self.treap', 'key'], {}), '(self.treap, key)\n', (2006, 2023), False, 'from Treap import Treap\n'), ((2046, 2071), 'Treap.Treap.SplitGreatest', 'Treap.SplitGreatest', (['left'], {}), '(left)\n', (2065, 2071), False, 'from Treap import Treap\n'), (... |
# third-party
from pycallgraph import PyCallGraph
from pycallgraph import GlobbingFilter
from pycallgraph import Config
from pycallgraph.output import GraphvizOutput
# this package
from ape.interface.ubootkommandant import UbootKommandant
subcommand = UbootKommandant()
graphviz = GraphvizOutput()
graphviz.output_fi... | [
"pycallgraph.Config",
"ape.interface.ubootkommandant.UbootKommandant",
"pycallgraph.PyCallGraph",
"pycallgraph.output.GraphvizOutput"
] | [((256, 273), 'ape.interface.ubootkommandant.UbootKommandant', 'UbootKommandant', ([], {}), '()\n', (271, 273), False, 'from ape.interface.ubootkommandant import UbootKommandant\n'), ((285, 301), 'pycallgraph.output.GraphvizOutput', 'GraphvizOutput', ([], {}), '()\n', (299, 301), False, 'from pycallgraph.output import ... |
import json
from Recommender import RecommendationEngine
from numpy import unique
import plotly
import pandas as pd
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar
from load import load_data, load_model
from figures import load_figures
app = Flask(__name... | [
"figures.load_figures",
"flask.render_template",
"flask.request.args.get",
"pandas.read_csv",
"flask.Flask",
"pandas.get_dummies",
"Recommender.RecommendationEngine",
"pandas.DataFrame",
"pandas.concat",
"load.load_model",
"load.load_data"
] | [((308, 323), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (313, 323), False, 'from flask import Flask\n'), ((342, 353), 'load.load_data', 'load_data', ([], {}), '()\n', (351, 353), False, 'from load import load_data, load_model\n'), ((377, 389), 'load.load_model', 'load_model', ([], {}), '()\n', (387, 3... |
import argparse
import os
import random
from PIL import Image
import cv2
import gym
import numpy as np
def save_as_image(observation,
save_dir,
img_name,
prefix="img_"):
    # downscaling the image
im_array = cv2.resize(observation, IMAGE_SIZE)
im = Image... | [
"os.path.exists",
"PIL.Image.fromarray",
"random.choice",
"numpy.mean",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.join",
"cv2.resize",
"gym.make"
] | [((270, 305), 'cv2.resize', 'cv2.resize', (['observation', 'IMAGE_SIZE'], {}), '(observation, IMAGE_SIZE)\n', (280, 305), False, 'import cv2\n'), ((315, 347), 'PIL.Image.fromarray', 'Image.fromarray', (['im_array', '"""RGB"""'], {}), "(im_array, 'RGB')\n", (330, 347), False, 'from PIL import Image\n'), ((487, 512), 'ar... |
'''Algorithms for converting grammars to Chomsky Normal Form.'''
from cfg.core import ContextFreeGrammar, Terminal, Nonterminal, \
ProductionRule, SubscriptedNonterminal
from util.moreitertools import powerset
def is_cnf_rule(r, start):
'''Return whether a production rule is in CNF. Must indi... | [
"cfg.core.SubscriptedNonterminal",
"cfg.core.SubscriptedNonterminal.next_unused",
"cfg.core.ProductionRule",
"cfg.core.ContextFreeGrammar",
"util.moreitertools.powerset"
] | [((1564, 1581), 'util.moreitertools.powerset', 'powerset', (['indices'], {}), '(indices)\n', (1572, 1581), False, 'from util.moreitertools import powerset\n'), ((2224, 2287), 'cfg.core.SubscriptedNonterminal.next_unused', 'SubscriptedNonterminal.next_unused', (['second_name', 'used_variables'], {}), '(second_name, used... |
import torch.nn as nn
from .classification import ClassificationBranch
from .attribute import AttributeBranch
from .baseline import BaselineReidBranch
from .pose import Pose2DHead
from .semantic_segmentation import FpnSemHead
from models import register_model, BaseModel
from builders import model_builder
from models.ut... | [
"models.register_model",
"torch.nn.ModuleList",
"builders.model_builder.build"
] | [((347, 390), 'models.register_model', 'register_model', (['"""multi_head_sem_multi_task"""'], {}), "('multi_head_sem_multi_task')\n", (361, 390), False, 'from models import register_model, BaseModel\n'), ((819, 844), 'torch.nn.ModuleList', 'nn.ModuleList', (['task_heads'], {}), '(task_heads)\n', (832, 844), True, 'imp... |
from datetime import datetime
def execution_time(func):
    def wrapper(*args, **kwargs): # *args and **kwargs --> Doesn't matter the number of arguments
                                  # or keyword arguments given to the nested function
initial_time = datetime.now()
func(*args, **kwargs)
f... | [
"datetime.datetime.now"
] | [((266, 280), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (278, 280), False, 'from datetime import datetime\n'), ((332, 346), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (344, 346), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python3
# Copyright 2019 The NeuroPy Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicabl... | [
"virtualenv.create_environment",
"os.path.exists",
"os.path.join",
"termcolor.cprint"
] | [((1563, 1609), 'os.path.join', 'os.path.join', (['venv_dir', '"""bin/activate_this.py"""'], {}), "(venv_dir, 'bin/activate_this.py')\n", (1575, 1609), False, 'import os\n'), ((1688, 1711), 'termcolor.cprint', 'cprint', (['"""done"""', '"""green"""'], {}), "('done', 'green')\n", (1694, 1711), False, 'from termcolor imp... |
from typing import Tuple
import torch
from candlelight.vector_sampler import VectorSampler
def cubic(
input: torch.Tensor, value: torch.Tensor, domain: Tuple[float, float] = (0, 1)
) -> torch.Tensor:
n = value.size(0) - 1
h = (domain[1] - domain[0]) / n
A = torch.eye(n + 1) + torch.diagflat(torch.fu... | [
"torch.full",
"torch.eye",
"torch.solve",
"torch.pow",
"candlelight.vector_sampler.VectorSampler",
"torch.zeros",
"torch.linspace"
] | [((524, 541), 'torch.solve', 'torch.solve', (['d', 'A'], {}), '(d, A)\n', (535, 541), False, 'import torch\n'), ((557, 588), 'candlelight.vector_sampler.VectorSampler', 'VectorSampler', (['input', 'domain', 'n'], {}), '(input, domain, n)\n', (570, 588), False, 'from candlelight.vector_sampler import VectorSampler\n'), ... |
# -*- coding: utf-8 -*-
# @Author: Administrator
# @Date: 2019-04-28 02:23:29
# @Last Modified by: Administrator
# @Last Modified time: 2019-05-26 23:57:30
__all__ = [
"BotzoneClient",
]
import os
import time
from .const import USER_AGENT
from .const import BOTZONE_URL_HOST, BOTZONE_URL_LOGIN, BOTZONE_U... | [
"time.time"
] | [((3806, 3817), 'time.time', 'time.time', ([], {}), '()\n', (3815, 3817), False, 'import time\n')] |
import praw
import re
import tweepy
import secrets
def cleantitle(title):
    return re.findall(r"^\[.*\](.*)", title)[0]
def parsepost(post):
if not post.is_self:
if "reddituploads" in post.url:
status = ""
else:
status = cleantitle(post.title) + ": " + post.url
else:
status = cleantitle(post.title)
re... | [
"tweepy.API",
"praw.Reddit",
"re.findall",
"secrets.init",
"tweepy.OAuthHandler"
] | [((350, 364), 'secrets.init', 'secrets.init', ([], {}), '()\n', (362, 364), False, 'import secrets\n'), ((376, 448), 'praw.Reddit', 'praw.Reddit', ([], {'user_agent': '"""twitter.com:tweets_iaf v 0.0.2 by /u/umeshunni"""'}), "(user_agent='twitter.com:tweets_iaf v 0.0.2 by /u/umeshunni')\n", (387, 448), False, 'import p... |
import pandas as pd
from joblib import load, dump
from matplotlib import pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.metrics import plot_confusion_matrix
# train the model with inbuilt classifier
def train():
"""Data set reading"""
... | [
"pandas.read_csv",
"joblib.dump",
"sklearn.linear_model.LogisticRegression",
"joblib.load",
"sklearn.metrics.plot_confusion_matrix",
"sklearn.metrics.accuracy_score",
"matplotlib.pyplot.show"
] | [((325, 364), 'pandas.read_csv', 'pd.read_csv', (['"""../dataset/train.csv.csv"""'], {}), "('../dataset/train.csv.csv')\n", (336, 364), True, 'import pandas as pd\n'), ((422, 451), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'n_jobs': '(-1)'}), '(n_jobs=-1)\n', (440, 451), False, 'from sklear... |
# Generated by Django 3.2.6 on 2021-10-12 06:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contact_book', '0003_mer... | [
"django.db.migrations.AlterUniqueTogether",
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((1455, 1555), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether',... |
import numpy as np
def hermitegaussian(coeffs,x,sigma):
xhat = (x/sigma)
herms = np.polynomial.hermite.Hermite(coeffs)
return herms(xhat) * np.exp(-xhat**2)
def continuous_convolve(kernels,obj):
out = np.empty(obj.shape)
    for jj in range(kernels.shape[0]):
out[jj] = np.dot(obj[max(0,jj-cente... | [
"numpy.exp",
"numpy.sum",
"numpy.polynomial.hermite.Hermite",
"numpy.empty"
] | [((90, 127), 'numpy.polynomial.hermite.Hermite', 'np.polynomial.hermite.Hermite', (['coeffs'], {}), '(coeffs)\n', (119, 127), True, 'import numpy as np\n'), ((219, 238), 'numpy.empty', 'np.empty', (['obj.shape'], {}), '(obj.shape)\n', (227, 238), True, 'import numpy as np\n'), ((878, 898), 'numpy.empty', 'np.empty', ([... |
from torch import nn
def init_weight(weight, init, init_range, init_std):
if init == "uniform":
nn.init.uniform_(weight, -init_range, init_range)
elif init == "normal":
nn.init.normal_(weight, 0.0, init_std)
def init_bias(bias):
nn.init.constant_(bias, 0.0)
def weights_init(m, init, in... | [
"torch.nn.init.normal_",
"torch.nn.init.uniform_",
"torch.nn.init.constant_"
] | [((261, 289), 'torch.nn.init.constant_', 'nn.init.constant_', (['bias', '(0.0)'], {}), '(bias, 0.0)\n', (278, 289), False, 'from torch import nn\n'), ((110, 159), 'torch.nn.init.uniform_', 'nn.init.uniform_', (['weight', '(-init_range)', 'init_range'], {}), '(weight, -init_range, init_range)\n', (126, 159), False, 'fro... |
import warnings
from collections import OrderedDict
from pathlib import Path
import numpy as np
import pandas as pd
import torch
from tqdm import tqdm
import librosa
import model_utils
import utils
def long_clip_to_images(y, sample_rate, composer):
len_y = len(y)
start = 0
end = sample_rate * 5
imag... | [
"collections.OrderedDict",
"model_utils.load_pytorch_model",
"utils.noise_reduce",
"torch.Tensor",
"numpy.asarray",
"numpy.argwhere",
"torch.cuda.is_available",
"numpy.concatenate",
"pandas.DataFrame",
"torch.no_grad",
"warnings.filterwarnings",
"librosa.load"
] | [((3641, 3674), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (3664, 3674), False, 'import warnings\n'), ((3740, 3753), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (3751, 3753), False, 'from collections import OrderedDict\n'), ((5386, 5434), 'pandas.DataFr... |
#!/usr/bin/env python
#
#
# Train TuneNet on position-position bouncing ball data, which is very similar to the dataset of Ajay et al 2018.
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.utils
import torch.utils.data
from tune.utils import get_... | [
"numpy.mean",
"torch.abs",
"tune.utils.get_torch_device",
"tune.utils.create_tensorboard_writer",
"torch.nn.MSELoss",
"numpy.zeros",
"torch.tensor",
"torch.zeros",
"matplotlib.pyplot.pause",
"torch.no_grad",
"matplotlib.pyplot.subplots"
] | [((370, 388), 'tune.utils.get_torch_device', 'get_torch_device', ([], {}), '()\n', (386, 388), False, 'from tune.utils import get_torch_device, create_tensorboard_writer\n'), ((398, 425), 'tune.utils.create_tensorboard_writer', 'create_tensorboard_writer', ([], {}), '()\n', (423, 425), False, 'from tune.utils import ge... |
# _ __
# | |/ /___ ___ _ __ ___ _ _ ®
# | ' </ -_) -_) '_ \/ -_) '_|
# |_|\_\___\___| .__/\___|_|
# |_|
#
# <NAME>
# Copyright 2015 Keeper Security Inc.
# Contact: <EMAIL>
#
import yubico
def get_response(challenge):
try:
YK = yubico.find_yubikey()
response = YK.chal... | [
"yubico.find_yubikey",
"yubico.yubico_util.hexdump"
] | [((272, 293), 'yubico.find_yubikey', 'yubico.find_yubikey', ([], {}), '()\n', (291, 293), False, 'import yubico\n'), ((566, 613), 'yubico.yubico_util.hexdump', 'yubico.yubico_util.hexdump', (['response'], {'length': '(20)'}), '(response, length=20)\n', (592, 613), False, 'import yubico\n')] |
#!/usr/bin/python3
'''
Abstract:
    This is a program to show the data with different true and predicted labels
Usage:
plot_sed.py [main_name] [true label] [pred label]
Example:
plot_sed.py MaxLoss15 1 2
Editor:
Jacob975
##################################
# Python3 #
# This code is mad... | [
"load_lib.confusion_matrix",
"load_lib.load_cls_true",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"numpy.where",
"numpy.argmax",
"collections.Counter",
"numpy.array",
"numpy.append",
"load_lib.load_cls_pred",
"load_lib.load_arrangement",
"time.time",
"glob.glob"
] | [((1239, 1250), 'time.time', 'time.time', ([], {}), '()\n', (1248, 1250), False, 'import time\n'), ((1455, 1467), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1463, 1467), True, 'import numpy as np\n'), ((1508, 1520), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (1516, 1520), True, 'import numpy as np\n'),... |
# -*- coding: utf-8 -*-
"""
==========
"""
# import standard libraries
import os
# import third-party libraries
import numpy as np
import matplotlib.pyplot as plt
from colour import write_image, read_image
# import my libraries
import test_pattern_generator2 as tpg
import transfer_functions as tf
import plot_utili... | [
"matplotlib.pyplot.savefig",
"numpy.ones",
"colour.write_image",
"test_pattern_generator2.shaper_func_log2_to_linear",
"colour.read_image",
"matplotlib.pyplot.legend",
"transfer_functions.eotf_to_luminance",
"matplotlib.pyplot.close",
"plot_utility.log_scale_settings",
"numpy.linspace",
"os.path... | [((661, 685), 'numpy.ones', 'np.ones', (['(1080, 1920, 3)'], {}), '((1080, 1920, 3))\n', (668, 685), True, 'import numpy as np\n'), ((708, 760), 'colour.write_image', 'write_image', (['img', '"""test_src.tif"""'], {'bit_depth': '"""uint16"""'}), "(img, 'test_src.tif', bit_depth='uint16')\n", (719, 760), False, 'from co... |
import json
from src.lib.application.webApp.action import Action
from src.applications.models.json.user import UserModel, UserExpenses, UserExpense
from src.lib.application.models.json.model import BaseResponse,ModelErrorResponse
class ModelExampleAction(Action):
'''
    demonstration action
    shows... | [
"src.applications.models.json.user.UserExpense",
"src.lib.application.models.json.model.ModelErrorResponse",
"json.loads",
"src.lib.application.models.json.model.BaseResponse"
] | [((2161, 2174), 'src.applications.models.json.user.UserExpense', 'UserExpense', ([], {}), '()\n', (2172, 2174), False, 'from src.applications.models.json.user import UserModel, UserExpenses, UserExpense\n'), ((1923, 1952), 'json.loads', 'json.loads', (['self.request.body'], {}), '(self.request.body)\n', (1933, 1952), F... |
import base64
import distutils.version
import random
from lbrynet.core.cryptoutils import get_lbry_hash_obj
blobhash_length = get_lbry_hash_obj().digest_size * 2 # digest_size is in bytes, and blob hashes are hex encoded
def generate_id(num=None):
h = get_lbry_hash_obj()
if num is not None:
h.upd... | [
"lbrynet.core.cryptoutils.get_lbry_hash_obj",
"base64.b64encode",
"random.getrandbits"
] | [((263, 282), 'lbrynet.core.cryptoutils.get_lbry_hash_obj', 'get_lbry_hash_obj', ([], {}), '()\n', (280, 282), False, 'from lbrynet.core.cryptoutils import get_lbry_hash_obj\n'), ((130, 149), 'lbrynet.core.cryptoutils.get_lbry_hash_obj', 'get_lbry_hash_obj', ([], {}), '()\n', (147, 149), False, 'from lbrynet.core.crypt... |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PIL import Image
import nfp
import argparse
import os
# default resize width/height when converting image -> nfp
DEFAULT_WIDTH, DEFAULT_HEIGHT = 164, 81
desc = (
"Convert standard image files to ComputerCraft nfp files, and vice "
"versa. Input file type is ident... | [
"PIL.Image.open",
"argparse.ArgumentParser",
"os.path.splitext",
"nfp.img_to_nfp",
"nfp.nfp_to_img",
"os.remove"
] | [((1235, 1276), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (1258, 1276), False, 'import argparse\n'), ((2302, 2324), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (2318, 2324), False, 'import os\n'), ((2517, 2541), 'nfp.nfp_to_img',... |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""=================================================================
@Project : Algorithm_YuweiYin/LeetCode-All-Solution/Python3
@File : LC-1679-Max-Number-of-K-Sum-Pairs.py
@Author : [YuweiYin](https://github.com/YuweiYin)
@Date : 2022-05-04
=========================... | [
"time.process_time"
] | [((3330, 3349), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3347, 3349), False, 'import time\n'), ((3402, 3421), 'time.process_time', 'time.process_time', ([], {}), '()\n', (3419, 3421), False, 'import time\n')] |
import logging
from .utils.plugins import Plugins, Proxy
from .utils.decos import GenLimiter
from typing import Iterator
class ProxyQuery(Plugins):
"""Handles the querying and operations of plugins"""
@GenLimiter
def exec_iter_plugin(self, method_name: str, sort_asc_fails: bool = True, *args, **kwargs) -... | [
"logging.info"
] | [((863, 938), 'logging.info', 'logging.info', (['f"""FreeProxyScraper plugin "{plugin.plugin_name}" has crashed"""'], {}), '(f\'FreeProxyScraper plugin "{plugin.plugin_name}" has crashed\')\n', (875, 938), False, 'import logging\n')] |
"""
Author: <NAME>
Date: 6th/July/2020
Copyright: <NAME>, 2020
email: <EMAIL>
website: https://johdev.com
"""
import numpy as np
import pandas as pd
class FeatureGenerator(object):
"""A feature engineering generator to create additional feature to DataFrame
Group by each label encoded column, compute ... | [
"pandas.merge"
] | [((2594, 2640), 'pandas.merge', 'pd.merge', (['df', 'new_cols_df'], {'on': 'keys', 'how': '"""left"""'}), "(df, new_cols_df, on=keys, how='left')\n", (2602, 2640), True, 'import pandas as pd\n')] |
import math
import numpy as np
import matplotlib.pyplot as plt
import csv
import os
logPath = './online/log/9x16_test/'
nameList = ['Alien', 'Conan1', 'Conan2', 'Cooking', 'Rhinos', 'Skiing', 'Surfing', 'War']
num = 48
batch_size = 4
tileList = []
tileListList = [[] for j in range(len(nameList))]
for idx, f in enu... | [
"numpy.mean",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"csv.writer",
"matplotlib.pyplot.bar",
"matplotlib.pyplot.tight_layout",
"csv.reader",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((883, 912), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(20, 8)'}), '(figsize=(20, 8))\n', (895, 912), True, 'import matplotlib.pyplot as plt\n'), ((913, 946), 'matplotlib.pyplot.bar', 'plt.bar', (['x', 'tileList'], {'width': 'width'}), '(x, tileList, width=width)\n', (920, 946), True, 'import mat... |
import logging
import math
import time
import re
import os
from PIL import Image, ImageDraw, ImageFont
from otter_buddy.constants import FONT_PATH
logger = logging.getLogger(__name__)
# Based on RFC 5322 Official Standard
# Ref: https://www.ietf.org/rfc/rfc5322.txt
email_regex: str = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9... | [
"logging.getLogger",
"PIL.Image.new",
"PIL.ImageFont.truetype",
"os.path.realpath",
"PIL.ImageDraw.Draw",
"re.search"
] | [((159, 186), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (176, 186), False, 'import logging\n'), ((1362, 1391), 're.search', 're.search', (['email_regex', 'email'], {}), '(email_regex, email)\n', (1371, 1391), False, 'import re\n'), ((1513, 1537), 'PIL.Image.new', 'Image.new', (['"""R... |
"""Module for IQ option websocket."""
import json
import logging
import websocket
import iqoptionapi.constants as OP_code
import iqoptionapi.global_value as global_value
class WebsocketClient(object):
"""Class for work with IQ option websocket."""
def __init__(self, api):
"""
... | [
"logging.getLogger",
"iqoptionapi.constants.ACTIVES.keys",
"iqoptionapi.constants.ACTIVES.values",
"websocket.WebSocketApp"
] | [((468, 610), 'websocket.WebSocketApp', 'websocket.WebSocketApp', (['self.api.wss_url'], {'on_message': 'self.on_message', 'on_error': 'self.on_error', 'on_close': 'self.on_close', 'on_open': 'self.on_open'}), '(self.api.wss_url, on_message=self.on_message,\n on_error=self.on_error, on_close=self.on_close, on_open=s... |
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
# Create your models here.
#storing user profile data
class UserProfileInfo(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
#storing posts in the database
class notes(models.Model):... | [
"django.db.models.OneToOneField",
"django.db.models.TextField",
"django.utils.timezone.now",
"django.db.models.ImageField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((210, 262), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (230, 262), False, 'from django.db import models\n'), ((798, 830), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)... |
from embuilder.builder import EMB
prefixes = dict(
rdf = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" ,
rdfs = "http://www.w3.org/2000/01/rdf-schema#" ,
obo = "http://purl.obolibrary.org/obo/" ,
sio = "http://semanticscience.org/resource/" ,
xsd = "http://www.w3.org/2001/XMLSchema#",
this = "http://my_exa... | [
"embuilder.builder.EMB"
] | [((1864, 1895), 'embuilder.builder.EMB', 'EMB', (['config', 'prefixes', 'triplets'], {}), '(config, prefixes, triplets)\n', (1867, 1895), False, 'from embuilder.builder import EMB\n')] |
from base64 import b64decode
with open('good_luck.dat', 'r') as f:
data = f.readlines()
for i, line in enumerate(data):
data[i] = line.split('|')[1]
b64answ = ''.join(data)
with open('base64_1.txt', 'wb') as f:
f.write(b64decode(b64answ))
with open('base64_1.txt', 'r') as f:
data = f.read().split(... | [
"base64.b64decode"
] | [((234, 252), 'base64.b64decode', 'b64decode', (['b64answ'], {}), '(b64answ)\n', (243, 252), False, 'from base64 import b64decode\n'), ((370, 386), 'base64.b64decode', 'b64decode', (['piece'], {}), '(piece)\n', (379, 386), False, 'from base64 import b64decode\n')] |
# -*- coding: UTF-8 -*-
from spider import *
class Prepare:
def __init__(self,col=None):
self.url = BASE_URL
self.col = col
self.content_type = 'application/json; charset=utf-8'
self.collect = mongodb.db[self.col]
def update_one(self,matchid,item):
""... | [
"threading.Thread"
] | [((22897, 22927), 'threading.Thread', 'threading.Thread', ([], {'target': 'main1'}), '(target=main1)\n', (22913, 22927), False, 'import threading\n'), ((22939, 22969), 'threading.Thread', 'threading.Thread', ([], {'target': 'main2'}), '(target=main2)\n', (22955, 22969), False, 'import threading\n')] |
from __future__ import annotations
import asyncio
import json
import logging
import os
import pathlib
import signal
import sys
import textwrap
import traceback
from concurrent.futures import ProcessPoolExecutor
from dataclasses import dataclass, field
from typing import (Any, ClassVar, Dict, Generator, Iterable, List,... | [
"logging.getLogger",
"apischema.deserialize",
"signal.signal",
"traceback.format_exc",
"os.path.isabs",
"pathlib.Path",
"pathlib.Path.cwd",
"apischema.serialize",
"concurrent.futures.ProcessPoolExecutor",
"sys.exit",
"json.load",
"dataclasses.field",
"asyncio.as_completed"
] | [((1147, 1174), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1164, 1174), False, 'import logging\n'), ((2900, 2927), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (2905, 2927), False, 'from dataclasses import dataclass, field\n'), ((3041, ... |
import os
import time
import numpy as np
import torch
import torch.optim as optim
from tensorboardX import SummaryWriter
from torch.utils.data import DataLoader
from tqdm import tqdm
from datasets.segDataSet import COVID19_SegDataSet
from datasets.segDataSetNormalize import COVID19_SegDataSetNormalize
from models.mod... | [
"utils.Metrics.enhanced_mixing_loss",
"torch.cuda.manual_seed_all",
"os.path.exists",
"torch.cuda.get_device_properties",
"tensorboardX.SummaryWriter",
"os.makedirs",
"segConfig.getConfig",
"torch.load",
"torch.cuda.device_count",
"torch.cuda.set_device",
"models.model.U2NET",
"torch.cuda.is_a... | [((607, 696), 'utils.Metrics.enhanced_mixing_loss', 'enhanced_mixing_loss', (['d0', 'labels_v', 'weight', 'device'], {'alpha': '(0.5)', 'n_classes': 'num_classes'}), '(d0, labels_v, weight, device, alpha=0.5, n_classes=\n num_classes)\n', (627, 696), False, 'from utils.Metrics import enhanced_mixing_loss\n'), ((713,... |
from rest_framework import viewsets
from category.models import Category
from category.serializers import CategorySerializer
class CategoryViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Category.objects.all()
serializer_class = CategorySerializer
| [
"category.models.Category.objects.all"
] | [((197, 219), 'category.models.Category.objects.all', 'Category.objects.all', ([], {}), '()\n', (217, 219), False, 'from category.models import Category\n')] |
import time
import datetime
import json
import pandas as pd
from pandas import DataFrame
import os
import collections
import numpy as np
import csv
import nltk.classify
import nltk.metrics
"""
to save user-user similarity matrix as a whole
"""
index_list = ["1", "2", "3", "4", "5",... | [
"csv.writer"
] | [((725, 744), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (735, 744), False, 'import csv\n')] |
#!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the Lic... | [
"PyQt4.QtGui.QApplication",
"re.escape",
"re.compile",
"PyQt4.QtCore.SIGNAL",
"PyQt4.QtCore.pyqtSignature"
] | [((1436, 1460), 'PyQt4.QtCore.pyqtSignature', 'pyqtSignature', (['"""QString"""'], {}), "('QString')\n", (1449, 1460), False, 'from PyQt4.QtCore import Qt, SIGNAL, pyqtSignature\n'), ((2016, 2033), 'PyQt4.QtCore.pyqtSignature', 'pyqtSignature', (['""""""'], {}), "('')\n", (2029, 2033), False, 'from PyQt4.QtCore import ... |
# -*- coding: utf-8 -*-
import numpy as np
import scipy.io as sio
import glob
import os
import torch
import torch.utils.data
import torchvision.transforms.functional
import cv2
def read_dataset(path):
"""
Read training dataset or validation dataset.
:param path: The path of dataset.
:return: The list of filenames... | [
"numpy.logical_not",
"os.path.join",
"numpy.max",
"numpy.exp",
"numpy.zeros",
"numpy.transpose",
"cv2.imread"
] | [((2187, 2220), 'numpy.exp', 'np.exp', (['(-D2 / 2.0 / sigma / sigma)'], {}), '(-D2 / 2.0 / sigma / sigma)\n', (2193, 2220), True, 'import numpy as np\n'), ((351, 385), 'os.path.join', 'os.path.join', (['path', '"""images/*.jpg"""'], {}), "(path, 'images/*.jpg')\n", (363, 385), False, 'import os\n'), ((1042, 1074), 'nu... |
#!/usr/bin/env python3
from collections import Counter
from decimal import Decimal
import csv
import os
import sys
def process_trips(trip_ids):
route_ids = set()
shape_ids = set()
service_ids = set()
with open('trips.txt', 'r') as f:
reader = csv.reader(f)
filtered_rows = []
f... | [
"os.makedirs",
"csv.writer",
"collections.Counter",
"csv.reader",
"decimal.Decimal"
] | [((1213, 1222), 'collections.Counter', 'Counter', ([], {}), '()\n', (1220, 1222), False, 'from collections import Counter\n'), ((4447, 4484), 'os.makedirs', 'os.makedirs', (['"""cleaned"""'], {'exist_ok': '(True)'}), "('cleaned', exist_ok=True)\n", (4458, 4484), False, 'import os\n'), ((4615, 4635), 'decimal.Decimal', ... |
from concurrent import futures
import base64
import time
import os
import model_pb2
import model_pb2_grpc
import predict as predict_fn
import grpc
class PredictService(model_pb2_grpc.PredictServiceServicer):
# def GetEncode(self, request, context):
# return test_pb2.encodetext(enctransactionID =... | [
"model_pb2.response",
"predict.predict",
"concurrent.futures.ThreadPoolExecutor",
"time.sleep",
"model_pb2_grpc.add_PredictServiceServicer_to_server",
"model_pb2.output"
] | [((2907, 2975), 'model_pb2_grpc.add_PredictServiceServicer_to_server', 'model_pb2_grpc.add_PredictServiceServicer_to_server', (['service', 'server'], {}), '(service, server)\n', (2958, 2975), False, 'import model_pb2_grpc\n'), ((971, 1018), 'model_pb2.response', 'model_pb2.response', ([], {'status': '"""SetProxy Sucess... |
"""Events API views"""
#DRF
from rest_framework.views import APIView
from rest_framework import status, mixins, viewsets
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework.decorators import api_view
from rest_framework import generics
from operator import itemget... | [
"cride.users.serializers.ExchangeModelSerializer",
"rest_framework.response.Response",
"rest_framework.decorators.api_view",
"cride.users.models.Exchange.objects.get"
] | [((571, 589), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (579, 589), False, 'from rest_framework.decorators import api_view\n'), ((637, 686), 'cride.users.models.Exchange.objects.get', 'Exchange.objects.get', ([], {'id': "request.data['exchange']"}), "(id=request.data['exchang... |
# Copyright (c) 2008-2009 <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the... | [
"random.randint"
] | [((2515, 2538), 'random.randint', 'randint', (['min_wt', 'max_wt'], {}), '(min_wt, max_wt)\n', (2522, 2538), False, 'from random import randint\n')] |
# Generated by Django 4.0 on 2021-12-16 19:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("episodes", "0021_auto_20210922_1012"),
]
operations = [
migrations.AddField(
model_name="episode",
name="link",
... | [
"django.db.models.URLField"
] | [((332, 387), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'max_length': '(2083)', 'null': '(True)'}), '(blank=True, max_length=2083, null=True)\n', (347, 387), False, 'from django.db import migrations, models\n')] |
import datetime as dt
from celery_store.mixins import PeriodicTaskMixin, TaskScheduleMixin
from celery import current_app as app
from celery.schedules import crontab
@app.task(name='sum-of-two-numbers')
def add(x, y):
return x + y
class PeriodicTask(PeriodicTaskMixin):
@property
def name(self):
... | [
"datetime.datetime.now",
"datetime.timedelta",
"celery.current_app.task",
"celery.schedules.crontab"
] | [((171, 206), 'celery.current_app.task', 'app.task', ([], {'name': '"""sum-of-two-numbers"""'}), "(name='sum-of-two-numbers')\n", (179, 206), True, 'from celery import current_app as app\n'), ((944, 956), 'celery.schedules.crontab', 'crontab', (['"""*"""'], {}), "('*')\n", (951, 956), False, 'from celery.schedules impo... |
from flask import render_template,redirect,request,url_for,abort,flash
from . import main
#from PIL import Image
from flask_login import login_required,current_user
from ..models import User,Post,Comment
from .forms import UpdateProfile,PostForm,AddCommentForm#,UpdateAccountForm
from .. import db
from .. import db,phot... | [
"flask.render_template",
"flask.flash",
"app.request.getQuotes",
"flask.url_for",
"flask.abort"
] | [((404, 415), 'app.request.getQuotes', 'getQuotes', ([], {}), '()\n', (413, 415), False, 'from app.request import getQuotes\n'), ((729, 786), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title', 'quotes': 'quotes'}), "('index.html', title=title, quotes=quotes)\n", (744, 786), False, 'fr... |
"""create user table
Revision ID: <PASSWORD>
Revises:
Create Date: 2021-03-16 21:26:48.338701
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<PASSWORD>'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
... | [
"alembic.op.drop_table",
"sqlalchemy.func.now",
"sqlalchemy.Column",
"sqlalchemy.DateTime"
] | [((1091, 1113), 'alembic.op.drop_table', 'op.drop_table', (['"""users"""'], {}), "('users')\n", (1104, 1113), False, 'from alembic import op\n'), ((338, 395), 'sqlalchemy.Column', 'sa.Column', (['"""id"""', 'sa.Integer'], {'primary_key': '(True)', 'index': '(True)'}), "('id', sa.Integer, primary_key=True, index=True)\n... |
from twilio.rest import Client
def sendotp(otpreci):
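    # deliver the OTP as a WhatsApp message via Twilio (from_ is Twilio's shared sandbox number)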
account_sid = 'ACe8caad8112e2135294377d739ce3e9b9'
auth_token = '<PASSWORD>'
client = Client(account_sid, auth_token)
msg='OTP for Login : ' + str(otpreci)
message = client.messages.create(
from_='whatsapp:+14155238886',
body= msg ... | [
"twilio.rest.Client"
] | [((153, 184), 'twilio.rest.Client', 'Client', (['account_sid', 'auth_token'], {}), '(account_sid, auth_token)\n', (159, 184), False, 'from twilio.rest import Client\n')] |
# Generated by Django 3.0.7 on 2020-07-30 04:32
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Goods',
... | [
"django.db.models.DateField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.SmallIntegerField",
"django.db.models.ImageField",
"django.db.models.DecimalField",
"django.db.models.URLField",
"django.db... | [((6195, 6305), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_constraint': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""goods.GoodsType"""'}), "(db_constraint=False, on_delete=django.db.models.deletion.\n CASCADE, to='goods.GoodsType')\n", (6212, 6305), False, 'from django.d... |
import urllib.request as ur
from bs4 import BeautifulSoup as soup
def find_productF(search):
header = { 'User-Agent' : 'Mozilla/5.0' }
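    # obfuscated CSS class names from Flipkart's result markup (likely the title/price hooks); these rotate often, so the scraper goes stale quickly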
link=["_3wU53n","_2cLu-l","_2B_pmu"]
search=search.replace(" ","%20")
urlF= "https://www.flipkart.com/search?q={}&otracker=search&otracker1=search&marketplace=... | [
"bs4.BeautifulSoup",
"urllib.request.Request",
"urllib.request.urlopen"
] | [((372, 402), 'urllib.request.Request', 'ur.Request', (['urlF', 'None', 'header'], {}), '(urlF, None, header)\n', (382, 402), True, 'import urllib.request as ur\n'), ((416, 431), 'urllib.request.urlopen', 'ur.urlopen', (['req'], {}), '(req)\n', (426, 431), True, 'import urllib.request as ur\n'), ((494, 519), 'bs4.Beaut... |
# https://practice.geeksforgeeks.org/problems/save-ironman/0
import re
a = 'Ab?/Ba'
pattern = r'\w+'
matches = re.findall(pattern, a)
the_string = ''.join(matches)
print(the_string)
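# palindrome check: compare the first half of the cleaned string with the reversed second half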
half_len = len(the_string) // 2
part_1 = list(the_string[:half_len+1])
part_2 = list(the_string[-half_len:])
part_2.reverse()
for char1, ... | [
"re.findall"
] | [((112, 134), 're.findall', 're.findall', (['pattern', 'a'], {}), '(pattern, a)\n', (122, 134), False, 'import re\n')] |
import json
import requests
import time
import webbrowser
import sys
from logger import Logger
class DiscordApi:
def __init__(self):
self.api_endpoint = 'https://discord.com/api/v9'
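        # client_id, guild_id and the auth token all come from a local secrets.json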
with open("secrets.json") as f:
self.secrets = json.load(f)
self.headers = {"Authorization": f"... | [
"requests.patch",
"webbrowser.open",
"requests.get",
"time.sleep",
"logger.Logger.log",
"json.load"
] | [((421, 619), 'webbrowser.open', 'webbrowser.open', (['f"""{self.api_endpoint}/oauth2/authorize?client_id={self.secrets[\'client_id\']}&scope=bot&permissions=134217728&guild_id={self.secrets[\'guild_id\']}&disable_guild_select=true"""'], {}), '(\n f"{self.api_endpoint}/oauth2/authorize?client_id={self.secrets[\'clie... |
#!/usr/bin/env python
"""
Setup file created
"""
from setuptools import setup
setup(name='pyqm',
version='0.1',
description='',
url='https://github.com/suracefm/pyqm',
author='<NAME>',
license='BSD 3 New',
packages = ['pyqm']
)
| [
"setuptools.setup"
] | [((79, 235), 'setuptools.setup', 'setup', ([], {'name': '"""pyqm"""', 'version': '"""0.1"""', 'description': '""""""', 'url': '"""https://github.com/suracefm/pyqm"""', 'author': '"""<NAME>"""', 'license': '"""BSD 3 New"""', 'packages': "['pyqm']"}), "(name='pyqm', version='0.1', description='', url=\n 'https://githu... |
#!/usr/bin/env python3
import sys
import rospy
import cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
class ImageGrabber:
def __init__(self):
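        # CvBridge converts incoming sensor_msgs/Image messages into OpenCV images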
self.bridge = CvBridge()
self.image_sub = rospy.Subscriber("main_camera/image_raw", Image, self.callback)
def ca... | [
"rospy.init_node",
"cv2.imshow",
"cv_bridge.CvBridge",
"cv2.destroyAllWindows",
"rospy.spin",
"rospy.Subscriber",
"cv2.waitKey"
] | [((599, 647), 'rospy.init_node', 'rospy.init_node', (['"""image_grabber"""'], {'anonymous': '(True)'}), "('image_grabber', anonymous=True)\n", (614, 647), False, 'import rospy\n'), ((743, 766), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (764, 766), False, 'import cv2\n'), ((208, 218), 'cv_bridg... |
import csv
from site_crawler.cleaner.cleaner import Cleaner
class Dataset_Builder:
def __init__(self):
self.cleaner = Cleaner()
self.create_csv_headers()
def create_csv_headers(self):
csv_files = [
'negative_sentiment',
'positive_sentiment',
'dataset... | [
"site_crawler.cleaner.cleaner.Cleaner",
"csv.writer",
"csv.DictReader"
] | [((131, 140), 'site_crawler.cleaner.cleaner.Cleaner', 'Cleaner', ([], {}), '()\n', (138, 140), False, 'from site_crawler.cleaner.cleaner import Cleaner\n'), ((950, 963), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (960, 963), False, 'import csv\n'), ((1320, 1343), 'csv.DictReader', 'csv.DictReader', (['csvfile'],... |
import filecmp
import os
def are_files_equal(file1, file2):
"""
    :param file1: path to the first file, relative to the current working directory (it is concatenated directly onto os.getcwd(), so it should start with a path separator)
    :param file2: path to the second file, same convention as file1
    :return: bool - True if the files' contents are equal
"""
file1_input = open(os.getcwd() + file1, "r")
file2_input = open(os.getcwd() + file2, "r")
line1 = file1_input.readlines()
line2 = file2_input.re... | [
"os.getcwd"
] | [((183, 194), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (192, 194), False, 'import os\n'), ((232, 243), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (241, 243), False, 'import os\n')] |
_this_pdbrc_unic_459f279ea52642e4bd9f38b32ad284e9 = 1
# enable simple tab completion (do it on top to avoid side effects)
import pdb
import rlcompleter
pdb.Pdb.complete = rlcompleter.Completer(locals()).complete
# preserve indentation! respect yourself
import bdb
import linecache
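# patch bdb.Bdb.format_stack_entry (original saved as _format_stack_entry_bak) so listed source lines keep their real indentation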
if (not hasattr(bdb.Bdb , "_format_stack_entry_... | [
"linecache.getline",
"bdb.Bdb._format_stack_entry_bak"
] | [((638, 712), 'bdb.Bdb._format_stack_entry_bak', 'bdb.Bdb._format_stack_entry_bak', (['self', 'frame_lineno', "('\\n%3d: ' % lineno)"], {}), "(self, frame_lineno, '\\n%3d: ' % lineno)\n", (669, 712), False, 'import bdb\n'), ((786, 838), 'linecache.getline', 'linecache.getline', (['filename', 'lineno', 'frame.f_gl... |
import pandas as pd
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
from tensorflow import keras
from pandas.plotting import autocorrelation_plot
from keras import Sequential
from tensorflow.python.keras.layers.recurrent import LSTM
from sklearn.preprocessing import MinMaxScaler
from... | [
"keras.Sequential",
"pandas.read_csv",
"sklearn.preprocessing.OneHotEncoder",
"numpy.asarray",
"tensorflow.python.keras.layers.recurrent.LSTM",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.layers.Dense"
] | [((412, 489), 'pandas.read_csv', 'pd.read_csv', (['"""C:\\\\Users\\\\Michael\\\\Desktop\\\\pwrball_rand\\\\pwr_ball - Copy.csv"""'], {}), "('C:\\\\Users\\\\Michael\\\\Desktop\\\\pwrball_rand\\\\pwr_ball - Copy.csv')\n", (423, 489), True, 'import pandas as pd\n'), ((1243, 1255), 'keras.Sequential', 'Sequential', ([], {}... |
import boto3
def get_lambda_client(access_key, secret_key, region):
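    # Lambda client bound to explicit credentials/region rather than the default provider chain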
return boto3.client(
"lambda",
region_name=region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
def check_function_exists(function_name, access_key, secret_key, region):
client = get_lambda... | [
"boto3.client"
] | [((81, 191), 'boto3.client', 'boto3.client', (['"""lambda"""'], {'region_name': 'region', 'aws_access_key_id': 'access_key', 'aws_secret_access_key': 'secret_key'}), "('lambda', region_name=region, aws_access_key_id=access_key,\n aws_secret_access_key=secret_key)\n", (93, 191), False, 'import boto3\n')] |
# Generated by Django 3.1 on 2020-09-20 22:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0007_auto_20200920_1813'),
]
operations = [
migrations.AddField(
model_name='student',
name='group',
... | [
"django.db.migrations.DeleteModel",
"django.db.models.ManyToManyField"
] | [((398, 435), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""PartOf"""'}), "(name='PartOf')\n", (420, 435), False, 'from django.db import migrations, models\n'), ((328, 377), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'null': '(True)', 'to': '"""api.Group"""'}),... |
from setuptools import setup
setup(name='geomle',
version='1.0',
description='Intrinsic dimension',
url='https://github.com/premolab/GeoMLE',
author='<NAME>, <NAME>',
author_email='<EMAIL> ',
license='MIT',
packages=['geomle'],
install_requires=[
'numpy>=1.13.1... | [
"setuptools.setup"
] | [((30, 336), 'setuptools.setup', 'setup', ([], {'name': '"""geomle"""', 'version': '"""1.0"""', 'description': '"""Intrinsic dimension"""', 'url': '"""https://github.com/premolab/GeoMLE"""', 'author': '"""<NAME>, <NAME>"""', 'author_email': '"""<EMAIL> """', 'license': '"""MIT"""', 'packages': "['geomle']", 'install_re... |
"""
Created on 19/06/2020
@author: <NAME>
"""
from Data_manager.Dataset import Dataset
from Data_manager.IncrementalSparseMatrix import IncrementalSparseMatrix_FilterIDs
from pandas.api.types import is_string_dtype
import pandas as pd
def _add_keys_to_mapper(key_to_value_mapper, new_key_list):
for new_key in n... | [
"pandas.api.types.is_string_dtype",
"Data_manager.IncrementalSparseMatrix.IncrementalSparseMatrix_FilterIDs",
"Data_manager.Dataset.Dataset"
] | [((4767, 5168), 'Data_manager.Dataset.Dataset', 'Dataset', ([], {'dataset_name': 'dataset_name', 'URM_dictionary': 'URM_DICT_sparse', 'ICM_dictionary': 'ICM_DICT_sparse', 'ICM_feature_mapper_dictionary': 'self.ICM_mapper_DICT', 'UCM_dictionary': 'UCM_DICT_sparse', 'UCM_feature_mapper_dictionary': 'self.UCM_mapper_DICT'... |
import os
import pytest
import yaml
from gcasc.utils.yaml_include import YamlIncluderConstructor
from .helpers import read_file, read_yaml
YamlIncluderConstructor.add_to_loader_class(
loader_class=yaml.FullLoader,
base_dir=os.path.dirname(os.path.realpath(__file__)) + "/data",
)
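# registers the include constructor on yaml.FullLoader so YAML fixtures can pull in files under ./data (behavior inferred from the call)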
@pytest.fixture()
def file... | [
"pytest.fixture",
"os.path.realpath"
] | [((295, 311), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (309, 311), False, 'import pytest\n'), ((372, 388), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (386, 388), False, 'import pytest\n'), ((449, 465), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (463, 465), False, 'import pytest\n'), (... |
def memoiziraj(f):
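    # memoize f: results are cached in rezultati ("results") keyed by the argument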
rezultati = {}
def mem_f(x):
if x not in rezultati:
rezultati[x] = f(x)
return rezultati[x]
return mem_f
# make a smart function
import sys
sys.setrecursionlimit(10000000)
def vsota(n):
if n == 0:
return 0
else:
return n + vsota(n-1)... | [
"sys.setrecursionlimit"
] | [((202, 233), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10000000)'], {}), '(10000000)\n', (223, 233), False, 'import sys\n')] |
import cocotb
from cocotb.triggers import Timer
from utils import pytest_cocotb_run_test
def test_onehot_mux(pytestconfig):
"""Pytest fixture for One-hot Mux test"""
pytest_cocotb_run_test(pytestconfig, __name__)
@cocotb.test()
async def cocotb_test_onehot_mux(dut):
"""One-hot mux test"""
dw = int(... | [
"cocotb.triggers.Timer",
"cocotb.test",
"utils.pytest_cocotb_run_test"
] | [((226, 239), 'cocotb.test', 'cocotb.test', ([], {}), '()\n', (237, 239), False, 'import cocotb\n'), ((176, 222), 'utils.pytest_cocotb_run_test', 'pytest_cocotb_run_test', (['pytestconfig', '__name__'], {}), '(pytestconfig, __name__)\n', (198, 222), False, 'from utils import pytest_cocotb_run_test\n'), ((555, 563), 'co... |
""" Defines a number of diverse, system-wide helper functions.
Contents:
1. Pickling
2. Graph saving and loading
3. Reporting
4. Corpus processing
5. Math functions
"""
import os
import sys
import time
import pickle
import codecs
import random
import logging
import numpy as np
import pandas as pd
import tensorflow a... | [
"os.listdir",
"pickle.dump",
"random.shuffle",
"pickle.load",
"os.path.join",
"pandas.read_table",
"tensorflow.maximum",
"codecs.open",
"time.time"
] | [((4339, 4372), 'os.path.join', 'os.path.join', (['save_dir', 'file_name'], {}), '(save_dir, file_name)\n', (4351, 4372), False, 'import os\n'), ((8015, 8124), 'pandas.read_table', 'pd.read_table', (['scored_path'], {'header': 'None', 'names': "['Sentence', 'Sentence_Perplexity']", 'skip_blank_lines': '(True)'}), "(sco... |
# Exhibiter, copyright (c) 2021 <NAME>.
# This software may not be used to evict people, see LICENSE.md.
# python standard imports
from re import search, sub, fullmatch
from pathlib import Path
from copy import copy
# third-party imports
from pdfrw import PdfReader, PdfWriter, buildxobj, toreportlab
from reportlab.li... | [
"pdfrw.PdfWriter",
"pdfrw.toreportlab.makerl",
"PIL.Image.open",
"pathlib.Path",
"pdfrw.buildxobj.pagexobj",
"pdfrw.PdfReader",
"re.fullmatch",
"reportlab.pdfgen.canvas.Canvas",
"re.sub",
"docx.Document",
"re.search"
] | [((9589, 9600), 'pdfrw.PdfWriter', 'PdfWriter', ([], {}), '()\n', (9598, 9600), False, 'from pdfrw import PdfReader, PdfWriter, buildxobj, toreportlab\n'), ((10154, 10172), 'docx.Document', 'Document', (['template'], {}), '(template)\n', (10162, 10172), False, 'from docx import Document\n'), ((14798, 14828), 're.sub', ... |
from typing import List
from fastapi import APIRouter, Depends
from sqlmodel import select, Session
from app.models import *
from utils import get_session
router = APIRouter()
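# GET /users presumably selects every User row and serializes it with the UserRead schema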
@router.get("/users", response_model=List[UserRead])
async def get_users(*, session: Session=Depends(get_session)):
statement = select(Use... | [
"fastapi.APIRouter",
"fastapi.Depends",
"sqlmodel.select"
] | [((165, 176), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (174, 176), False, 'from fastapi import APIRouter, Depends\n'), ((271, 291), 'fastapi.Depends', 'Depends', (['get_session'], {}), '(get_session)\n', (278, 291), False, 'from fastapi import APIRouter, Depends\n'), ((310, 322), 'sqlmodel.select', 'select',... |
# Copyright (c) 2009, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the foll... | [
"tensorflow.gfile.Open",
"random.uniform",
"datetime.datetime.fromtimestamp",
"tensorflow.gfile.Exists",
"tensorflow.gfile.Stat",
"tensorflow.gfile.Remove",
"time.sleep",
"os.getcwd",
"datetime.datetime.now",
"os.getpid",
"datetime.timedelta",
"time.time"
] | [((1741, 1760), 'tensorflow.gfile.Stat', 'tf.gfile.Stat', (['path'], {}), '(path)\n', (1754, 1760), True, 'import tensorflow as tf\n'), ((1852, 1891), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['time_s'], {}), '(time_s)\n', (1883, 1891), False, 'import datetime\n'), ((1903, 1926), 'datetime... |