| code (string, lengths 22-1.05M) | apis (list, lengths 1-3.31k) | extract_api (string, lengths 75-3.25M) |
|---|---|---|
import datetime
import os
import sys
from cmath import inf
from typing import Any
import hypothesis.extra.numpy as xps
import hypothesis.strategies as st
import numpy
import pytest
from hypothesis import assume, given
from eopf.product.utils import (
apply_xpath,
conv,
convert_to_unix_time,
is_date,
... | [
"numpy.iinfo",
"eopf.product.utils.convert_to_unix_time",
"hypothesis.extra.numpy.integer_dtypes",
"eopf.product.utils.conv",
"hypothesis.extra.numpy.boolean_dtypes",
"datetime.datetime",
"eopf.product.utils.is_date",
"numpy.int64",
"hypothesis.strategies.booleans",
"hypothesis.strategies.text",
... | [((7718, 7759), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""EPSILON"""', '[0.1]'], {}), "('EPSILON', [0.1])\n", (7741, 7759), False, 'import pytest\n'), ((457, 523), 'os.path.join', 'os.path.join', (['EMBEDED_TEST_DATA_FOLDER', '"""snippet_xfdumanifest.xml"""'], {}), "(EMBEDED_TEST_DATA_FOLDER, 'snippet... |
import pytest
from app.html.inline_builder import InlineBuilder, LinkBuilder, CodeBuilder, ImageBuilder
from app.markdown.inline_parser import InlineParser, LinkParser, CodeParser, ImageParser
from app.settings import setting
class TestInlineBuilder:
""" Inline要素からHTML文字列が得られるか検証 """
# HTML組み立て
@pytest.... | [
"app.html.inline_builder.InlineBuilder",
"app.html.inline_builder.ImageBuilder",
"app.html.inline_builder.CodeBuilder",
"pytest.mark.parametrize",
"app.html.inline_builder.LinkBuilder",
"app.markdown.inline_parser.LinkParser",
"app.markdown.inline_parser.CodeParser",
"app.markdown.inline_parser.Inline... | [((313, 782), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('inline_text', 'expected')", '[(\'plain text\', \'plain text\'), (\'[参考リンク](https://docs.python.org/3/)\',\n f\'<a href="https://docs.python.org/3/" class="{setting[\\\'class_name\\\'][\\\'a\\\']}">参考リンク</a>\'\n ), (\':
data = array.array('f')
test = array.array('f')
with open(filename, 'rb') as fd:
data.... | [
"student_code.part_two_classifier",
"array.array",
"student_code.part_one_classifier"
] | [((220, 236), 'array.array', 'array.array', (['"""f"""'], {}), "('f')\n", (231, 236), False, 'import array\n'), ((249, 265), 'array.array', 'array.array', (['"""f"""'], {}), "('f')\n", (260, 265), False, 'import array\n'), ((2886, 2944), 'student_code.part_one_classifier', 'student_code.part_one_classifier', (['trainin... |
from bottle import request, response, HTTPResponse
import os, datetime, re
import json as JSON
import jwt
class auth:
def gettoken(mypass):
secret = str(os.getenv('API_SCRT', '!@ws4RT4ws212@#%'))
password = str(os.getenv('API_PASS', 'password'))
if mypass == password:
... | [
"jwt.decode",
"os.getenv",
"datetime.datetime.utcnow",
"json.dumps",
"bottle.request.headers.keys",
"datetime.timedelta",
"bottle.request.headers.raw"
] | [((177, 218), 'os.getenv', 'os.getenv', (['"""API_SCRT"""', '"""!@ws4RT4ws212@#%"""'], {}), "('API_SCRT', '!@ws4RT4ws212@#%')\n", (186, 218), False, 'import os, datetime, re\n'), ((245, 278), 'os.getenv', 'os.getenv', (['"""API_PASS"""', '"""password"""'], {}), "('API_PASS', 'password')\n", (254, 278), False, 'import o... |
from django.conf.urls import include, url
from django.conf import settings
from .views import data_sniffer_health_check
if settings.DATA_SNIFFER_ENABLED:
urlpatterns = [
url(r'^(?P<key>[-\w]+)', data_sniffer_health_check, name="data_sniffer_health_check"),
]
else:
urlpatterns = []
| [
"django.conf.urls.url"
] | [((184, 274), 'django.conf.urls.url', 'url', (['"""^(?P<key>[-\\\\w]+)"""', 'data_sniffer_health_check'], {'name': '"""data_sniffer_health_check"""'}), "('^(?P<key>[-\\\\w]+)', data_sniffer_health_check, name=\n 'data_sniffer_health_check')\n", (187, 274), False, 'from django.conf.urls import include, url\n')] |
# Generated by Django 2.1.2 on 2018-10-19 14:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mediumwave', '0010_auto_20181017_1937'),
]
operations = [
migrations.AddField(
model_name='transmitter',
name='iso',... | [
"django.db.models.CharField"
] | [((339, 381), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(3)'}), '(blank=True, max_length=3)\n', (355, 381), False, 'from django.db import migrations, models\n')] |
#!/usr/bin/env python
import sys
import csv
import json
mostRatingUser = None
mostRatingCount = 0
mostRatingInfo = None
for line in sys.stdin:
line = line.strip()
user, genreString = line.split("\t", 1)
genreInfo = json.loads(genreString)
if not mostRatingUser or len(genreInfo) > mostRatingCount:
... | [
"json.loads"
] | [((230, 253), 'json.loads', 'json.loads', (['genreString'], {}), '(genreString)\n', (240, 253), False, 'import json\n')] |
import socket
from datetime import datetime
# Author @inforkgodara
ip_address = input("IP Address: ")
splitted_ip_digits = ip_address.split('.')
dot = '.'
first_three_ip_digits = splitted_ip_digits[0] + dot + splitted_ip_digits[1] + dot + splitted_ip_digits[2] + dot
starting_number = int(input("Starting IP Number: "... | [
"datetime.datetime.now",
"socket.socket",
"socket.setdefaulttimeout"
] | [((419, 433), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (431, 433), False, 'from datetime import datetime\n'), ((878, 892), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (890, 892), False, 'from datetime import datetime\n'), ((468, 517), 'socket.socket', 'socket.socket', (['socket.AF_INET'... |
# Simulate user activity for Windows
# Can trigger Brave Ads
import random
from time import sleep
import pydirectinput
import os
# clear log function
def cls():
os.system('cls' if os.name == 'nt' else 'clear')
# main simulate function
def simulate():
while True:
# you can change x,y with your screen ... | [
"random.choice",
"pydirectinput.move",
"time.sleep",
"pydirectinput.click",
"pydirectinput.keyDown",
"pydirectinput.keyUp",
"pydirectinput.moveTo",
"os.system",
"random.randint"
] | [((168, 216), 'os.system', 'os.system', (["('cls' if os.name == 'nt' else 'clear')"], {}), "('cls' if os.name == 'nt' else 'clear')\n", (177, 216), False, 'import os\n'), ((384, 410), 'random.randint', 'random.randint', (['(2567)', '(4460)'], {}), '(2567, 4460)\n', (398, 410), False, 'import random\n'), ((429, 455), 'r... |
from TOKEN import LexToken
class Lexer:
def __init__(self,text):
self.my_bool = False
self.result = ''
self.names = {
"case" : "CASE",
"class" : "CLASS",
"else" : "ELSE",
"esac" : "ESAC",
"fi" : "FI",
"if" : "IF",
... | [
"TOKEN.LexToken"
] | [((6007, 6044), 'TOKEN.LexToken', 'LexToken', (['"""STRING"""', 'current1', 'Ln', 'Col'], {}), "('STRING', current1, Ln, Col)\n", (6015, 6044), False, 'from TOKEN import LexToken\n'), ((6885, 6934), 'TOKEN.LexToken', 'LexToken', (['self.names[current2]', 'current2', 'Ln', 'Col'], {}), '(self.names[current2], current2, ... |
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
from sklearn import feature_selection as fs
from sklearn import naive_bayes
from sklearn import model_selection
from sklearn import metrics
from sklearn import linear_model
from sklearn import svm
from imblearn.under_sampling import Ne... | [
"numpy.unique",
"sklearn.feature_selection.VarianceThreshold",
"pandas.read_csv",
"imblearn.under_sampling.NeighbourhoodCleaningRule",
"sklearn.feature_selection.SelectFromModel",
"imblearn.over_sampling.SMOTE",
"sklearn.svm.LinearSVC",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_scor... | [((714, 752), 'sklearn.feature_selection.mutual_info_regression', 'fs.mutual_info_regression', (['features', 'y'], {}), '(features, y)\n', (739, 752), True, 'from sklearn import feature_selection as fs\n'), ((1027, 1049), 'sklearn.feature_selection.VarianceThreshold', 'fs.VarianceThreshold', ([], {}), '()\n', (1047, 10... |
"""For each repo in DEPS, git config an appropriate depot-tools.upstream.
This will allow git new-branch to set the correct tracking branch.
"""
import argparse
import hashlib
import json
import os
import sys
import textwrap
import gclient_utils
import git_common
def _GclientEntriesToString(entries):
entries_str ... | [
"git_common.root",
"os.path.exists",
"textwrap.dedent",
"hashlib.md5",
"json.dumps",
"os.path.join",
"sys.stdout.write",
"os.chdir",
"sys.stderr.write",
"git_common.set_config",
"gclient_utils.SplitUrlRevision",
"gclient_utils.GetGClientRootAndEntries"
] | [((322, 357), 'json.dumps', 'json.dumps', (['entries'], {'sort_keys': '(True)'}), '(entries, sort_keys=True)\n', (332, 357), False, 'import json\n'), ((595, 613), 'os.chdir', 'os.chdir', (['repo_dir'], {}), '(repo_dir)\n', (603, 613), False, 'import os\n'), ((639, 674), 'gclient_utils.SplitUrlRevision', 'gclient_utils.... |
import os
import socket
import codecs
import urllib3
from urllib.parse import urlparse
def __process__(command):
try:
process = os.popen(command)
results = str(process.read())
return results
except Exception as e:
raise e
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(direct... | [
"os.path.exists",
"socket.gethostbyname",
"urllib.parse.urlparse",
"os.makedirs",
"os.popen"
] | [((133, 150), 'os.popen', 'os.popen', (['command'], {}), '(command)\n', (141, 150), False, 'import os\n'), ((273, 298), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (287, 298), False, 'import os\n'), ((302, 324), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (313, 324)... |
from collections import defaultdict
from typing import List, Any, Tuple
from util.helpers import solution_timer
from util.input_helper import read_entire_input
from util.console import console
from year_2019.intcode import IntCode, parse
data = read_entire_input(2019,11)
def run_robot(data:List[str], init=0):
deb... | [
"util.input_helper.read_entire_input",
"util.console.console.print",
"util.helpers.solution_timer",
"collections.defaultdict",
"year_2019.intcode.parse",
"year_2019.intcode.IntCode"
] | [((246, 273), 'util.input_helper.read_entire_input', 'read_entire_input', (['(2019)', '(11)'], {}), '(2019, 11)\n', (263, 273), False, 'from util.input_helper import read_entire_input\n'), ((1151, 1178), 'util.helpers.solution_timer', 'solution_timer', (['(2019)', '(11)', '(1)'], {}), '(2019, 11, 1)\n', (1165, 1178), F... |
#!/usr/bin/env python
import datetime
import optparse
import os
import os.path
import struct
import sys
# sudo pip3 install piexif
import piexif
# Make this negative to subtract time, e.g.:
# -datetime.timedelta(hours=5, minutes=9)
#TIME_ADJUSTMENT = datetime.timedelta(hours=5, minutes=9)
#TIME_ADJUSTMENT = datetim... | [
"optparse.OptionParser",
"os.utime",
"piexif.load",
"sys.stderr.write",
"sys.exit",
"os.path.getmtime",
"datetime.timedelta",
"piexif.dump"
] | [((358, 401), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(788)', 'seconds': '(13032)'}), '(days=788, seconds=13032)\n', (376, 401), False, 'import datetime\n'), ((588, 617), 'os.path.getmtime', 'os.path.getmtime', (['infile_name'], {}), '(infile_name)\n', (604, 617), False, 'import os\n'), ((4256, 4290)... |
import os
import sys
import numpy as np
import pandas as pd
import tensorflow as tf
from losses import focal_loss,weighted_binary_crossentropy
from utils import Dataset
class DeepFM(object):
def __init__(self, params):
self.feature_size = params['feature_size']
self.field_size = params['field_size'... | [
"tensorflow.local_variables_initializer",
"numpy.sqrt",
"tensorflow.contrib.layers.l2_regularizer",
"tensorflow.reduce_sum",
"tensorflow.gradients",
"numpy.array",
"tensorflow.control_dependencies",
"tensorflow.clip_by_global_norm",
"tensorflow.nn.embedding_lookup",
"tensorflow.random_normal",
"... | [((945, 1011), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[None, None]', 'name': '"""feature_index"""'}), "(tf.int32, shape=[None, None], name='feature_index')\n", (959, 1011), True, 'import tensorflow as tf\n'), ((1038, 1106), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'sh... |
import numpy as np
from math import log
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score, mean_squared_error, mean_absolute_error, classification_report
from math import sqrt
import json
from pprint import pprint
import argparse
parser = argparse.ArgumentParser(f... | [
"argparse.ArgumentParser",
"sklearn.metrics.roc_auc_score",
"sklearn.linear_model.LogisticRegression",
"sklearn.metrics.mean_squared_error",
"numpy.zeros",
"math.log",
"json.load",
"sklearn.metrics.mean_absolute_error"
] | [((295, 374), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (318, 374), False, 'import argparse\n'), ((724, 739), 'json.load', 'json.load', (['file'], {}), '(file)\n', (733, 739), F... |
# -*- coding: utf-8 -*-
# @Time : 2020/12/23 2:27 PM
# @Author : Kevin
import config
from utils.sentence_process import cut_sentence_by_character
from search.sort.word_to_sequence import Word2Sequence
import pickle
def prepare_dict_model():
lines=open(config.sort_all_file_path,"r").readlines()
ws=Word2Se... | [
"search.sort.word_to_sequence.Word2Sequence",
"utils.sentence_process.cut_sentence_by_character"
] | [((313, 328), 'search.sort.word_to_sequence.Word2Sequence', 'Word2Sequence', ([], {}), '()\n', (326, 328), False, 'from search.sort.word_to_sequence import Word2Sequence\n'), ((340, 371), 'utils.sentence_process.cut_sentence_by_character', 'cut_sentence_by_character', (['line'], {}), '(line)\n', (365, 371), False, 'fro... |
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
try:
from unittest.mock import MagicMock
except ImportError:
from mock import MagicMock
import uuid
from ddt import ddt as DataDrivenTestCase, data as ddt_da... | [
"schwarz.mailqueue.DebugMailer",
"schwarz.mailqueue.lock_file",
"testfixtures.LogCapture",
"os.walk",
"os.path.exists",
"schwarz.log_utils.l_",
"os.listdir",
"schwarz.mailqueue.testutils.assert_did_log_message",
"os.unlink",
"schwarz.mailqueue.MessageHandler",
"schwarz.mailqueue.create_maildir_d... | [((1343, 1364), 'ddt.data', 'ddt_data', (['(True)', '(False)'], {}), '(True, False)\n', (1351, 1364), True, 'from ddt import ddt as DataDrivenTestCase, data as ddt_data\n'), ((8179, 8200), 'ddt.data', 'ddt_data', (['(True)', '(False)'], {}), '(True, False)\n', (8187, 8200), True, 'from ddt import ddt as DataDrivenTestC... |
#!/usr/bin/python3
"""Tests for reflinks script."""
#
# (C) Pywikibot team, 2014-2022
#
# Distributed under the terms of the MIT license.
#
import unittest
from scripts.reflinks import ReferencesRobot, XmlDumpPageGenerator, main
from tests import join_xml_data_path
from tests.aspects import ScriptMainTestCase, TestCas... | [
"scripts.reflinks.XmlDumpPageGenerator",
"tests.utils.empty_sites",
"tests.join_xml_data_path",
"unittest.main",
"scripts.reflinks.main"
] | [((6787, 6802), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6800, 6802), False, 'import unittest\n'), ((1039, 1079), 'tests.join_xml_data_path', 'join_xml_data_path', (['"""dummy-reflinks.xml"""'], {}), "('dummy-reflinks.xml')\n", (1057, 1079), False, 'from tests import join_xml_data_path\n'), ((2280, 2320), '... |
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 4 17:01:28 2021
@author: fahim
"""
from keras.models import Model
from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D
from keras.initializers import glorot_uniform
def identity_block(X, f, filters, ... | [
"keras.layers.MaxPooling2D",
"keras.layers.ZeroPadding2D",
"keras.layers.AveragePooling2D",
"keras.layers.Input",
"keras.models.Model",
"keras.layers.Activation",
"keras.initializers.glorot_uniform",
"keras.layers.BatchNormalization",
"keras.layers.Add"
] | [((2641, 2659), 'keras.layers.Input', 'Input', (['input_shape'], {}), '(input_shape)\n', (2646, 2659), False, 'from keras.layers import Input, Add, Activation, ZeroPadding2D, BatchNormalization, Conv2D, AveragePooling2D, MaxPooling2D\n'), ((4165, 4214), 'keras.models.Model', 'Model', ([], {'inputs': 'X_input', 'outputs... |
# -*- coding: utf-8 -*-
"""Console script for bioinf."""
import sys
import click
from .sequence import Sequence
from .sequence_alignment import NeedlemanWunschSequenceAlignmentAlgorithm
from .utils import read_config, read_sequence
@click.group()
def main(args=None):
"""Console script for bioinf."""
@main.comm... | [
"click.group",
"click.echo",
"click.Path"
] | [((236, 249), 'click.group', 'click.group', ([], {}), '()\n', (247, 249), False, 'import click\n'), ((351, 374), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (361, 374), False, 'import click\n'), ((416, 439), 'click.Path', 'click.Path', ([], {'exists': '(True)'}), '(exists=True)\n', (426, ... |
from __future__ import unicode_literals
import unittest
from mopidy.local import json
from mopidy.models import Ref
class BrowseCacheTest(unittest.TestCase):
def setUp(self):
self.uris = [b'local:track:foo/bar/song1',
b'local:track:foo/bar/song2',
b'local:track:... | [
"mopidy.models.Ref.directory",
"mopidy.models.Ref.track",
"mopidy.local.json._BrowseCache"
] | [((353, 381), 'mopidy.local.json._BrowseCache', 'json._BrowseCache', (['self.uris'], {}), '(self.uris)\n', (370, 381), False, 'from mopidy.local import json\n'), ((435, 487), 'mopidy.models.Ref.directory', 'Ref.directory', ([], {'uri': '"""local:directory:foo"""', 'name': '"""foo"""'}), "(uri='local:directory:foo', nam... |
# coding=utf-8
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlVerlichtingstoestelModelnaam(KeuzelijstField):
"""De modelnaam van het verlich... | [
"OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde"
] | [((729, 888), 'OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde.KeuzelijstWaarde', 'KeuzelijstWaarde', ([], {'invulwaarde': '"""ARC"""', 'label': '"""ARC"""', 'objectUri': '"""https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerlichtingstoestelModelnaam/ARC"""'}), "(invulwaarde='ARC', label='ARC', objectUri=\n 'http... |
import string
from unittest2 import TestCase
import os
from hypothesis import given
from hypothesis.strategies import text, lists
from mock import patch, Mock
from githooks import repo
class FakeDiffObject(object):
def __init__(self, a_path, b_path, new, deleted):
self.a_path = a_path
self.b_pat... | [
"hypothesis.strategies.text",
"mock.patch",
"githooks.repo.get",
"mock.Mock",
"githooks.repo.added_files",
"githooks.repo.deleted_files",
"os.getcwd",
"os.path.dirname",
"githooks.repo.repo_root",
"githooks.repo.untracked_files",
"githooks.repo.modified_files"
] | [((427, 453), 'mock.patch', 'patch', (['"""githooks.repo.git"""'], {}), "('githooks.repo.git')\n", (432, 453), False, 'from mock import patch, Mock\n'), ((839, 865), 'mock.patch', 'patch', (['"""githooks.repo.get"""'], {}), "('githooks.repo.get')\n", (844, 865), False, 'from mock import patch, Mock\n'), ((1202, 1228), ... |
# -*- coding: utf-8 -*-
from hmac import HMAC
from hashlib import sha256
import random
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
def pbkd(password,salt):
"""
password must be a string in ASCII; for some reason
a string of type unicode provokes the follow... | [
"logging.basicConfig",
"logging.getLogger",
"random.randrange"
] | [((102, 142), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (121, 142), False, 'import logging\n'), ((153, 180), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (170, 180), False, 'import logging\n'), ((579, 600), 'random.randran... |
import os
import subprocess
import pathlib
def reemplazar(string):
return string.replace('self.', 'self.w.').replace('Form"', 'self.w.centralWidget"').replace('Form.', 'self.w.centralWidget.').replace('Form)', 'self.w.centralWidget)').replace('"', "'")
try:
url_archivo = input('Archivo: ').strip().strip('"'... | [
"subprocess.Popen",
"pathlib.Path",
"os.path.basename",
"os.remove"
] | [((374, 403), 'os.path.basename', 'os.path.basename', (['url_archivo'], {}), '(url_archivo)\n', (390, 403), False, 'import os\n'), ((2881, 2906), 'os.remove', 'os.remove', (['url_archivo_py'], {}), '(url_archivo_py)\n', (2890, 2906), False, 'import os\n'), ((558, 583), 'pathlib.Path', 'pathlib.Path', (['url_archivo'], ... |
"""
This script requires developers to add the following information:
1. add file and function name to srcfiles_srcfuncs
2. add file and directory name to srcdir_srcfiles
3. add expected display name for the function to display_names
"""
import os
import itertools
from shutil import copyfile... | [
"os.path.exists",
"os.path.join",
"os.getcwd",
"os.chdir",
"shutil.copyfile",
"os.mkdir"
] | [((1684, 1695), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1693, 1695), False, 'import os\n'), ((1777, 1809), 'os.path.join', 'os.path.join', (['cur_dir', '"""API_Doc"""'], {}), "(cur_dir, 'API_Doc')\n", (1789, 1809), False, 'import os\n'), ((2147, 2169), 'os.chdir', 'os.chdir', (['src_copy_dir'], {}), '(src_copy_dir... |
from google.appengine.ext import ndb
from google.appengine.ext.ndb import msgprop
from entities import BaseEntity
from constants import Gender, UserStatus, Device, APIStatus
from errors import DataError
class User(BaseEntity):
name = ndb.StringProperty()
mail = ndb.StringProperty()
gender = msgprop.EnumPr... | [
"google.appengine.ext.ndb.DateProperty",
"google.appengine.ext.ndb.BlobProperty",
"errors.DataError",
"google.appengine.ext.ndb.msgprop.EnumProperty",
"google.appengine.ext.ndb.IntegerProperty",
"google.appengine.ext.ndb.DateTimeProperty",
"google.appengine.ext.ndb.StringProperty"
] | [((240, 260), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (258, 260), False, 'from google.appengine.ext import ndb\n'), ((272, 292), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {}), '()\n', (290, 292), False, 'from google.appengine.ext import ndb\n'), ((3... |
# core modules
from math import pi
# 3rd party modules
import matplotlib.pyplot as plt
import pandas as pd
# internal modules
import analysis
def main(path):
df = analysis.parse_file(path)
df = prepare_df(df, grouping=(df['date'].dt.hour))
print(df.reset_index().to_dict(orient='list'))
df = pd.DataF... | [
"analysis.get_parser",
"matplotlib.pyplot.xticks",
"analysis.parse_file",
"pandas.DataFrame",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show"
] | [((171, 196), 'analysis.parse_file', 'analysis.parse_file', (['path'], {}), '(path)\n', (190, 196), False, 'import analysis\n'), ((312, 568), 'pandas.DataFrame', 'pd.DataFrame', (["{'date': [209, 13, 1, 2, 1, 25, 809, 3571, 1952, 1448, 942, 1007, 1531, \n 1132, 981, 864, 975, 2502, 2786, 2717, 3985, 4991, 2872, 761]... |
from datetime import datetime
from threading import Lock
from Database import Database
class LoggedSensor:
"""
This is a common base class for all sensors that have data to be stored/logged.
"""
registered_type_ids = []
def __init__(self, type_id, max_measurements=200, holdoff_time=None):
... | [
"datetime.datetime.fromtimestamp",
"threading.Lock",
"datetime.datetime.now",
"Database.Database.instance.fetch_latest_measurements",
"Database.Database.instance.insert_measurement"
] | [((608, 614), 'threading.Lock', 'Lock', ([], {}), '()\n', (612, 614), False, 'from threading import Lock\n'), ((664, 749), 'Database.Database.instance.fetch_latest_measurements', 'Database.instance.fetch_latest_measurements', (['self.type_id', 'self.max_measurements'], {}), '(self.type_id, self.max_measurements\n )\... |
import boto3
from botocore.exceptions import ClientError
import gzip
import io
import os
import csv
import re
class S3Data(object):
def __init__(self, bucket_name_, prefix_, file_, df_schema_, compression_type_,
check_headers_, file_type_, access_key_=None, secret_key_=None,
regi... | [
"csv.DictReader",
"boto3.client",
"gzip.open",
"io.BytesIO",
"boto3.resource",
"os.path.basename",
"re.search"
] | [((3043, 3165), 'boto3.client', 'boto3.client', (['"""s3"""'], {'region_name': 'self.region', 'aws_access_key_id': 'self.access_key', 'aws_secret_access_key': 'self.secret_key'}), "('s3', region_name=self.region, aws_access_key_id=self.\n access_key, aws_secret_access_key=self.secret_key)\n", (3055, 3165), False, 'i... |
from datetime import datetime
import timebomb.models as models
def test_Notification():
notif = models.Notification("message")
assert notif.content == "message"
assert notif.read is False
assert str(notif) == "message"
def test_Player():
player = models.Player("name", "id")
assert player.... | [
"timebomb.models.Message",
"timebomb.models.Room",
"timebomb.models.GameState",
"timebomb.models.Player",
"datetime.datetime.now",
"timebomb.models.Notification"
] | [((103, 133), 'timebomb.models.Notification', 'models.Notification', (['"""message"""'], {}), "('message')\n", (122, 133), True, 'import timebomb.models as models\n'), ((273, 300), 'timebomb.models.Player', 'models.Player', (['"""name"""', '"""id"""'], {}), "('name', 'id')\n", (286, 300), True, 'import timebomb.models ... |
from __future__ import unicode_literals
import json
import numpy as np
from builtins import str
from abc import ABCMeta, abstractmethod
from pychemia import HAS_PYMONGO
from pychemia.utils.computing import deep_unicode
if HAS_PYMONGO:
from pychemia.db import PyChemiaDB
class Population:
__metaclass__ = ABCMe... | [
"pychemia.utils.computing.deep_unicode",
"pychemia.db.PyChemiaDB",
"numpy.argsort",
"numpy.array",
"json.load",
"json.dump"
] | [((501, 519), 'pychemia.utils.computing.deep_unicode', 'deep_unicode', (['name'], {}), '(name)\n', (513, 519), False, 'from pychemia.utils.computing import deep_unicode\n'), ((3036, 3054), 'numpy.argsort', 'np.argsort', (['values'], {}), '(values)\n', (3046, 3054), True, 'import numpy as np\n'), ((3193, 3209), 'json.lo... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `pynessie` package."""
import pytest
import requests_mock
import simplejson as json
from click.testing import CliRunner
from pynessie import __version__
from pynessie import cli
from pynessie.model import ReferenceSchema
def test_command_line_interface(reque... | [
"simplejson.dumps",
"pynessie.model.ReferenceSchema",
"click.testing.CliRunner"
] | [((391, 402), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (400, 402), False, 'from click.testing import CliRunner\n'), ((1398, 1409), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (1407, 1409), False, 'from click.testing import CliRunner\n'), ((872, 948), 'simplejson.dumps', 'json.dumps', (["[... |
# Code generated by sqlc. DO NOT EDIT.
import dataclasses
from typing import Optional
@dataclasses.dataclass()
class Author:
id: int
name: str
bio: Optional[str]
| [
"dataclasses.dataclass"
] | [((89, 112), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {}), '()\n', (110, 112), False, 'import dataclasses\n')] |
import unittest
import datetime
from weightTrack import WeightNote
class TestWeightNote(unittest.TestCase):
### Testing getter methods ###
def test_shouldGetWeight(self):
testWeight = WeightNote(100, "Ate breakfast")
self.assertEqual(testWeight.getWeight(), 100, "Should be 100")
# Note:... | [
"unittest.main",
"weightTrack.WeightNote"
] | [((1866, 1881), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1879, 1881), False, 'import unittest\n'), ((204, 236), 'weightTrack.WeightNote', 'WeightNote', (['(100)', '"""Ate breakfast"""'], {}), "(100, 'Ate breakfast')\n", (214, 236), False, 'from weightTrack import WeightNote\n'), ((759, 791), 'weightTrack.We... |
# -*- encoding: utf-8 -*-
import datetime
def formata_data(data):
data = datetime.datetime.strptime(data, '%d/%m/%Y').date()
return data.strftime("%Y%m%d")
def formata_valor(valor):
return str("%.2f" % valor).replace(".", "")
| [
"datetime.datetime.strptime"
] | [((80, 124), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['data', '"""%d/%m/%Y"""'], {}), "(data, '%d/%m/%Y')\n", (106, 124), False, 'import datetime\n')] |
import argparse
import torch
import syft as sy
from syft import WebsocketServerWorker
def get_args():
parser = argparse.ArgumentParser(description="Run websocket server worker.")
parser.add_argument(
"--port",
"-p",
type=int,
default=8777,
help="port number of the webso... | [
"syft.TorchHook",
"argparse.ArgumentParser",
"syft.WebsocketServerWorker"
] | [((117, 184), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run websocket server worker."""'}), "(description='Run websocket server worker.')\n", (140, 184), False, 'import argparse\n'), ((845, 864), 'syft.TorchHook', 'sy.TorchHook', (['torch'], {}), '(torch)\n', (857, 864), True, 'impo... |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
def swig_import_helper():
from os.path imp... | [
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back",
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin",
"_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1",
"_SimInternalLoad_Lights_Default.SimInterna... | [((3458, 3551), '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name', '_SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name', (['self', '*args'], {}), '(\n self, *args)\n', (3533, 3551), False, 'import _SimInternalLoad_Lights_Default\n'), ((3620, 3727), '_SimInternalLo... |
import garm.indicators as gari
import ham.time_utils as hamt
import ohlcv
import luigi
import strategies as chs
from luigi.util import inherits
@inherits(chs.Strategy)
class BuyAndHold(chs.Strategy):
FN = gari.buy_and_hold_signals
def requires(self):
for m in hamt.months(self.start_date, self.end_da... | [
"ham.time_utils.months",
"ohlcv.OHLCV",
"luigi.util.inherits"
] | [((148, 170), 'luigi.util.inherits', 'inherits', (['chs.Strategy'], {}), '(chs.Strategy)\n', (156, 170), False, 'from luigi.util import inherits\n'), ((280, 323), 'ham.time_utils.months', 'hamt.months', (['self.start_date', 'self.end_date'], {}), '(self.start_date, self.end_date)\n', (291, 323), True, 'import ham.time_... |
import pytesseract
import os
import time
import requests
import json
from PIL import Image,ImageFont,ImageDraw
# Read the configuration file
with open('config.json') as json_file:
config = json.load(json_file)
# Default directory for saving files
MAIN_PATH = './imageApi/image/'
# FONT, used to render text into an image
FONT = config['font']
def strToImg(text,mainPath):
'''
... | [
"PIL.Image.open",
"os.makedirs",
"PIL.Image.new",
"PIL.ImageFont.truetype",
"requests.get",
"PIL.ImageDraw.Draw",
"pytesseract.image_to_string",
"json.load",
"time.time"
] | [((174, 194), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (183, 194), False, 'import json\n'), ((452, 490), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(W, H)', '(26, 26, 26)'], {}), "('RGB', (W, H), (26, 26, 26))\n", (461, 490), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((499, 517)... |
# coding: utf-8
# ### Open using the Databricks platform / PySpark. It holds the code for developing the RandomForest classifier on the chosen subset of important features.
# In[1]:
import os, sys
import pandas as pd
import numpy as np
from sklearn.metrics import matthews_corrcoef
import pyspark
from numpy import array... | [
"pyspark.ml.Pipeline",
"pyspark.ml.classification.RandomForestClassifier",
"gc.collect",
"pyspark.ml.evaluation.MulticlassClassificationEvaluator",
"pyspark.ml.feature.VectorAssembler",
"sklearn.metrics.matthews_corrcoef"
] | [((1315, 1371), 'pyspark.ml.feature.VectorAssembler', 'VectorAssembler', ([], {'inputCols': 'feature', 'outputCol': '"""features"""'}), "(inputCols=feature, outputCol='features')\n", (1330, 1371), False, 'from pyspark.ml.feature import VectorAssembler\n'), ((1568, 1677), 'pyspark.ml.classification.RandomForestClassifie... |
#!/usr/bin/env python3
# coding=utf-8
import os as os
import sys as sys
import io as io
import traceback as trb
import argparse as argp
import gzip as gz
import operator as op
import functools as fnt
def parse_command_line():
"""
:return:
"""
parser = argp.ArgumentParser()
parser.add_argument('--... | [
"argparse.ArgumentParser",
"gzip.open",
"functools.partial",
"os.path.basename",
"sys.exit",
"operator.itemgetter",
"io.StringIO",
"traceback.print_exc"
] | [((271, 292), 'argparse.ArgumentParser', 'argp.ArgumentParser', ([], {}), '()\n', (290, 292), True, 'import argparse as argp\n'), ((1926, 1939), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1937, 1939), True, 'import io as io\n'), ((2016, 2056), 'functools.partial', 'fnt.partial', (['join_parts', '*(args.switch,)']... |
#-*- coding: utf-8 -*-
from django.db import models
# Create your models here.
class Feature(models.Model):
day = models.SmallIntegerField()
month = models.SmallIntegerField()
year = models.SmallIntegerField()
momentum = models.FloatField(
null=True, blank=True)
day5disparity = models.Flo... | [
"django.db.models.DecimalField",
"django.db.models.FloatField",
"django.db.models.SmallIntegerField"
] | [((121, 147), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (145, 147), False, 'from django.db import models\n'), ((160, 186), 'django.db.models.SmallIntegerField', 'models.SmallIntegerField', ([], {}), '()\n', (184, 186), False, 'from django.db import models\n'), ((198, 224), 'dja... |
'''
@Description: Easter Egg
@Author: <NAME>
@Date: 2019-08-10 10:30:29
@LastEditors: <NAME>
@LastEditTime: 2019-08-10 10:36:24
'''
from flask import Blueprint
egg = Blueprint('egg', __name__)
from . import views | [
"flask.Blueprint"
] | [((167, 193), 'flask.Blueprint', 'Blueprint', (['"""egg"""', '__name__'], {}), "('egg', __name__)\n", (176, 193), False, 'from flask import Blueprint\n')] |
import pyaf.Bench.TS_datasets as tsds
import pyaf.Bench.YahooStocks as ys
import warnings
symbol_lists = tsds.get_yahoo_symbol_lists();
y_keys = sorted(symbol_lists.keys())
print(y_keys)
k = "nysecomp"
tester = ys.cYahoo_Tester(tsds.load_yahoo_stock_prices(k) , "YAHOO_STOCKS_" + k);
with warnings.catch_warnings():
... | [
"warnings.simplefilter",
"pyaf.Bench.TS_datasets.get_yahoo_symbol_lists",
"warnings.catch_warnings",
"pyaf.Bench.TS_datasets.load_yahoo_stock_prices"
] | [((106, 135), 'pyaf.Bench.TS_datasets.get_yahoo_symbol_lists', 'tsds.get_yahoo_symbol_lists', ([], {}), '()\n', (133, 135), True, 'import pyaf.Bench.TS_datasets as tsds\n'), ((231, 262), 'pyaf.Bench.TS_datasets.load_yahoo_stock_prices', 'tsds.load_yahoo_stock_prices', (['k'], {}), '(k)\n', (259, 262), True, 'import pya... |
import pycropml.transpiler.antlr_py.grammars
from pycropml.transpiler.antlr_py.grammars.CSharpLexer import CSharpLexer
from pycropml.transpiler.antlr_py.grammars.CSharpParser import CSharpParser
from pycropml.transpiler.antlr_py.grammars.Fortran90Lexer import Fortran90Lexer
from pycropml.transpiler.antlr_py.grammars.F... | [
"collections.OrderedDict",
"collections.namedtuple",
"functools.reduce",
"operator.methodcaller",
"inspect.signature",
"antlr4.CommonTokenStream",
"antlr4.InputStream",
"inspect.isclass"
] | [((4280, 4323), 'collections.namedtuple', 'namedtuple', (['"""FieldSpec"""', "['name', 'origin']"], {}), "('FieldSpec', ['name', 'origin'])\n", (4290, 4323), False, 'from collections import OrderedDict, namedtuple\n'), ((2199, 2216), 'antlr4.InputStream', 'InputStream', (['code'], {}), '(code)\n', (2210, 2216), False, ... |
# coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.16.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TimelineComment(object):
"""NOTE: This class is... | [
"six.iteritems"
] | [((21746, 21779), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (21759, 21779), False, 'import six\n')] |
import string
import warnings
import re
from . import util
import spacy
class FileParser(object):
def __init__(self,
file_parser='txt',
xml_node_path=None, fparser=None):
if file_parser not in ['txt', 'xml', 'defined']:
msg = 'file_parser should be txt, xml or... | [
"spacy.load",
"nltk.tokenize.WordPunctTokenizer",
"nltk.tokenize.PunktWordTokenizer",
"warnings.warn",
"re.sub",
"nltk.tokenize.TreebankWordTokenizer"
] | [((4024, 4040), 'spacy.load', 'spacy.load', (['"""en"""'], {}), "('en')\n", (4034, 4040), False, 'import spacy\n'), ((2476, 2500), 're.sub', 're.sub', (['"""\\\\d"""', '"""0"""', 'word'], {}), "('\\\\d', '0', word)\n", (2482, 2500), False, 'import re\n'), ((3380, 3398), 'warnings.warn', 'warnings.warn', (['msg'], {}), ... |
import logging
import time
from tests.common.helpers.assertions import pytest_assert
logger = logging.getLogger(__name__)
def join_master(duthost, master_vip):
"""
Joins DUT to Kubernetes master
Args:
duthost: DUT host object
master_vip: VIP of high availability Kubernetes master
If... | [
"logging.getLogger",
"time.sleep"
] | [((96, 123), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (113, 123), False, 'import logging\n'), ((3693, 3718), 'time.sleep', 'time.sleep', (['min_wait_time'], {}), '(min_wait_time)\n', (3703, 3718), False, 'import time\n'), ((3995, 4021), 'time.sleep', 'time.sleep', (['poll_wait_secs'... |
from django.db import models
from django.contrib.auth.models import User
class Post(models.Model):
user = models.ForeignKey(User, related_name='posts', on_delete=models.CASCADE)
content = models.TextField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=... | [
"django.db.models.DateTimeField",
"django.db.models.TextField",
"django.db.models.ForeignKey"
] | [((111, 182), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'related_name': '"""posts"""', 'on_delete': 'models.CASCADE'}), "(User, related_name='posts', on_delete=models.CASCADE)\n", (128, 182), False, 'from django.db import models\n'), ((197, 215), 'django.db.models.TextField', 'models.TextField', (... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `database` module."""
import pytest
import os
import sqlite3
from packit_app.database import Database
def test_database_connection():
db = Database()
assert type(db.cur) is sqlite3.Cursor
assert type(db.connection) is sqlite3.Connection
asse... | [
"packit_app.database.Database",
"os.path.exists",
"pytest.raises"
] | [((206, 216), 'packit_app.database.Database', 'Database', ([], {}), '()\n', (214, 216), False, 'from packit_app.database import Database\n'), ((412, 422), 'packit_app.database.Database', 'Database', ([], {}), '()\n', (420, 422), False, 'from packit_app.database import Database\n'), ((323, 359), 'os.path.exists', 'os.pa... |
import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
from dgl.nn.pytorch.glob import SumPooling, AvgPooling, MaxPooling
"""
GIN: Graph Isomorphism Networks
HOW POWERFUL ARE GRAPH NEURAL NETWORKS? (<NAME>, <NAME>, <NAME> and <NAME>, ICLR 2019)
https://arxiv.org/pdf/1810.00826.pdf
"... | [
"torch.nn.ModuleList",
"gnns.gin_layer.MLP",
"torch.nn.Linear",
"gnns.gin_layer.ApplyNodeFunc"
] | [((930, 951), 'torch.nn.ModuleList', 'torch.nn.ModuleList', ([], {}), '()\n', (949, 951), False, 'import torch\n'), ((1573, 1617), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', 'n_classes'], {'bias': '(False)'}), '(hidden_dim, n_classes, bias=False)\n', (1582, 1617), True, 'import torch.nn as nn\n'), ((2692, 2713), '... |
# -*- coding: ascii -*-
#
# Copyright 2007, 2008, 2009, 2010, 2011
# <NAME> or his licensors, as applicable
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licen... | [
"posixpath.join",
"os.path.splitdrive",
"os.path.join",
"_setup.commands.add_option",
"os.path.splitext",
"os.path.split",
"os.path.normpath",
"_setup.commands.add_finalizer",
"distutils.filelist.FileList"
] | [((1065, 1088), 'os.path.normpath', '_os.path.normpath', (['path'], {}), '(path)\n', (1082, 1088), True, 'import os as _os\n'), ((1262, 1287), 'os.path.splitdrive', '_os.path.splitdrive', (['path'], {}), '(path)\n', (1281, 1287), True, 'import os as _os\n'), ((1446, 1466), 'os.path.split', '_os.path.split', (['path'], ... |
from django.urls import path
from . import views
urlpatterns = [
path('charge/', views.charge, name='charge'),
path('payment/', views.HomePageView.as_view(), name='payment'),
]
| [
"django.urls.path"
] | [((71, 115), 'django.urls.path', 'path', (['"""charge/"""', 'views.charge'], {'name': '"""charge"""'}), "('charge/', views.charge, name='charge')\n", (75, 115), False, 'from django.urls import path\n')] |
# Stat_Canada.py (flowsa)
# !/usr/bin/env python3
# coding=utf-8
'''
Pulls Statistics Canada data on water intake and discharge for 3 digit NAICS from 2005 - 2015
'''
import pandas as pd
import io
import zipfile
import pycountry
from flowsa.common import *
def sc_call(url, sc_response, args):
"""
Convert res... | [
"flowsa.dataclean.harmonize_units",
"pandas.read_csv",
"pandas.merge",
"io.BytesIO",
"flowsa.getFlowByActivity",
"flowsa.flowbyfunctions.aggregator",
"flowsa.common.load_bea_crosswalk",
"pandas.concat"
] | [((1472, 1509), 'pandas.concat', 'pd.concat', (['dataframe_list'], {'sort': '(False)'}), '(dataframe_list, sort=False)\n', (1481, 1509), True, 'import pandas as pd\n'), ((4432, 4543), 'flowsa.getFlowByActivity', 'flowsa.getFlowByActivity', ([], {'datasource': '"""StatCan_GDP"""', 'year': "attr['allocation_source_year']... |
import bybit
import math
import pandas as pd
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
# settings
num_orders = 3
order_size = 1
order_distance = 10
sl_risk = 0.03
tp_distance = 5
api_key = "YOUR_KEY"
api_secret = "YOUR_SECRET"
client = bybit.bybit(test=False, api_key=... | [
"dateutil.relativedelta.relativedelta",
"datetime.datetime.now",
"pandas.DataFrame",
"bybit.bybit",
"pandas.to_datetime",
"math.isnan"
] | [((288, 351), 'bybit.bybit', 'bybit.bybit', ([], {'test': '(False)', 'api_key': 'api_key', 'api_secret': 'api_secret'}), '(test=False, api_key=api_key, api_secret=api_secret)\n', (299, 351), False, 'import bybit\n'), ((3965, 3981), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (3977, 3981), True, 'import ... |
# coding: utf-8
#------------------------------------------------------------------------------------------#
# This file is part of Pyccel which is released under MIT License. See the LICENSE file or #
# go to https://github.com/pyccel/pyccel/blob/master/LICENSE for full license details. #
#------------------------... | [
"pyccel.ast.core.SeparatorComment",
"subprocess.Popen",
"os.path.join",
"os.getcwd",
"os.chdir",
"pyccel.ast.bind_c.as_static_function_call",
"pyccel.errors.errors.Errors",
"pyccel.codegen.printing.cwrappercode.cwrappercode",
"pyccel.codegen.printing.fcode.fcode",
"os.path.abspath",
"pyccel.code... | [((900, 908), 'pyccel.errors.errors.Errors', 'Errors', ([], {}), '()\n', (906, 908), False, 'from pyccel.errors.errors import Errors\n'), ((1967, 1978), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1976, 1978), False, 'import os\n'), ((1983, 2007), 'os.chdir', 'os.chdir', (['pyccel_dirpath'], {}), '(pyccel_dirpath)\n',... |
######
# If you do not want to post results on Twitter remove the lines marked with TWITTER
######
import time
import tweepy
import os
import classes.utility
import requests
from bs4 import BeautifulSoup, SoupStrainer
tools = classes.utility.ScavUtility()
iterator = 1
session = requests.session()
session.proxies = {}... | [
"requests.session",
"bs4.SoupStrainer",
"time.sleep",
"tweepy.API",
"os.popen",
"os.system",
"tweepy.OAuthHandler"
] | [((281, 299), 'requests.session', 'requests.session', ([], {}), '()\n', (297, 299), False, 'import requests\n'), ((712, 762), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (731, 762), False, 'import tweepy\n'), ((840, 856), 'tweepy.API', 't... |
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.safestring import mark_safe
from docutils.core import publish_parts
register = template.Library()
@register.filter(name='rst')
@stringfilter
def rst_to_html5(text):
parts = publish_parts(text, writer_name='html5'... | [
"docutils.core.publish_parts",
"django.utils.safestring.mark_safe",
"django.template.Library"
] | [((182, 200), 'django.template.Library', 'template.Library', ([], {}), '()\n', (198, 200), False, 'from django import template\n'), ((281, 374), 'docutils.core.publish_parts', 'publish_parts', (['text'], {'writer_name': '"""html5"""', 'settings_overrides': "{'initial_header_level': 2}"}), "(text, writer_name='html5', s... |
# -*- coding: utf-8 -*-
"""Top-level package for ballet."""
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '0.19.5'
# filter warnings
import warnings # noqa E402
warnings.filterwarnings(
action='ignore', module='scipy', message='^internal gelsd')
# silence sklearn deprecation warnings
import logging... | [
"logging.NullHandler",
"warnings.filterwarnings",
"logging.captureWarnings"
] | [((177, 265), 'warnings.filterwarnings', 'warnings.filterwarnings', ([], {'action': '"""ignore"""', 'module': '"""scipy"""', 'message': '"""^internal gelsd"""'}), "(action='ignore', module='scipy', message=\n '^internal gelsd')\n", (200, 265), False, 'import warnings\n'), ((334, 363), 'logging.captureWarnings', 'log... |
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may c... | [
"oci.util.formatted_flat_dict"
] | [((8598, 8623), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (8617, 8623), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')] |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applic... | [
"paddle.fluid.param_attr.ParamAttr"
] | [((1124, 1157), 'paddle.fluid.param_attr.ParamAttr', 'ParamAttr', ([], {'name': "(name + '_weights')"}), "(name=name + '_weights')\n", (1133, 1157), False, 'from paddle.fluid.param_attr import ParamAttr\n'), ((1365, 1399), 'paddle.fluid.param_attr.ParamAttr', 'ParamAttr', ([], {'name': "(bn_name + '_scale')"}), "(name=... |
from pyscf import gto
import radii
def from_frag(xyz, frags, chgs, spins, gjfhead='', scrfhead='', gjfname='', basis=None, wfnpath=None):
# mol = gto.Mole()
# mol.atom = xyz
# mol.basis = bas
# mol.verbose = 1
# mol.build()
#
if isinstance(frags[0], str):
frags = str2list(frags)
guess_fr... | [
"pyscf.gto.format_atom"
] | [((1183, 1211), 'pyscf.gto.format_atom', 'gto.format_atom', (['xyz'], {'unit': '(1)'}), '(xyz, unit=1)\n', (1198, 1211), False, 'from pyscf import gto\n')] |
"""
File to control the parameters of the SITE approach and to specify the postprocessing functionality.
The parameters for each equation are the ones used in the paper. All results of the paper
'Sparse Identification of Truncation Errors' by Thaler, Paehler and Adams, 2019 can be replicated only by
setting the appropri... | [
"Postprocessing_Util.calculate_orders",
"Postprocessing_Util.plot_resolution",
"SITE.site"
] | [((6369, 6972), 'SITE.site', 'SITE.site', (['equation', 'x_nodes', 't_steps', 'D', 'P', 'combinations'], {'optimize_spline': 'optimize_spline', 'x_min': 'x_min', 'x_max': 'x_max', 'acc_space': 'acc_space', 'acc_time': 'acc_space', 'preconditioner': 'preconditioner', 'a': 'a', 'cfl': 'cfl', 'n_ctr_train': 'n_ctr_train',... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = '<NAME>'
import os
import cv2
import numpy
def find_images(path, recursive=False, ignore=True):
if os.path.isfile(path):
yield path
elif os.path.isdir(path):
assert os.path.isdir(path), 'FileIO - get_images: Directory does not exist'... | [
"os.listdir",
"cv2.imshow",
"os.path.isfile",
"os.path.isdir",
"cv2.resize"
] | [((166, 186), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (180, 186), False, 'import os\n'), ((1291, 1313), 'cv2.resize', 'cv2.resize', (['img', 'shape'], {}), '(img, shape)\n', (1301, 1313), False, 'import cv2\n'), ((1318, 1340), 'cv2.imshow', 'cv2.imshow', (['title', 'img'], {}), '(title, img)\n',... |
from aoc2019 import *
import unittest
class Day1(unittest.TestCase):
def test_mass_12(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(12), 2)
def test_mass_14(self):
chall = Rocket()
self.assertEqual(chall.calc_fuel_weight(14), 2)
def test_mass_1969(self):
... | [
"unittest.main"
] | [((940, 955), 'unittest.main', 'unittest.main', ([], {}), '()\n', (953, 955), False, 'import unittest\n')] |
"""
Contains the definition of Compound.
"""
from xdtools.artwork import Artwork
from xdtools.utils import Point
class Compound(Artwork):
"""
A compound shape.
=== Attributes ===
uid - the unique id of this Compound shape.
name - the name of this Compound shape as it appears in the Layers panel.... | [
"xdtools.utils.Point"
] | [((952, 963), 'xdtools.utils.Point', 'Point', (['x', 'y'], {}), '(x, y)\n', (957, 963), False, 'from xdtools.utils import Point\n')] |
#!/usr/bin/env python
# -*- coding: latin-1 -*-
"""
Test play sounds. (January 1st, 2015)
Piece of SimpleGUICS2Pygame.
https://bitbucket.org/OPiMedia/simpleguics2pygame
GPLv3 --- Copyright (C) 2015 <NAME>
http://www.opimedia.be/
"""
import time
try:
import simplegui
SIMPLEGUICS2PYGAME = False
except Impor... | [
"SimpleGUICS2Pygame.simpleguics2pygame.Sound",
"time.time",
"SimpleGUICS2Pygame.simpleguics2pygame._LocalSound"
] | [((457, 549), 'SimpleGUICS2Pygame.simpleguics2pygame.Sound', 'simplegui.Sound', (['"""http://commondatastorage.googleapis.com/codeskulptor-assets/jump.ogg"""'], {}), "(\n 'http://commondatastorage.googleapis.com/codeskulptor-assets/jump.ogg')\n", (472, 549), True, 'import SimpleGUICS2Pygame.simpleguics2pygame as sim... |
import os
import os.path as osp
import pickle
import time
import numpy as np
from multiprocessing import Pool
from ..utils import get_bbox_dim
from .misc import read_img_info, change_cls_order, get_classes
def load_imgs(img_dir, ann_dir=None, classes=None, nproc=10,
def_bbox_type='poly'):
assert d... | [
"os.path.exists",
"os.listdir",
"os.makedirs",
"os.path.join",
"os.path.split",
"os.path.isfile",
"numpy.zeros",
"os.path.isdir",
"multiprocessing.Pool",
"time.time"
] | [((376, 394), 'os.path.isdir', 'osp.isdir', (['img_dir'], {}), '(img_dir)\n', (385, 394), True, 'import os.path as osp\n'), ((589, 600), 'time.time', 'time.time', ([], {}), '()\n', (598, 600), False, 'import time\n'), ((1281, 1292), 'time.time', 'time.time', ([], {}), '()\n', (1290, 1292), False, 'import time\n'), ((15... |
from setuptools import setup
setup(name='emoji_map',
version='0.1',
description='Maps unicode emoji to its description',
url='http://github.com/rchurch4/emoji_map',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['emoji_map'],
include_package_data=True,... | [
"setuptools.setup"
] | [((30, 308), 'setuptools.setup', 'setup', ([], {'name': '"""emoji_map"""', 'version': '"""0.1"""', 'description': '"""Maps unicode emoji to its description"""', 'url': '"""http://github.com/rchurch4/emoji_map"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['emoji_ma... |
import json
import os
import pytest
import tempfile
@pytest.fixture(scope='session', autouse=True)
def working_test_dir():
# create a produce a temporary directory to use for everything
tmp_working_dir = tempfile.TemporaryDirectory()
yield tmp_working_dir.name
# delete it at the end of the session
... | [
"tempfile.TemporaryDirectory",
"os.path.join",
"os.path.split",
"json.load",
"pytest.fixture",
"json.dump"
] | [((55, 100), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", (69, 100), False, 'import pytest\n'), ((361, 406), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'autouse': '(True)'}), "(scope='session', autouse=True)\n", ... |
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable ... | [
"unittest.main",
"gdata.test_config.build_suite"
] | [((7440, 7489), 'gdata.test_config.build_suite', 'conf.build_suite', (['[AccountFeedTest, DataFeedTest]'], {}), '([AccountFeedTest, DataFeedTest])\n', (7456, 7489), True, 'import gdata.test_config as conf\n'), ((7521, 7536), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7534, 7536), False, 'import unittest\n')] |
#!/usr/bin/env python
import rospy
from math import sqrt
from geometry_msgs.msg import Twist, PoseStamped, Pose2D, PointStamped, PoseWithCovarianceStamped
from std_msgs.msg import Empty, String
import Queue
goal_point = rospy.Publisher(
"move_base_simple/goal", PoseStamped, queue_size=1)
servo = rospy.Publisher(... | [
"rospy.Subscriber",
"rospy.init_node",
"math.sqrt",
"geometry_msgs.msg.PoseStamped",
"rospy.spin",
"rospy.sleep",
"rospy.Publisher",
"Queue.Queue",
"rospy.loginfo",
"std_msgs.msg.Empty"
] | [((222, 289), 'rospy.Publisher', 'rospy.Publisher', (['"""move_base_simple/goal"""', 'PoseStamped'], {'queue_size': '(1)'}), "('move_base_simple/goal', PoseStamped, queue_size=1)\n", (237, 289), False, 'import rospy\n'), ((304, 349), 'rospy.Publisher', 'rospy.Publisher', (['"""servo"""', 'Empty'], {'queue_size': '(1)'}... |
from unittest import TestCase
from pylibsrtp import Error, Policy, Session
RTP = (
b"\x80\x08\x00\x00" # version, packet type, sequence number
b"\x00\x00\x00\x00" # timestamp
b"\x00\x00\x30\x39" # ssrc: 12345
) + (b"\xd4" * 160)
RTCP = (
b"\x80\xc8\x00\x06\xf3\xcb\x20\x01\x83\xab\x03\xa1\xeb\x02\x0... | [
"pylibsrtp.Policy",
"pylibsrtp.Session"
] | [((638, 646), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (644, 646), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1118, 1126), 'pylibsrtp.Policy', 'Policy', ([], {}), '()\n', (1124, 1126), False, 'from pylibsrtp import Error, Policy, Session\n'), ((1544, 1552), 'pylibsrtp.Policy', 'Policy', ([], {})... |
#!/usr/bin/python
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required ... | [
"yaml.safe_load",
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.baremetal_deploy.expand"
] | [((6324, 6393), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'supports_check_mode': '(False)'}), '(argument_spec=argument_spec, supports_check_mode=False)\n', (6337, 6393), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((6270, 6299), 'yaml.safe... |
# Generated by Django 3.2.6 on 2021-09-10 11:56
import uuid
import django.db.models.deletion
import django.db.models.fields
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("user_management", "0002_remove_username"),
... | [
"django.db.models.ForeignKey",
"django.db.models.JSONField",
"django.db.models.CharField",
"django.db.models.DateTimeField",
"django.db.models.UUIDField"
] | [((487, 578), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (503, 578), False, 'from django.db import migrations, models\n'), ((7... |
from __future__ import absolute_import
from collections import namedtuple
from datetime import datetime, timedelta
import pytz
from casexml.apps.case.dbaccessors import get_open_case_docs_in_domain
from casexml.apps.case.mock import CaseBlock
from casexml.apps.case.xml import V2
import uuid
from xml.etree import Elemen... | [
"pytz.timezone",
"corehq.util.timezones.conversions.UserTime",
"collections.namedtuple",
"corehq.util.quickcache.quickcache",
"dimagi.utils.couch.CriticalSection",
"datetime.datetime.utcnow",
"casexml.apps.case.mock.CaseBlock",
"corehq.feature_previews.CALLCENTER.enabled",
"uuid.uuid4",
"corehq.ap... | [((819, 894), 'collections.namedtuple', 'namedtuple', (['"""DomainLite"""', '"""name default_timezone cc_case_type use_fixtures"""'], {}), "('DomainLite', 'name default_timezone cc_case_type use_fixtures')\n", (829, 894), False, 'from collections import namedtuple\n'), ((2034, 2084), 'collections.namedtuple', 'namedtup... |
from snovault import (
AuditFailure,
audit_checker,
)
from .formatter import (
audit_link,
path_to_text,
)
def audit_contributor_institute(value, system):
if value['status'] in ['deleted']:
return
need_inst = []
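    # collect contributors that appear to be missing institute information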
if 'corresponding_contributors' in value:
for user in value['... | [
"snovault.AuditFailure",
"snovault.audit_checker"
] | [((5487, 5587), 'snovault.audit_checker', 'audit_checker', (['"""Dataset"""'], {'frame': "['original_files', 'corresponding_contributors', 'contributors']"}), "('Dataset', frame=['original_files',\n 'corresponding_contributors', 'contributors'])\n", (5500, 5587), False, 'from snovault import AuditFailure, audit_chec... |
import numpy as np
import cv2
import math
import datetime
from datetime import timedelta as Delta
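# drawing parameters for the orbit animation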
h = 300
w = 300
cap = cv2.VideoCapture(0)
SUN_LOC = (200, 70)
SUN_RSIZE = 20
ORBITAL_R = 10
def Orbiral(frame, Centerloc, orbit_r, size_r, phi, color):
x_orbit=Centerloc[0]+int(orbit_r*np.cos(np.deg2rad(phi))... | [
"cv2.setWindowProperty",
"cv2.imshow",
"cv2.circle",
"numpy.deg2rad",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"datetime.datetime.today",
"datetime.timedelta",
"cv2.waitKey",
"cv2.namedWindow"
] | [((125, 144), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (141, 144), False, 'import cv2\n'), ((775, 824), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Frame"""', 'cv2.WND_PROP_FULLSCREEN'], {}), "('Frame', cv2.WND_PROP_FULLSCREEN)\n", (790, 824), False, 'import cv2\n'), ((826, 884), 'cv2.setWindowPr... |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
#
"""Tests for editor and outline explorer interaction."""
# Test library imports
import pytest
# Local imports
from spyder.plugins.outlineexplorer.widgets import OutlineExplorerWidget
from spyder.plugin... | [
"spyder.utils.qthelpers.qapplication",
"spyder.plugins.editor.widgets.codeeditor.CodeEditor",
"pytest.main",
"spyder.plugins.outlineexplorer.editor.OutlineExplorerProxyEditor",
"pytest.fixture",
"spyder.plugins.outlineexplorer.widgets.OutlineExplorerWidget"
] | [((1165, 1181), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1179, 1181), False, 'import pytest\n'), ((1272, 1286), 'spyder.utils.qthelpers.qapplication', 'qapplication', ([], {}), '()\n', (1284, 1286), False, 'from spyder.utils.qthelpers import qapplication\n'), ((1300, 1323), 'spyder.plugins.editor.widgets.... |
from typing import TYPE_CHECKING
from drip.utils import json_list, json_object, raise_response
if TYPE_CHECKING:
from requests import Session
class Subscribers:
session: 'Session'
@json_object('subscribers')
def create_or_update_subscriber(self, email, marshall=True, **options):
"""
... | [
"drip.utils.json_object",
"drip.utils.raise_response",
"drip.utils.json_list"
] | [((199, 225), 'drip.utils.json_object', 'json_object', (['"""subscribers"""'], {}), "('subscribers')\n", (210, 225), False, 'from drip.utils import json_list, json_object, raise_response\n'), ((2147, 2171), 'drip.utils.json_list', 'json_list', (['"""subscribers"""'], {}), "('subscribers')\n", (2156, 2171), False, 'from... |
from flask import Flask, request
from flask_httpauth import HTTPBasicAuth
from auth_handler import AuthHandler
from cache import Cache
from os import environ
from yaml import safe_load
import logging
from connection_provider import ConnectionProvider
# init logging
logging.basicConfig(format='[%(asctime)s] [%(levelna... | [
"logging.basicConfig",
"flask_httpauth.HTTPBasicAuth",
"connection_provider.ConnectionProvider",
"flask.Flask",
"yaml.safe_load",
"logging.info",
"cache.Cache"
] | [((268, 364), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""[%(asctime)s] [%(levelname)s] %(message)s"""', 'level': 'logging.DEBUG'}), "(format='[%(asctime)s] [%(levelname)s] %(message)s',\n level=logging.DEBUG)\n", (287, 364), False, 'import logging\n'), ((385, 400), 'flask.Flask', 'Flask', (['_... |
import json
import logging
from typing import Tuple
import requests
from django.conf import settings
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.views.generic import FormView, TemplateView
from .forms import CodeSchoolForm
logger = logging.getLogger(__name__)
class IndexView(Templat... | [
"logging.getLogger",
"json.dumps"
] | [((267, 294), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (284, 294), False, 'import logging\n'), ((1493, 1511), 'json.dumps', 'json.dumps', (['params'], {}), '(params)\n', (1503, 1511), False, 'import json\n')] |
# Create your service here.
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "Copyright 2019."
from utils.commons import safe_invoke
class NotificationManager(object):
def __init__(self, *args, **kwargs):
pass
def notify(self, *args, **kwargs):
pass
@staticmethod
def n... | [
"utils.commons.safe_invoke",
"utils.notification.email.email_manager.EmailManager"
] | [((368, 404), 'utils.commons.safe_invoke', 'safe_invoke', (['notif_mgr.notify', '*args'], {}), '(notif_mgr.notify, *args)\n', (379, 404), False, 'from utils.commons import safe_invoke\n'), ((547, 576), 'utils.notification.email.email_manager.EmailManager', 'EmailManager', (['*args'], {}), '(*args, **kwargs)\n', (559, 5... |
import cv2
import urllib
import numpy as np
import multiprocessing as mp
stream = 'http://192.168.53.114:8000/streamLow.mjpg'
stream2 = 'http://192.168.53.114:8001/streamLow.mjpg'
def procImg(str, wind, stop):
bytes = ''
stream = urllib.urlopen(str)
while not stop.is_set():
try:
bytes... | [
"multiprocessing.Event",
"multiprocessing.Process",
"urllib.urlopen",
"cv2.imshow",
"numpy.fromstring",
"cv2.waitKey"
] | [((241, 260), 'urllib.urlopen', 'urllib.urlopen', (['str'], {}), '(str)\n', (255, 260), False, 'import urllib\n'), ((1203, 1213), 'multiprocessing.Event', 'mp.Event', ([], {}), '()\n', (1211, 1213), True, 'import multiprocessing as mp\n'), ((1228, 1280), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'procImg... |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cProfile
import io
import pstats
cache = {}
PROFILE_SWITCH = True
class Profiler:
def __init__(self):
self.profiler = cProfile.Profile()
def profile(self, func, *args, **kwargs):
... | [
"pstats.Stats",
"cProfile.Profile",
"io.StringIO"
] | [((250, 268), 'cProfile.Profile', 'cProfile.Profile', ([], {}), '()\n', (266, 268), False, 'import cProfile\n'), ((433, 446), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (444, 446), False, 'import io\n'), ((494, 543), 'pstats.Stats', 'pstats.Stats', (['self.profiler'], {'stream': 'string_stream'}), '(self.profiler,... |
import aiohttp
import aiohttp_jinja2
import pytest
from ddtrace.contrib.aiohttp.middlewares import trace_app
from ddtrace.contrib.aiohttp_jinja2.patch import patch as patch_jinja2
from ddtrace.internal.utils import version
from ddtrace.pin import Pin
from .app.web import setup_app
if version.parse_version(aiohttp._... | [
"ddtrace.contrib.aiohttp_jinja2.patch.patch",
"ddtrace.pin.Pin.override",
"ddtrace.internal.utils.version.parse_version",
"ddtrace.contrib.aiohttp.middlewares.trace_app"
] | [((289, 331), 'ddtrace.internal.utils.version.parse_version', 'version.parse_version', (['aiohttp.__version__'], {}), '(aiohttp.__version__)\n', (310, 331), False, 'from ddtrace.internal.utils import version\n'), ((519, 541), 'ddtrace.contrib.aiohttp.middlewares.trace_app', 'trace_app', (['app', 'tracer'], {}), '(app, ... |
from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField, HiddenField, IntegerField
from wtforms.validators import DataRequired, NumberRange
class ConnectForm(FlaskForm):
'''
The form for connecting to the Arduino
'''
    id = HiddenField('A hidden field')
serial_port = StringField... | [
"wtforms.validators.NumberRange",
"wtforms.SubmitField",
"wtforms.StringField",
"wtforms.HiddenField",
"wtforms.validators.DataRequired"
] | [((260, 289), 'wtforms.HiddenField', 'HiddenField', (['"""A hidden field"""'], {}), "('A hidden field')\n", (271, 289), False, 'from wtforms import StringField, SubmitField, HiddenField, IntegerField\n'), ((410, 484), 'wtforms.StringField', 'StringField', (['"""Name of the Arduino:"""'], {'description': '"""Name"""', '... |
import pandas as pd
import numpy as np
import os
import tensorflow as tf
####### STUDENTS FILL THIS OUT ######
#Question 3
def reduce_dimension_ndc(df, ndc_df):
'''
df: pandas dataframe, input dataset
ndc_df: pandas dataframe, drug code dataset used for mapping in generic names
return:
df: pand... | [
"tensorflow.feature_column.categorical_column_with_vocabulary_file",
"pandas.merge",
"os.path.join",
"tensorflow.feature_column.numeric_column",
"tensorflow.feature_column.indicator_column",
"functools.partial",
"pandas.DataFrame"
] | [((399, 498), 'pandas.merge', 'pd.merge', (['df', "ndc_df[['Proprietary Name', 'NDC_Code']]"], {'left_on': '"""ndc_code"""', 'right_on': '"""NDC_Code"""'}), "(df, ndc_df[['Proprietary Name', 'NDC_Code']], left_on='ndc_code',\n right_on='NDC_Code')\n", (407, 498), True, 'import pandas as pd\n'), ((1480, 1496), 'panda... |
'''
This library is used to incorporate
'''
import numpy as np
def cell_prob_with_nucleus(cell, nucleus):
'''
    This function is used to figure out whether a region is a cell or an empty hole (without a nucleus)
    :param cell: segmentation results with different labels
:param nucleus: nucleus RawMemb image (... | [
"numpy.zeros_like",
"numpy.unique"
] | [((496, 531), 'numpy.zeros_like', 'np.zeros_like', (['cell'], {'dtype': 'np.uint8'}), '(cell, dtype=np.uint8)\n', (509, 531), True, 'import numpy as np\n'), ((439, 454), 'numpy.unique', 'np.unique', (['cell'], {}), '(cell)\n', (448, 454), True, 'import numpy as np\n')] |
import calendar
import datetime
import re
import sys
from dateutil.relativedelta import relativedelta
import gam
from gam.var import *
from gam import controlflow
from gam import display
from gam import gapi
from gam import utils
from gam.gapi.directory import orgunits as gapi_directory_orgunits
def build():
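    # returns a service object for the Reports API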
re... | [
"dateutil.relativedelta.relativedelta",
"gam.gapi.got_total_items_msg",
"sys.exit",
"datetime.timedelta",
"gam.gapi.directory.orgunits.getOrgUnitId",
"gam.utils.get_yyyymmdd",
"gam.utils.get_time_or_delta_from_now",
"gam.gapi.get_enum_values_minus_unspecified",
"gam.controlflow.invalid_argument_exit... | [((325, 355), 'gam.buildGAPIObject', 'gam.buildGAPIObject', (['"""reports"""'], {}), "('reports')\n", (344, 355), False, 'import gam\n'), ((4653, 4679), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4671, 4679), False, 'import datetime\n'), ((10144, 10205), 'gam.display.write_csv_fil... |
#!/usr/bin/env python2.7
import sys
infiles = sys.argv[1:]
data = []
genes = []
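# read whitespace-separated <gene> <count> pairs from every input file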
for i, fname in enumerate(infiles):
sys.stderr.write(fname + '\n')
d = []
with open(fname, 'rb') as ihandle:
for j, line in enumerate(ihandle):
g, c = line.strip().split()
if i != 0 and g != gene... | [
"sys.stderr.write"
] | [((121, 151), 'sys.stderr.write', 'sys.stderr.write', (["(fname + '\\n')"], {}), "(fname + '\\n')\n", (137, 151), False, 'import sys\n')] |
import numpy as np
from .image_transforms import mat_to_gray
def rgb2hcv(Blue, Green, Red):
"""transform red green blue arrays to a color space
Parameters
----------
Blue : np.array, size=(m,n)
Blue band of satellite image
Green : np.array, size=(m,n)
Green band of satellite image... | [
"numpy.radians",
"numpy.dstack",
"numpy.ceil",
"numpy.copy",
"numpy.sqrt",
"numpy.amin",
"numpy.divide",
"numpy.zeros_like",
"numpy.array",
"numpy.linalg.inv",
"numpy.einsum",
"numpy.cos",
"numpy.arctan2",
"numpy.sin",
"numpy.amax",
"numpy.remainder"
] | [((2091, 2184), 'numpy.array', 'np.array', (['[(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -0.523, +0.311)]'], {}), '([(+0.299, +0.587, +0.114), (+0.596, -0.275, -0.321), (+0.212, -\n 0.523, +0.311)])\n', (2099, 2184), True, 'import numpy as np\n'), ((2227, 2256), 'numpy.dstack', 'np.dstack', (['(Re... |
import pandas as pd
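# load the pre-split training features, training labels, and test features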
X_train = pd.read_csv("X_train.csv")
df_y = pd.read_csv("y_train.csv")
y_train = df_y["y"]
X_test = pd.read_csv("X_test.csv")
| [
"pandas.read_csv"
] | [((31, 57), 'pandas.read_csv', 'pd.read_csv', (['"""X_train.csv"""'], {}), "('X_train.csv')\n", (42, 57), True, 'import pandas as pd\n'), ((65, 91), 'pandas.read_csv', 'pd.read_csv', (['"""y_train.csv"""'], {}), "('y_train.csv')\n", (76, 91), True, 'import pandas as pd\n'), ((122, 147), 'pandas.read_csv', 'pd.read_csv'... |
import logging
from flask import render_template, request, redirect, session, url_for, flash
from flask.ext.restful import abort
from flask_login import current_user, login_required
from redash import models, settings
from redash.wsgi import app
from redash.utils import json_dumps
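# public endpoint that renders a single query visualization for embedding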
@app.route('/embed/query/<query_id... | [
"redash.utils.json_dumps",
"redash.wsgi.app.route",
"redash.models.Query.get_by_id",
"flask.ext.restful.abort"
] | [((287, 377), 'redash.wsgi.app.route', 'app.route', (['"""/embed/query/<query_id>/visualization/<visualization_id>"""'], {'methods': "['GET']"}), "('/embed/query/<query_id>/visualization/<visualization_id>',\n methods=['GET'])\n", (296, 377), False, 'from redash.wsgi import app\n'), ((442, 474), 'redash.models.Query... |
from unittest import TestCase
import os
class TestSet_up_logger(TestCase):
def test_set_up_logger(self):
from utils import set_up_logger
from logging import Logger
logger = set_up_logger("test", "test.log")
self.assertIsInstance(logger, Logger)
os.remove("test.log")
| [
"utils.set_up_logger",
"os.remove"
] | [((205, 238), 'utils.set_up_logger', 'set_up_logger', (['"""test"""', '"""test.log"""'], {}), "('test', 'test.log')\n", (218, 238), False, 'from utils import set_up_logger\n'), ((293, 314), 'os.remove', 'os.remove', (['"""test.log"""'], {}), "('test.log')\n", (302, 314), False, 'import os\n')] |
# the comments in this file were made while learning, as reminders
# to RUN APP IN CMD PROMPT: cd to this directory, or place in default CMD directory:
# then run 'python rubicon_reminders_cli.py'
from os import listdir
from datetime import datetime
# this assigns dt variable as date + timestamp
dt = (datetime.now()... | [
"datetime.datetime.now",
"os.listdir"
] | [((306, 320), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (318, 320), False, 'from datetime import datetime\n'), ((557, 566), 'os.listdir', 'listdir', ([], {}), '()\n', (564, 566), False, 'from os import listdir\n')] |
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import time
import weakref
from PySide import QtGui
from mcedit2.widgets.layout import Column
log = logging.getLogger(__name__)
class InfoPanel(QtGui.QWidget):
def __init__(self, attrs, signals... | [
"logging.getLogger",
"mcedit2.widgets.layout.Column",
"time.time",
"PySide.QtGui.QLabel",
"PySide.QtGui.QWidget.__init__",
"weakref.ref"
] | [((222, 249), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (239, 249), False, 'import logging\n'), ((912, 950), 'PySide.QtGui.QWidget.__init__', 'QtGui.QWidget.__init__', (['self'], {}), '(self, **kwargs)\n', (934, 950), False, 'from PySide import QtGui\n'), ((1035, 1046), 'time.time', ... |