| code (string · lengths 22–1.05M) | apis (list · lengths 1–3.31k) | extract_api (string · lengths 75–3.25M) |
|---|---|---|
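Each row below pairs a (truncated) Python source file with the calls extracted from it: the `apis` cell lists the dotted names of the called APIs, and the `extract_api` cell holds one tuple per call site. Judging only from the complete tuples visible in the rows, each entry appears to carry the character span of the call, the dotted API name, the short callee name, the positional and keyword arguments, the call text, the span of the callee name, a boolean flag, and the originating import statement. The sketch below parses one such entry with `ast.literal_eval`; the unpacked field names are illustrative guesses, not part of any documented schema.

```python
import ast

# A complete extract_api entry copied verbatim from the django.template row
# further down. The field names used in the unpacking are assumptions made
# for readability; the meaning of the boolean flag is not stated in this dump.
entry_text = (
    "((94, 103), 'django.template.Library', 'Library', ([], {}), '()\\n', "
    "(101, 103), False, 'from django.template import Library\\n')"
)

(call_span, dotted_name, short_name, (args, kwargs),
 call_text, name_span, flag, import_stmt) = ast.literal_eval(entry_text)

print(dotted_name, call_span, import_stmt.strip())
# -> django.template.Library (94, 103) from django.template import Library
```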
import os
import numpy as np
from sys import platform, path
if platform == "linux" or platform == "linux2":
path.insert(1, os.path.dirname(os.getcwd()) + "/src")
FILE_NAME = os.path.dirname(os.getcwd()) + "/data" + "/xAPI-Edu-Data-Edited.csv"
elif platform == "win32":
path.insert(1, os.path.dirname(os.getc... | [
"numpy.argpartition",
"DataPreprocessing.FeaturePreprocess",
"os.getcwd",
"DataProcessing.save_file",
"DataProcessing.load_file",
"DataProcessing.ModelValidating",
"DataPreprocessing.Preprocess",
"DataProcessing.ModelTuning"
] | [((2478, 2504), 'DataPreprocessing.Preprocess', 'Preprocess', ([], {'data': 'FILE_NAME'}), '(data=FILE_NAME)\n', (2488, 2504), False, 'from DataPreprocessing import Preprocess, FeaturePreprocess\n'), ((4600, 4677), 'DataPreprocessing.FeaturePreprocess', 'FeaturePreprocess', (['X_data'], {'n_components': 'n_components',... |
import io
import time
import wx
from .screenshot_thread import ScreenshotThread, EVT_SCREENSHOT
class DeviceFrame(wx.Frame):
def __init__(self, device,
png=True,
resize_quality=wx.IMAGE_QUALITY_NORMAL,
size_divisor=640,
*args, **kwargs):
super().__init__(N... | [
"wx.AutoBufferedPaintDC"
] | [((3362, 3390), 'wx.AutoBufferedPaintDC', 'wx.AutoBufferedPaintDC', (['self'], {}), '(self)\n', (3384, 3390), False, 'import wx\n')] |
from django.template import Library
from django.utils.safestring import mark_safe
register = Library()
@register.filter
def version_name(revision, autoescape=None):
name = "<strong>%s</strong> " % revision.version_name \
if revision.version_name else ""
return mark_safe("%srev. %d" % (name, revis... | [
"django.utils.safestring.mark_safe",
"django.template.Library"
] | [((94, 103), 'django.template.Library', 'Library', ([], {}), '()\n', (101, 103), False, 'from django.template import Library\n'), ((284, 341), 'django.utils.safestring.mark_safe', 'mark_safe', (["('%srev. %d' % (name, revision.revision_number))"], {}), "('%srev. %d' % (name, revision.revision_number))\n", (293, 341), F... |
import toml
def pipfile_decode(data):
parsed_toml = toml.loads(data)
res = dict()
# print(parsed_toml)
res["dependencies"] = parsed_toml["packages"]
res["dev-dependencies"] = parsed_toml["dev-packages"]
return res
def pipfile_encode(data):
res = dict()
res["packages"] = dict()
f... | [
"toml.loads",
"toml.dumps"
] | [((58, 74), 'toml.loads', 'toml.loads', (['data'], {}), '(data)\n', (68, 74), False, 'import toml\n'), ((419, 434), 'toml.dumps', 'toml.dumps', (['res'], {}), '(res)\n', (429, 434), False, 'import toml\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not us... | [
"py4j.java_gateway.java_import",
"pyspark.sql.SQLContext.__init__"
] | [((885, 959), 'py4j.java_gateway.java_import', 'java_import', (['sc._gateway.jvm', '"""org.apache.spark.sql.hbase.HBaseSQLContext"""'], {}), "(sc._gateway.jvm, 'org.apache.spark.sql.hbase.HBaseSQLContext')\n", (896, 959), False, 'from py4j.java_gateway import java_import\n'), ((1253, 1292), 'pyspark.sql.SQLContext.__in... |
import gym
from baselines import deepq
from baselines.common.atari_wrappers_deprecated import wrap_dqn, ScaledFloatFrame
from cloud_environment import CloudEnvironment
import numpy as np
import collections
import os
import csv
import pandas as pd
#Logging
def logger_callback(locals,globals):
done = locals[... | [
"numpy.mean",
"cloud_environment.CloudEnvironment",
"csv.writer",
"baselines.deepq.test",
"numpy.sum",
"numpy.zeros",
"collections.Counter",
"baselines.deepq.models.cnn_to_mlp",
"numpy.concatenate",
"os.stat",
"pandas.concat"
] | [((2638, 2670), 'pandas.concat', 'pd.concat', (['[e_df, ci_df]'], {'axis': '(1)'}), '([e_df, ci_df], axis=1)\n', (2647, 2670), True, 'import pandas as pd\n'), ((3001, 3216), 'cloud_environment.CloudEnvironment', 'CloudEnvironment', ([], {'img_size': 'img_size', 'radius': '[12, 13]', 'sequence_stride': '(1)', 'channels'... |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author : <NAME>
@file : at_app.py
@time : 2019/01/02
@site :
@software: PyCharm
,----------------, ,---------,
,-----------------------, ," ,"|
," ,"| ," ," |... | [
"linktools.logger.message",
"linktools.decorator.entry_point",
"linktools.android.AdbArgumentParser",
"linktools.utils.is_empty",
"linktools.utils.get_item"
] | [((11455, 11491), 'linktools.decorator.entry_point', 'entry_point', ([], {'known_errors': '[AdbError]'}), '(known_errors=[AdbError])\n', (11466, 11491), False, 'from linktools.decorator import entry_point\n'), ((11517, 11572), 'linktools.android.AdbArgumentParser', 'AdbArgumentParser', ([], {'description': '"""fetch ap... |
from itertools import product
from Tables import query
from .ImageHelper import *
get_width = lambda v: int(21 + 4*(v - 1))
def get_position_info(version:int) -> list:
width = get_width(version)
info = list()
color = [
[1, 1, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 1],
[1, 0, 1, 1, 1, 0... | [
"itertools.product",
"Tables.query.get_format_info_string",
"Tables.query.get_version_info_string",
"time.time",
"Tables.query.get_position_of_alignment_patterns"
] | [((1321, 1370), 'Tables.query.get_position_of_alignment_patterns', 'query.get_position_of_alignment_patterns', (['version'], {}), '(version)\n', (1361, 1370), False, 'from Tables import query\n'), ((2712, 2763), 'Tables.query.get_format_info_string', 'query.get_format_info_string', (['ec_level', 'mask_string'], {}), '(... |
#!/usr/bin/env python
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import numpy as np
import scipy.io
import glob
import os
import csv
import random
import tensorflow as tf
import transition_model_common as tm
# import sys
# sys.path.append('./tensorflow_hmm')
# import tensorflow_hmm.hmm as hmm
... | [
"tensorflow.nn.softmax",
"transition_model_common.create_model",
"tensorflow.cast",
"tensorflow.variables_initializer",
"transition_model_common.RobotDataLoader",
"os.path.exists",
"numpy.reshape",
"argparse.ArgumentParser",
"tensorflow.squared_difference",
"tensorflow.Session",
"tensorflow.plac... | [((350, 365), 'transition_model_common.DataLoader', 'tm.DataLoader', ([], {}), '()\n', (363, 365), True, 'import transition_model_common as tm\n'), ((583, 630), 'transition_model_common.create_model', 'tm.create_model', (['n_input', 'n_classes'], {'train': '(True)'}), '(n_input, n_classes, train=True)\n', (598, 630), T... |
# !/usr/bin/env python
# coding=UTF-8
"""
@Author: <NAME>
@LastEditors: <NAME>
@Description:
@Date: 2021-08-18
@LastEditTime: 2022-03-19
"""
import zipfile
import pickle
import gzip
import json
import re
from pathlib import Path
from typing import Union, Optional, Sequence
from ..strings import normalize_language, LA... | [
"zipfile.ZipFile",
"pathlib.Path",
"gzip.open",
"sklearn.neighbors.NearestNeighbors",
"json.load"
] | [((686, 718), 'zipfile.ZipFile', 'zipfile.ZipFile', (['cilin_path', '"""r"""'], {}), "(cilin_path, 'r')\n", (701, 718), False, 'import zipfile\n'), ((911, 941), 'zipfile.ZipFile', 'zipfile.ZipFile', (['fyh_path', '"""r"""'], {}), "(fyh_path, 'r')\n", (926, 941), False, 'import zipfile\n'), ((1301, 1332), 'gzip.open', '... |
from django import template
from django_extras.utils import humanize
register = template.Library()
@register.filter(is_safe=True)
def describe_seconds(value):
"""
Convert a seconds value into a human readable (ie week, day, hour) value.
:param value: integer value of the number of seconds.
:return: ... | [
"django.template.Library",
"django_extras.utils.humanize.describe_seconds"
] | [((82, 100), 'django.template.Library', 'template.Library', ([], {}), '()\n', (98, 100), False, 'from django import template\n'), ((374, 406), 'django_extras.utils.humanize.describe_seconds', 'humanize.describe_seconds', (['value'], {}), '(value)\n', (399, 406), False, 'from django_extras.utils import humanize\n')] |
# Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Friend relation.
Viewfinder friends define a relationship between two users predicated on confirmation of photo
sharing permission. Each friend has an associated 'status', which can be:
- 'friend': user has been marked as a friend; however, that user may no... | [
"math.log",
"viewfinder.backend.op.notification_manager.NotificationManager.NotifyUpdateFriend",
"viewfinder.backend.base.exceptions.NotFoundError",
"tornado.gen.Task",
"viewfinder.backend.db.base.DBObject._schema.GetTable"
] | [((2141, 2184), 'viewfinder.backend.db.base.DBObject._schema.GetTable', 'DBObject._schema.GetTable', (['vf_schema.FRIEND'], {}), '(vf_schema.FRIEND)\n', (2166, 2184), False, 'from viewfinder.backend.db.base import DBObject\n'), ((5805, 5896), 'tornado.gen.Task', 'gen.Task', (['Friend.Query', 'client', 'user_id', "frien... |
from ensmallen_graph import EnsmallenGraph # pylint: disable=no-name-in-module
def load_hpo() -> EnsmallenGraph:
"""Test that HPO graph can be loaded."""
graph = EnsmallenGraph.from_unsorted_csv(
edge_path="./pytests/data/edges.tsv",
sources_column="subject",
destinations_column="obj... | [
"ensmallen_graph.EnsmallenGraph.from_unsorted_csv"
] | [((174, 553), 'ensmallen_graph.EnsmallenGraph.from_unsorted_csv', 'EnsmallenGraph.from_unsorted_csv', ([], {'edge_path': '"""./pytests/data/edges.tsv"""', 'sources_column': '"""subject"""', 'destinations_column': '"""object"""', 'directed': '(False)', 'edge_types_column': '"""edge_label"""', 'node_path': '"""./pytests/... |
import sys
import getopt
import subprocess
def main(argv):
rancher_options = ''
rancher_command = ''
rancher_args = ''
try:
opts, args = getopt.getopt(argv, "o:c:a:", ["help", "rancher_options=", "rancher_command=", "rancher_args="])
except getopt.GetoptError:
print('Unrecognized A... | [
"subprocess.Popen",
"getopt.getopt",
"sys.exit"
] | [((163, 263), 'getopt.getopt', 'getopt.getopt', (['argv', '"""o:c:a:"""', "['help', 'rancher_options=', 'rancher_command=', 'rancher_args=']"], {}), "(argv, 'o:c:a:', ['help', 'rancher_options=',\n 'rancher_command=', 'rancher_args='])\n", (176, 263), False, 'import getopt\n'), ((676, 687), 'sys.exit', 'sys.exit', (... |
import unittest
import os, sys
from flask import Flask
from app import auth, api, db, home, create_app
import threading
import pytest
import requests
global flask
@pytest.fixture()
def server():
flask = create_app()
flask.run(host="localhost", port=5000)
class TestApp(unittest.TestCase):
def setUp(self... | [
"pytest.fixture",
"unittest.main",
"app.create_app"
] | [((166, 182), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (180, 182), False, 'import pytest\n'), ((209, 221), 'app.create_app', 'create_app', ([], {}), '()\n', (219, 221), False, 'from app import auth, api, db, home, create_app\n'), ((1059, 1074), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1072, 107... |
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
sns.set()
path = r'C:\Users\HP\PycharmProjects\Operaciones_Calor\Data.xlsx'
df = pd.read_excel("Data.xlsx")
df2 = pd.read_excel("time.xlsx")
df3 = np.transpose(df)
ax = sns.heatmap(data=df3)
plt.show() | [
"seaborn.set",
"seaborn.heatmap",
"pandas.read_excel",
"numpy.transpose",
"matplotlib.pyplot.show"
] | [((94, 103), 'seaborn.set', 'sns.set', ([], {}), '()\n', (101, 103), True, 'import seaborn as sns\n'), ((175, 201), 'pandas.read_excel', 'pd.read_excel', (['"""Data.xlsx"""'], {}), "('Data.xlsx')\n", (188, 201), True, 'import pandas as pd\n'), ((208, 234), 'pandas.read_excel', 'pd.read_excel', (['"""time.xlsx"""'], {})... |
from fe_store.models import (
ProductType, Attribute, Measure, Product, Association, Value
)
products = [
{
"id": "1",
"name": "Ноутбук Xiaomi Mi Notebook Pro 15.6",
"type": "Ноутбук",
"price": "54 000",
"features": {
"Процессор": "Core i5",
"Част... | [
"fe_store.models.Measure",
"fe_store.models.Association",
"fe_store.models.ProductType",
"fe_store.models.Value",
"fe_store.models.Attribute",
"fe_store.models.Product"
] | [((3030, 3221), 'fe_store.models.Product', 'Product', ([], {'name': '"""Ноутбук Xiaomi Mi Notebook Pro 15.6 (Intel Core i5)"""', 'product_type_id': '(1)', 'image': '"""https://avatars.mds.yandex.net/get-mpic/397397/img_id5839825920611580155.jpeg/5hq"""'}), "(name='Ноутбук Xiaomi Mi Notebook Pro 15.6 (Intel Core i5)',\n... |
"""
This module deals with querying and downloading LAT FITS files.
"""
# Scientific Library
import numpy as np
import pandas as pd
# Requests Urls and Manipulate Files
from astropy.utils.data import download_files_in_parallel, download_file
from astroquery import fermi
from tqdm import tqdm
import requests
import ... | [
"logging.basicConfig",
"os.makedirs",
"functools.reduce",
"tqdm.tqdm",
"logging.warning",
"numpy.any",
"numpy.sum",
"shutil.copyfile",
"os.path.dirname",
"signal.alarm",
"retry.api.retry_call",
"pandas.read_html",
"pandas.isna",
"logging.info"
] | [((457, 499), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.WARNING'}), '(level=logging.WARNING)\n', (476, 499), False, 'import logging\n'), ((2439, 2470), 'pandas.isna', 'pd.isna', (['self.grbs.at[row, col]'], {}), '(self.grbs.at[row, col])\n', (2446, 2470), True, 'import pandas as pd\n'), ((24... |
# Copyright 2022 VMware, Inc.
# SPDX-License-Identifier: Apache License 2.0
from .models import Client, Product, TestCase, UIEvent, Capture, Console
from rest_framework import serializers
class ClientSerializer(serializers.ModelSerializer):
class Meta:
model = Client
fields = ('id', 'uuid', 'role... | [
"rest_framework.serializers.DateTimeField",
"rest_framework.serializers.IntegerField",
"rest_framework.serializers.CharField"
] | [((689, 733), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""product.name"""'}), "(source='product.name')\n", (710, 733), False, 'from rest_framework import serializers\n'), ((756, 796), 'rest_framework.serializers.IntegerField', 'serializers.IntegerField', ([], {'required': '(Fals... |
# Import modules
import groupdocs_annotation_cloud
from Common import Common
class MoveFolder:
@classmethod
def Run(cls):
# Create instance of the API
api = groupdocs_annotation_cloud.FolderApi.from_config(Common.GetConfig())
try:
request = groupdocs_annotation... | [
"groupdocs_annotation_cloud.MoveFolderRequest",
"Common.Common.GetConfig"
] | [((236, 254), 'Common.Common.GetConfig', 'Common.GetConfig', ([], {}), '()\n', (252, 254), False, 'from Common import Common\n'), ((300, 438), 'groupdocs_annotation_cloud.MoveFolderRequest', 'groupdocs_annotation_cloud.MoveFolderRequest', (['"""annotationdocs1"""', '"""annotationdocs1\\\\annotationdocs"""', 'Common.myS... |
"""
--- Day 15: Oxygen System ---
https://adventofcode.com/2019/day/15
"""
from collections import deque
from aocd import data
from aoc_wim.aoc2019 import IntComputer
from aoc_wim.search import AStar
from aoc_wim.zgrid import ZGrid
NACK = 0
ACK = 1
GOAL = 2
neighbours = {
-1j: 1, # NORTH
+1j: 2, # SOUTH
... | [
"collections.deque",
"aoc_wim.search.AStar.__init__",
"aoc_wim.aoc2019.IntComputer",
"aoc_wim.zgrid.ZGrid"
] | [((434, 454), 'aoc_wim.zgrid.ZGrid', 'ZGrid', (["{(0.0j): '.'}"], {}), "({(0.0j): '.'})\n", (439, 454), False, 'from aoc_wim.zgrid import ZGrid\n'), ((471, 488), 'aoc_wim.aoc2019.IntComputer', 'IntComputer', (['data'], {}), '(data)\n', (482, 488), False, 'from aoc_wim.aoc2019 import IntComputer\n'), ((516, 531), 'colle... |
# Copyright (c) 2020, <NAME>, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code mus... | [
"numpy.clip",
"pyrado.policies.recurrent.rnn.LSTMPolicy",
"pyrado.environment_wrappers.base.EnvWrapper.__init__",
"torch.cuda.is_available",
"pyrado.algorithms.step_based.svpg.SVPGBuilder",
"pyrado.save",
"pyrado.sampling.parallel_evaluation.eval_domain_params",
"numpy.mean",
"pyrado.sampling.sample... | [((21755, 21781), 'torch.cat', 'to.cat', (['(state, action)', '(1)'], {}), '((state, action), 1)\n', (21761, 21781), True, 'import torch as to\n'), ((6471, 6608), 'numpy.random.random_sample', 'np.random.random_sample', (["(svpg_hp['algo']['num_particles'], svpg_hp['algo']['horizon'], adr_hp[\n 'evaluation_steps'], ... |
# This file is Public Domain and may be used without restrictions.
import _jpype
import jpype
from jpype.types import *
from jpype import java
import jpype.dbapi2 as dbapi2
import common
import time
try:
import zlib
except ImportError:
zlib = None
class SQLModuleTestCase(common.JPypeTestCase):
def setUp(... | [
"jpype.dbapi2.Time",
"common.JPypeTestCase.setUp",
"time.mktime",
"jpype.dbapi2.Timestamp",
"jpype.dbapi2.Date",
"jpype.dbapi2.Binary"
] | [((335, 367), 'common.JPypeTestCase.setUp', 'common.JPypeTestCase.setUp', (['self'], {}), '(self)\n', (361, 367), False, 'import common\n'), ((2372, 2397), 'jpype.dbapi2.Date', 'dbapi2.Date', (['(2002)', '(12)', '(25)'], {}), '(2002, 12, 25)\n', (2383, 2397), True, 'import jpype.dbapi2 as dbapi2\n'), ((2682, 2705), 'jp... |
"""
Stats stuff!
"""
import textwrap
import numpy as np
import pandas as pd
from scipy import stats
from IPython.display import display
from .boxes import *
from .table_display import *
__DEBUG__ = False
def debug(*args, **kwargs):
if __DEBUG__:
print(*args, **kwargs)
class Chi2Result(object):
"... | [
"textwrap.dedent",
"IPython.display.display",
"numpy.linalg.solve",
"scipy.stats.chi2.cdf",
"scipy.stats.chi2_contingency",
"pandas.crosstab",
"numpy.diag",
"numpy.kron",
"numpy.zeros",
"numpy.outer",
"pandas.concat"
] | [((2089, 2112), 'pandas.crosstab', 'pd.crosstab', (['col1', 'col2'], {}), '(col1, col2)\n', (2100, 2112), True, 'import pandas as pd\n'), ((2139, 2165), 'scipy.stats.chi2_contingency', 'stats.chi2_contingency', (['xs'], {}), '(xs)\n', (2161, 2165), False, 'from scipy import stats\n'), ((4582, 4616), 'numpy.zeros', 'np.... |
import os
import numpy as np
import matplotlib.pyplot as plt
from .kshell_utilities import atomic_numbers, loadtxt
from .general_utilities import create_spin_parity_list, gamma_strength_function_average
class LEE:
def __init__(self, directory):
self.bin_width = 0.2
self.E_max = 30
self.Ex_m... | [
"numpy.ceil",
"os.listdir",
"numpy.sum",
"numpy.linspace",
"os.path.isdir",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((563, 605), 'numpy.linspace', 'np.linspace', (['(0)', 'E_max_adjusted', '(n_bins + 1)'], {}), '(0, E_max_adjusted, n_bins + 1)\n', (574, 605), True, 'import numpy as np\n'), ((7272, 7286), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (7284, 7286), True, 'import matplotlib.pyplot as plt\n'), ((7579,... |
# vim: ts=4 sw=4 expandtab
import cgi
import os.path
import re
import unittest
_identifier = re.compile('^[A-Za-z_$][A-Za-z0-9_$]*$')
_contenttypes = (
'text/javascript',
'text/ecmascript',
'application/javascript',
'application/ecmascript',
'application/x-javascript',
)
class JSVersion:
def ... | [
"unittest.main",
"cgi.parse_header",
"re.compile"
] | [((94, 134), 're.compile', 're.compile', (['"""^[A-Za-z_$][A-Za-z0-9_$]*$"""'], {}), "('^[A-Za-z_$][A-Za-z0-9_$]*$')\n", (104, 134), False, 'import re\n'), ((4500, 4515), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4513, 4515), False, 'import unittest\n'), ((960, 983), 'cgi.parse_header', 'cgi.parse_header', (... |
from .Node import Node
from blinker import signal
###################################################################################################
# Pipeline:
#
### The main process pipeline. Responsible for orchestrating a sequence of discrete tasks, which can
### be defined as a directed acyclic graph. Each node ru... | [
"blinker.signal"
] | [((4722, 4735), 'blinker.signal', 'signal', (['"""end"""'], {}), "('end')\n", (4728, 4735), False, 'from blinker import signal\n'), ((6546, 6566), 'blinker.signal', 'signal', (['"""node start"""'], {}), "('node start')\n", (6552, 6566), False, 'from blinker import signal\n'), ((6750, 6773), 'blinker.signal', 'signal', ... |
from simulation.framework import DiscreteSimulation
from tests.helpers import TestCase
class TestSimulationFramework(TestCase):
def setUp(self):
self.sim = DiscreteSimulation(max_duration=8, available_actions=[])
def test_reset_creates_new_timeline(self):
original_timeline = self.sim.timeline... | [
"simulation.framework.DiscreteSimulation"
] | [((170, 226), 'simulation.framework.DiscreteSimulation', 'DiscreteSimulation', ([], {'max_duration': '(8)', 'available_actions': '[]'}), '(max_duration=8, available_actions=[])\n', (188, 226), False, 'from simulation.framework import DiscreteSimulation\n')] |
import os
import time
while (True):
os.system("pipenv run start --token 1<PASSWORD> --board 1 --time-factor=1 --logic LowerRight")
time.sleep(1) | [
"os.system",
"time.sleep"
] | [((41, 145), 'os.system', 'os.system', (['"""pipenv run start --token 1<PASSWORD> --board 1 --time-factor=1 --logic LowerRight"""'], {}), "(\n 'pipenv run start --token 1<PASSWORD> --board 1 --time-factor=1 --logic LowerRight'\n )\n", (50, 145), False, 'import os\n'), ((140, 153), 'time.sleep', 'time.sleep', (['(... |
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from .models import Item
from datetime import datetime
def active_auction(function):
def wrap(request, *args, **kwargs):
item = Item.objects.get(slug=kwargs['slug'])
if item.end_of_auction > timezone.now():
... | [
"django.utils.timezone.now"
] | [((299, 313), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (311, 313), False, 'from django.utils import timezone\n')] |
#!/usr/bin/python3
# To generate an Office365 token:
# python3
# from O365 import Account
# account = Account(credentials=('yourregisteredappname', 'yoursecret'))
# account.authenticate(scopes=['files.read', 'user.read', 'offline_access'])
# It will return a URL, go to this in a browser, accept the permissions, the... | [
"pandas.option_context",
"O365.Account",
"pandas.read_excel"
] | [((722, 846), 'O365.Account', 'Account', ([], {'credentials': '(registered_app_name, registered_app_secret)', 'scopes': "['files.read', 'user.read', 'offline_access']"}), "(credentials=(registered_app_name, registered_app_secret), scopes=[\n 'files.read', 'user.read', 'offline_access'])\n", (729, 846), False, 'from ... |
from flask import Blueprint
from flask.ext.restful import Api
from .endpoints.goal import Goal
from .endpoints.weight import Weight
from .endpoints.calories import Calories
api_blueprint = Blueprint('api', __name__)
api = Api(prefix='/api/v1.0')
# Register the endpoints
api.add_resource(Goal, '/goal', '/goal')
api.ad... | [
"flask.Blueprint",
"flask.ext.restful.Api"
] | [((190, 216), 'flask.Blueprint', 'Blueprint', (['"""api"""', '__name__'], {}), "('api', __name__)\n", (199, 216), False, 'from flask import Blueprint\n'), ((223, 246), 'flask.ext.restful.Api', 'Api', ([], {'prefix': '"""/api/v1.0"""'}), "(prefix='/api/v1.0')\n", (226, 246), False, 'from flask.ext.restful import Api\n')... |
"""Tests for Pipeline class"""
import pytest
from cognigraph.nodes.pipeline import Pipeline
import numpy as np
from numpy.testing import assert_array_equal
from cognigraph.tests.prepare_pipeline_tests import (
create_dummy_info,
ConcreteSource,
ConcreteProcessor,
ConcreteOutput,
)
@pytest.fixture(scop... | [
"cognigraph.tests.prepare_pipeline_tests.ConcreteSource",
"cognigraph.tests.prepare_pipeline_tests.create_dummy_info",
"numpy.ones",
"cognigraph.tests.prepare_pipeline_tests.ConcreteProcessor",
"cognigraph.tests.prepare_pipeline_tests.ConcreteOutput",
"numpy.zeros",
"pytest.fixture",
"numpy.all",
"c... | [((301, 333), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (315, 333), False, 'import pytest\n'), ((363, 379), 'cognigraph.tests.prepare_pipeline_tests.ConcreteSource', 'ConcreteSource', ([], {}), '()\n', (377, 379), False, 'from cognigraph.tests.prepare_pipeline_tests ... |
#!/usr/bin/env python
'''Testing for read_rockstar.py
@author: <NAME>
@contact: <EMAIL>
@status: Development
'''
import glob
from mock import call, patch
import numpy as np
import numpy.testing as npt
import os
import pdb
import pytest
import unittest
import galaxy_dive.read_data.rockstar as read_rockstar
import gal... | [
"numpy.testing.assert_allclose",
"galaxy_dive.read_data.rockstar.RockstarReader"
] | [((603, 660), 'galaxy_dive.read_data.rockstar.RockstarReader', 'read_rockstar.RockstarReader', (['"""./tests/data/rockstar_dir"""'], {}), "('./tests/data/rockstar_dir')\n", (631, 660), True, 'import galaxy_dive.read_data.rockstar as read_rockstar\n'), ((901, 938), 'numpy.testing.assert_allclose', 'npt.assert_allclose',... |
from django import forms
from .models import Recipe
class RecipeForm(forms.ModelForm):
class Meta:
model = Recipe
fields = (
'title',
'tags',
'cooking_time',
'text',
'image',
)
widgets = {
'tags': forms.Checkb... | [
"django.forms.CheckboxSelectMultiple"
] | [((308, 338), 'django.forms.CheckboxSelectMultiple', 'forms.CheckboxSelectMultiple', ([], {}), '()\n', (336, 338), False, 'from django import forms\n')] |
import math
from pandas_util import values_of, subset_by_value
def entropy(samples, bins=None):
"""
Compute entropy of given set of sample data points
Parameters:
samples - Pandas DataFrame; last column is taken as the class labels
Keyword Args:
bins - Number of bins/quantiles to have for co... | [
"pandas_util.subset_by_value",
"pandas_util.values_of",
"math.log2"
] | [((469, 514), 'pandas_util.values_of', 'values_of', (['samples', 'samples.columns[-1]', 'bins'], {}), '(samples, samples.columns[-1], bins)\n', (478, 514), False, 'from pandas_util import values_of, subset_by_value\n'), ((1494, 1527), 'pandas_util.values_of', 'values_of', (['samples', 'feature', 'bins'], {}), '(samples... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Python imports.
import logging
# Django imports.
from django.conf.urls import url
# Rest Framework imports.
# Third Party Library imports
# local imports.
from qzzzme_app.swagger import schema_view
app_name = 'qzzzme_app'
urlp... | [
"django.conf.urls.url"
] | [((336, 383), 'django.conf.urls.url', 'url', (['"""^docs/$"""', 'schema_view'], {'name': '"""schema_view"""'}), "('^docs/$', schema_view, name='schema_view')\n", (339, 383), False, 'from django.conf.urls import url\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# <bitbar.title>GitHub Zen</bitbar.title>
# <bitbar.version>v1.0.0</bitbar.version>
# <bitbar.author>Josh</bitbar.author>
# <bitbar.author.github>andjosh</bitbar.author.github>
# <bitbar.desc>GitHub zen in your menu bar!</bitbar.desc>
# <bitbar.dependencies>python</bitbar.... | [
"urllib2.Request",
"urllib2.urlopen",
"os.getenv"
] | [((433, 518), 'os.getenv', 'os.getenv', (['"""GITHUB_TOKEN"""', '"""Enter your GitHub.com Personal Access Token here..."""'], {}), "('GITHUB_TOKEN', 'Enter your GitHub.com Personal Access Token here...'\n )\n", (442, 518), False, 'import os\n'), ((560, 627), 'urllib2.Request', 'urllib2.Request', (['url'], {'headers'... |
from multiprocessing.sharedctypes import Value
import sympy as sp
a = sp.symbols('alpha')
def _phase2golden(alpha_l, alpha_u, functionOfAlpha, error, k, debug): # implementation of SLIDE 13
    currentError = error + 1 # forces the algorithm to enter the while loop
k += 1
while abs(currentError) > ... | [
"sympy.symbols"
] | [((72, 91), 'sympy.symbols', 'sp.symbols', (['"""alpha"""'], {}), "('alpha')\n", (82, 91), True, 'import sympy as sp\n')] |
#!/usr/bin/env python3
import rulegenerator
from random import randint
if __name__ == '__main__':
generations = 500
width = 501
rule = randint(0, 255)
init_config = randint(0, 32)
init_config = bin(init_config)[2:]
grid = rulegenerator.generate_rule_wrap(generations, init_config, rule, width)
print("Wr... | [
"rulegenerator.generate_rule_wrap",
"random.randint"
] | [((142, 157), 'random.randint', 'randint', (['(0)', '(255)'], {}), '(0, 255)\n', (149, 157), False, 'from random import randint\n'), ((175, 189), 'random.randint', 'randint', (['(0)', '(32)'], {}), '(0, 32)\n', (182, 189), False, 'from random import randint\n'), ((236, 307), 'rulegenerator.generate_rule_wrap', 'rulegen... |
import configparser as cfg
import src.scripts.index as scripts
import os
try:
cel_det = scripts.get_env_variable('REDIS_URL')
# we want to use the heroku set add-on redis server if it's available,
# but if we can't find it, then it isn't set and we must be running
# the dev server, so we connect to it ... | [
"src.scripts.index.get_env_variable",
"configparser.ConfigParser",
"os.getcwd"
] | [((549, 567), 'configparser.ConfigParser', 'cfg.ConfigParser', ([], {}), '()\n', (565, 567), True, 'import configparser as cfg\n'), ((93, 130), 'src.scripts.index.get_env_variable', 'scripts.get_env_variable', (['"""REDIS_URL"""'], {}), "('REDIS_URL')\n", (117, 130), True, 'import src.scripts.index as scripts\n'), ((16... |
import click
from ui.cli.command import Command
class Input(Command):
def __init__(self, title: str, value=None):
self.title = title
self.value = value
def render(self):
click.echo("%s\t%s" % (self.title, self.value))
| [
"click.echo"
] | [((206, 253), 'click.echo', 'click.echo', (["('%s\\t%s' % (self.title, self.value))"], {}), "('%s\\t%s' % (self.title, self.value))\n", (216, 253), False, 'import click\n')] |
import requests
import json
import time
import mysql.connector
from mysql.connector import Error
def saveindicador(conexion):
cur=conexion.cursor()
sqlconsulta="SELECT JSON_EXTRACT(datosjson,'$.internal_type') as internal_type,sum(JSON_EXTRACT(datosjson,'$.amount')) as total_amount FROM apidata group by JSON_E... | [
"json.loads",
"json.dumps",
"time.sleep",
"requests.get"
] | [((555, 571), 'json.dumps', 'json.dumps', (['dato'], {}), '(dato)\n', (565, 571), False, 'import json\n'), ((594, 617), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (604, 617), False, 'import json\n'), ((637, 681), 'json.dumps', 'json.dumps', (['json_string2'], {'ensure_ascii': '(False)'}), '(j... |
import pytest
from jobs.statuses import JobStatus
@pytest.mark.parametrize(
"code,value",
[
(JobStatus.SENT_TO_DPS, "SENT_TO_DPS"),
(JobStatus.PROCESSED_BY_DPS, "PROCESSED_BY_DPS"),
(JobStatus.COMPLETE, "COMPLETE"),
(JobStatus.NOTIFIED_VALIDATION_FAILED, "NOTIFIED_VALIDATION_FA... | [
"pytest.mark.parametrize"
] | [((53, 337), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""code,value"""', "[(JobStatus.SENT_TO_DPS, 'SENT_TO_DPS'), (JobStatus.PROCESSED_BY_DPS,\n 'PROCESSED_BY_DPS'), (JobStatus.COMPLETE, 'COMPLETE'), (JobStatus.\n NOTIFIED_VALIDATION_FAILED, 'NOTIFIED_VALIDATION_FAILED'), (JobStatus.\n CLEANED... |
from os import environ
from http.server import BaseHTTPRequestHandler, HTTPServer
from fritzconnection.lib.fritzstatus import FritzStatus
from fritzconnection import FritzConnection
if "FritzBoxUri" not in environ:
print("FritzBoxUri Missing")
quit()
if "FritzBoxUser" not in environ:
print("Frit... | [
"fritzconnection.lib.fritzstatus.FritzStatus",
"fritzconnection.FritzConnection",
"http.server.HTTPServer"
] | [((451, 571), 'fritzconnection.FritzConnection', 'FritzConnection', ([], {'address': "environ['FritzBoxUri']", 'user': "environ['FritzBoxUser']", 'password': "environ['FritzBoxPassword']"}), "(address=environ['FritzBoxUri'], user=environ['FritzBoxUser'\n ], password=environ['FritzBoxPassword'])\n", (466, 571), False... |
from django.urls import path
from . import views
urlpatterns = [
path(
"model-attrs/<int:content_type_id>/",
views.model_attrs,
name="django_signals_model_attrs",
),
]
| [
"django.urls.path"
] | [((70, 171), 'django.urls.path', 'path', (['"""model-attrs/<int:content_type_id>/"""', 'views.model_attrs'], {'name': '"""django_signals_model_attrs"""'}), "('model-attrs/<int:content_type_id>/', views.model_attrs, name=\n 'django_signals_model_attrs')\n", (74, 171), False, 'from django.urls import path\n')] |
import base64
import simplejson
from aws_xray_sdk.core import patch_all, xray_recorder
from botocore.exceptions import ClientError
from okdata.aws.logging import (
logging_wrapper,
log_add,
log_exception,
)
from okdata.resource_auth import ResourceAuthorizer
from status.StatusData import StatusData
from s... | [
"okdata.aws.logging.log_exception",
"aws_xray_sdk.core.patch_all",
"simplejson.dumps",
"status.common.extract_bearer_token",
"status.common.extract_dataset_id",
"base64.b64decode",
"status.StatusData.StatusData",
"aws_xray_sdk.core.xray_recorder.capture",
"okdata.aws.logging.log_add",
"okdata.reso... | [((429, 440), 'aws_xray_sdk.core.patch_all', 'patch_all', ([], {}), '()\n', (438, 440), False, 'from aws_xray_sdk.core import patch_all, xray_recorder\n'), ((463, 483), 'okdata.resource_auth.ResourceAuthorizer', 'ResourceAuthorizer', ([], {}), '()\n', (481, 483), False, 'from okdata.resource_auth import ResourceAuthori... |
"""
Keystone API shims. Requires v3 API. See `Keystone HTTP API
<https://developer.openstack.org/api-ref/identity/v3/>`_
"""
from requests import HTTPError
from hammers.osrest.base import BaseAPI
API = BaseAPI('identity')
def project(auth, id):
"""Retrieve project by ID"""
response = API.get(auth, '/project... | [
"hammers.osrest.base.BaseAPI"
] | [((204, 223), 'hammers.osrest.base.BaseAPI', 'BaseAPI', (['"""identity"""'], {}), "('identity')\n", (211, 223), False, 'from hammers.osrest.base import BaseAPI\n')] |
import json
from data_query_engine import DataQueryEngine
class Sql4Json(object):
def __init__(self, json_str, sql_str):
self.data = json.loads(json_str, encoding="latin1")
self.query_engine = DataQueryEngine(self.data, sql_str)
def get_results(self):
return self.query_engine.get_res... | [
"json.loads",
"data_query_engine.DataQueryEngine"
] | [((148, 187), 'json.loads', 'json.loads', (['json_str'], {'encoding': '"""latin1"""'}), "(json_str, encoding='latin1')\n", (158, 187), False, 'import json\n'), ((216, 251), 'data_query_engine.DataQueryEngine', 'DataQueryEngine', (['self.data', 'sql_str'], {}), '(self.data, sql_str)\n', (231, 251), False, 'from data_que... |
"""Contains derivative calculation with BackPACK."""
from test.core.derivatives.implementation.base import DerivativesImplementation
from test.utils import chunk_sizes
from typing import List
from torch import Tensor, einsum, zeros
from backpack.utils.subsampling import subsample
class BackpackDerivatives(Derivativ... | [
"test.utils.chunk_sizes",
"torch.einsum",
"torch.zeros",
"backpack.utils.subsampling.subsample"
] | [((4342, 4404), 'backpack.utils.subsampling.subsample', 'subsample', (['self.problem.module.input0'], {'subsampling': 'subsampling'}), '(self.problem.module.input0, subsampling=subsampling)\n', (4351, 4404), False, 'from backpack.utils.subsampling import subsample\n'), ((5173, 5217), 'torch.einsum', 'einsum', (['"""vni... |
from rest_framework import serializers
from rest_framework.serializers import raise_errors_on_nested_writes
from rest_framework.utils import model_meta
import logging
logger = logging.getLogger(__file__)
class BaseSerializer(serializers.ModelSerializer):
def update(self, instance, validated_data):
... | [
"logging.getLogger",
"rest_framework.utils.model_meta.get_field_info",
"rest_framework.serializers.raise_errors_on_nested_writes"
] | [((182, 209), 'logging.getLogger', 'logging.getLogger', (['__file__'], {}), '(__file__)\n', (199, 209), False, 'import logging\n'), ((326, 387), 'rest_framework.serializers.raise_errors_on_nested_writes', 'raise_errors_on_nested_writes', (['"""update"""', 'self', 'validated_data'], {}), "('update', self, validated_data... |
import os
import sys
import pytest
from . import pytest_utils
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.mark.parametrize('o1,o2', [
(1, 2),
(3, 3.),
({'inject_test': 'foo bar{{inject_me}}', 'inject_me': 'beezbooz'},
{'inject_test': 'foo barbeezbooz', 'inject_me': 'beezbooz'}),
])
def... | [
"os.path.abspath",
"pytest.mark.parametrize",
"pytest.raises"
] | [((116, 305), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""o1,o2"""', "[(1, 2), (3, 3.0), ({'inject_test': 'foo bar{{inject_me}}', 'inject_me':\n 'beezbooz'}, {'inject_test': 'foo barbeezbooz', 'inject_me': 'beezbooz'})]"], {}), "('o1,o2', [(1, 2), (3, 3.0), ({'inject_test':\n 'foo bar{{inject_me}}... |
"""Filter and convert annotations"""
import json
import os
import cv2
USED_ARTF_NAME = ["backpack", "phone", "survivor", "drill", "fire_extinguisher", "vent", "helmet", "rope", "breadcrumb",
"robot", "cube", "nothing"]
BLACKLIST = [
"../virtual/helmet/local_J01_000.jpg",
"../virtual/helmet/local_J... | [
"cv2.rectangle",
"argparse.ArgumentParser",
"os.path.join",
"cv2.putText",
"cv2.imshow",
"os.path.dirname",
"cv2.destroyAllWindows",
"json.load",
"cv2.waitKey"
] | [((4043, 4066), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4064, 4066), False, 'import cv2\n'), ((4147, 4179), 'os.path.dirname', 'os.path.dirname', (['annotation_file'], {}), '(annotation_file)\n', (4162, 4179), False, 'import os\n'), ((5997, 6067), 'argparse.ArgumentParser', 'argparse.Argume... |
from flask import current_app
from functools import wraps
from app import db
def transactional(func):
@wraps(func)
def commit_or_rollback(*args, **kwargs):
try:
res = func(*args, **kwargs)
db.session.commit()
return res
except Exception as e:
cu... | [
"app.db.session.commit",
"app.db.session.rollback",
"functools.wraps",
"flask.current_app.logger.error"
] | [((110, 121), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (115, 121), False, 'from functools import wraps\n'), ((232, 251), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (249, 251), False, 'from app import db\n'), ((318, 345), 'flask.current_app.logger.error', 'current_app.logger.error', (... |
import os, threading
import tkinter.ttk as ttk
import tkinter as tk
class Tree(ttk.Treeview):
def __init__(self, master, double_click=None, single_click=None, startpath=None, *args, **kwargs):
super().__init__(master, *args, **kwargs)
self.master = master
self.base = master.base
se... | [
"os.listdir",
"os.path.join",
"os.path.split",
"os.path.isfile",
"os.path.isdir",
"os.path.abspath",
"tkinter.PhotoImage",
"threading.Thread"
] | [((448, 1123), 'tkinter.PhotoImage', 'tk.PhotoImage', ([], {'data': '"""\n iVBORw0KGgoAAAANSUhEUgAAAA0AAAARCAYAAAAG/<KEY>MAAA7DAAAOwwHHb6hkAAAAGXRFWHRTb2Z0d2FyZQB\n 3d3cuaW5rc2NhcGUub3Jnm+48GgAAAUNJREFUKJHVzzFLQmEUxvH/uaipF0EbW1rEsWyLJCirKShszIKMoqnFrU9Re+YSak\n MtTU3XhnCLhj5GIEJeu5re06KScpXWnunlvM... |
from typing import IO
import yaml
class YamlFileDatabaseMixin:
async def load_yaml(self, file: IO) -> dict:
return yaml.safe_load(file)
async def dump_yaml(self, data: dict, file: IO):
return yaml.safe_dump(data, file)
| [
"yaml.safe_load",
"yaml.safe_dump"
] | [((130, 150), 'yaml.safe_load', 'yaml.safe_load', (['file'], {}), '(file)\n', (144, 150), False, 'import yaml\n'), ((220, 246), 'yaml.safe_dump', 'yaml.safe_dump', (['data', 'file'], {}), '(data, file)\n', (234, 246), False, 'import yaml\n')] |
from typing import Union, Optional, Sequence, Any, Mapping, List, Tuple, Callable
from collections.abc import Iterable
import operator
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.axes import Axes
def pie_marker(
ratios: Sequence[float],
res: int = 50,
... | [
"numpy.abs",
"numpy.column_stack",
"numpy.sum",
"numpy.linspace",
"matplotlib.pyplot.scatter",
"pandas.DataFrame",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.get_cmap"
] | [((3013, 3061), 'pandas.DataFrame', 'pd.DataFrame', (["{'x': x, 'y': y, 'ratios': ratios}"], {}), "({'x': x, 'y': y, 'ratios': ratios})\n", (3025, 3061), True, 'import pandas as pd\n'), ((3646, 3664), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['cmap'], {}), '(cmap)\n', (3658, 3664), True, 'import matplotlib.pyplot... |
from glacier import glacier
def f1(name: str, verbose: bool = False) -> None:
pass
def f2(name: str, verbose: bool = False) -> None:
pass
def f3(name: str, verbose: bool = False) -> None:
pass
if __name__ == '__main__':
glacier({
'run': f1,
'build': f2,
'test': f3,
})... | [
"glacier.glacier"
] | [((244, 289), 'glacier.glacier', 'glacier', (["{'run': f1, 'build': f2, 'test': f3}"], {}), "({'run': f1, 'build': f2, 'test': f3})\n", (251, 289), False, 'from glacier import glacier\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import mysql.connector
import datetime
# search 5 comments for a moment
#return 0 for error
def searchComment(momentId, startPoint, addPoint, cnx):
commentQuery = 'SELECT * FROM moment_comment WHERE moment_id = %s ORDER BY comment_id DESC LIMIT %s, 5'
pin = startPoint * ... | [
"datetime.datetime.now"
] | [((806, 829), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (827, 829), False, 'import datetime\n')] |
import argparse
import pickle
from pathlib import Path
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report
from sklearn.model_selection import train_test_split
parser = argparse.ArgumentParser(description='Process ... | [
"argparse.ArgumentParser",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.classification_report",
"pathlib.Path",
"sklearn.linear_model.LogisticRegression",
"pandas.DataFrame",
"sklearn.metrics.accuracy_score",
"sklearn.metrics.confusion_matrix"
] | [((275, 336), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process some integers."""'}), "(description='Process some integers.')\n", (298, 336), False, 'import argparse\n'), ((994, 1033), 'pandas.read_csv', 'pd.read_csv', (['args.input_train_data_path'], {}), '(args.input_train_data_pa... |
import wave
import pyaudio
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = 16000
RECORD_SECONDS = 10
CHUNK = 16000
WAV_OUTPUT_FILENAME = "audioFile2.wav"
EXTERNAL_MIC_NAME = 'USB audio CODEC: Audio (hw:1,0)'
mic_index = None
audio = pyaudio.PyAudio()
info = audio.get_host_api_info_by_index(0)
numdevices = info.get('dev... | [
"wave.open",
"pyaudio.PyAudio"
] | [((233, 250), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (248, 250), False, 'import pyaudio\n'), ((1096, 1132), 'wave.open', 'wave.open', (['WAV_OUTPUT_FILENAME', '"""wb"""'], {}), "(WAV_OUTPUT_FILENAME, 'wb')\n", (1105, 1132), False, 'import wave\n')] |
"""
Test script for data.py classes.
"""
import os
import numpy as np
import pytest
from bilby.core.prior import PriorDict, Uniform
from cwinpy import HeterodynedData, MultiHeterodynedData, TargetedPulsarLikelihood
class TestTargetedPulsarLikelhood(object):
"""
Tests for the TargetedPulsarLikelihood class.
... | [
"numpy.random.normal",
"cwinpy.HeterodynedData",
"numpy.ones",
"cwinpy.MultiHeterodynedData",
"cwinpy.TargetedPulsarLikelihood",
"numpy.linspace",
"pytest.raises",
"bilby.core.prior.PriorDict",
"bilby.core.prior.Uniform",
"os.remove"
] | [((372, 417), 'numpy.linspace', 'np.linspace', (['(1000000000.0)', '(1000086340.0)', '(1440)'], {}), '(1000000000.0, 1000086340.0, 1440)\n', (383, 417), True, 'import numpy as np\n'), ((429, 473), 'numpy.random.normal', 'np.random.normal', (['(0.0)', '(1e-25)'], {'size': '(1440, 2)'}), '(0.0, 1e-25, size=(1440, 2))\n',... |
import pickle
import torch
import numpy as np
from transformers import RobertaModel, RobertaTokenizerFast
from retriever_train.dataset_config import DATASET_CONFIG, BASE_CONFIG
from retriever_train.data_utils import Instance
from retriever_train.utils import init_parent_model
class PrefixSuffixWrapper(object):
... | [
"retriever_train.utils.init_parent_model",
"torch.tensor",
"retriever_train.data_utils.Instance",
"torch.cuda.current_device"
] | [((666, 693), 'torch.cuda.current_device', 'torch.cuda.current_device', ([], {}), '()\n', (691, 693), False, 'import torch\n'), ((1902, 1951), 'retriever_train.data_utils.Instance', 'Instance', (['self.args', 'self.config', 'all_context_ids'], {}), '(self.args, self.config, all_context_ids)\n', (1910, 1951), False, 'fr... |
#! /usr/bin/env python3
import os
from random import choice
import filetype
from telegram.ext import Updater, CommandHandler
from tinydb import TinyDB, where
db = TinyDB('db.json')
admins_table = db.table("admins")
given_questions = db.table("given")
quiet_categories = ['meme']
def register_admin(update, context):... | [
"os.listdir",
"tinydb.TinyDB",
"filetype.is_image",
"os.getenv",
"tinydb.where",
"telegram.ext.CommandHandler",
"telegram.ext.Updater"
] | [((165, 182), 'tinydb.TinyDB', 'TinyDB', (['"""db.json"""'], {}), "('db.json')\n", (171, 182), False, 'from tinydb import TinyDB, where\n'), ((2088, 2119), 'os.getenv', 'os.getenv', (['"""TELEGRAM_BOT_TOKEN"""'], {}), "('TELEGRAM_BOT_TOKEN')\n", (2097, 2119), False, 'import os\n'), ((2220, 2260), 'os.getenv', 'os.geten... |
###############################################################################
# mockDensData.py: generate mock data following a given density
###############################################################################
import os, os.path
import pickle
import multiprocessing
from optparse import OptionParser
import... | [
"mwdust.Drimmel03",
"define_rcsample.get_rcsample",
"densprofiles.logit",
"numpy.log",
"multiprocessing.cpu_count",
"numpy.array",
"mwdust.Green15",
"os.path.exists",
"numpy.linspace",
"numpy.nanmax",
"numpy.amax",
"numpy.amin",
"mwdust.Marshall06",
"pickle.load",
"galpy.util.bovy_coords... | [((1685, 1714), 'fitDens._setup_densfunc', 'fitDens._setup_densfunc', (['type'], {}), '(type)\n', (1708, 1714), False, 'import fitDens\n'), ((2767, 2793), 'os.path.exists', 'os.path.exists', (['selectFile'], {}), '(selectFile)\n', (2781, 2793), False, 'import os, os.path\n'), ((2999, 3029), 'numpy.linspace', 'numpy.lin... |
#!/usr/bin/env python
# coding=utf-8
import eventlet
# BGPSpeaker needs sockets patched -> breaks SRL registration if done too late
# eventlet.monkey_patch( socket=True, select=True ) # adding only ( socket=True ) allows SRL, but then BGP doesn't work :(
eventlet.monkey_patch() # need thread too
# Google core librar... | [
"socket.sendall",
"pygnmi.client.gNMIclient",
"traceback.format_tb",
"logging.debug",
"telemetry_service_pb2_grpc.SdkMgrTelemetryServiceStub",
"eventlet.monkey_patch",
"sys.exit",
"bcc.BPF.attach_raw_socket",
"os.read",
"logging.info",
"logging.error",
"sdk_service_pb2.AgentRegistrationRequest... | [((257, 280), 'eventlet.monkey_patch', 'eventlet.monkey_patch', ([], {}), '()\n', (278, 280), False, 'import eventlet\n'), ((428, 457), 'grpc.experimental.eventlet.init_eventlet', 'grpc_eventlet.init_eventlet', ([], {}), '()\n', (455, 457), True, 'from grpc.experimental import eventlet as grpc_eventlet\n'), ((2608, 264... |
"""
My commonly used Modules:
(1) get_directory_name(caption) -- browses for directory name
(2) get_file_name(caption) -- browses for a specific file
(3) save_file_name(caption) -- saves named file to selected directory
(4) get_exif(fn) -- gets image exif data
(5) get_gps_deg(exf) -- extracts GPS data from exi... | [
"tkinter.filedialog.askdirectory",
"PIL.Image.open",
"math.tan",
"DateTime.DateTime",
"math.asin",
"tkinter.filedialog.asksaveasfilename",
"PIL.ExifTags.TAGS.get",
"DateTime.DateTime.hour",
"math.cos",
"DateTime.DateTime.second",
"tkinter.Tk",
"DateTime.DateTime.minute",
"math.atan2",
"tki... | [((992, 1004), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (1002, 1004), False, 'import tkinter\n'), ((1186, 1261), 'tkinter.filedialog.askdirectory', 'tkinter.filedialog.askdirectory', ([], {'parent': 'root', 'initialdir': '"""/"""', 'title': 'caption'}), "(parent=root, initialdir='/', title=caption)\n", (1217, 1261... |
import os
import numpy as np
import pandas as pd
import yaml
from . import model as model_lib
from . import training, tensorize, io_local
def main():
#Turn off warnings:
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
###Load training data - Put the path to your own data here
training_data_path = "/root/trai... | [
"numpy.stack",
"numpy.sqrt",
"pandas.read_csv",
"yaml.dump"
] | [((368, 399), 'pandas.read_csv', 'pd.read_csv', (['training_data_path'], {}), '(training_data_path)\n', (379, 399), True, 'import pandas as pd\n'), ((2552, 2594), 'numpy.stack', 'np.stack', (['training_df.relative_intensities'], {}), '(training_df.relative_intensities)\n', (2560, 2594), True, 'import numpy as np\n'), (... |
from secml.testing import CUnitTest
from secml.core.attr_utils import extract_attr
class TestAttributeUtilities(CUnitTest):
"""Unit test for secml.core.attr_utils."""
def test_extract_attr(self):
# Toy class for testing
class Foo:
def __init__(self):
self.a = 5
... | [
"secml.core.attr_utils.extract_attr",
"secml.testing.CUnitTest.main"
] | [((1480, 1496), 'secml.testing.CUnitTest.main', 'CUnitTest.main', ([], {}), '()\n', (1494, 1496), False, 'from secml.testing import CUnitTest\n'), ((834, 855), 'secml.core.attr_utils.extract_attr', 'extract_attr', (['f', 'code'], {}), '(f, code)\n', (846, 855), False, 'from secml.core.attr_utils import extract_attr\n')... |
from time import perf_counter
import numpy as np
from sklearn.model_selection import RepeatedStratifiedKFold
from sklearn.metrics import *
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.decomposition import PCA
from sklearn.manifold import TSNE
import matplotlib.pyplot as pl... | [
"sklearn.metrics.f1_score",
"numpy.load",
"numpy.random.seed",
"hpsklearn.random_forest"
] | [((545, 563), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (559, 563), True, 'import numpy as np\n'), ((578, 605), 'numpy.load', 'np.load', (['"""data/train_X.npy"""'], {}), "('data/train_X.npy')\n", (585, 605), True, 'import numpy as np\n'), ((619, 645), 'numpy.load', 'np.load', (['"""data/test_X.n... |
import os
import sys
import shutil
import re
def print_m(msg):
msg_color = '\033[0m'
sys.stdout.write(f'{msg_color}{msg}{msg_color}\n')
def print_w(warning):
warn_color = '\033[93m'
msg_color = '\033[0m'
sys.stdout.write(f'{warn_color}Warning: {warning} {msg_color}\n')
def print_e(error):
... | [
"os.path.exists",
"re.split",
"os.listdir",
"os.makedirs",
"os.path.splitext",
"os.path.join",
"os.path.isfile",
"shutil.rmtree",
"sys.stdout.write"
] | [((95, 145), 'sys.stdout.write', 'sys.stdout.write', (['f"""{msg_color}{msg}{msg_color}\n"""'], {}), "(f'{msg_color}{msg}{msg_color}\\n')\n", (111, 145), False, 'import sys\n'), ((228, 293), 'sys.stdout.write', 'sys.stdout.write', (['f"""{warn_color}Warning: {warning} {msg_color}\n"""'], {}), "(f'{warn_color}Warning: {... |
import time
import base64
import hashlib
import hmac
from requests.auth import AuthBase
class CoinbaseProAuth(AuthBase):
"""Request authorization.
Provided by Coinbase Pro:
https://docs.pro.coinbase.com/?python#signing-a-message
"""
def __init__(self, api_key, secret_key, passphrase):
s... | [
"hmac.new",
"time.time",
"base64.b64decode"
] | [((635, 668), 'base64.b64decode', 'base64.b64decode', (['self.secret_key'], {}), '(self.secret_key)\n', (651, 668), False, 'import base64\n'), ((689, 732), 'hmac.new', 'hmac.new', (['hmac_key', 'message', 'hashlib.sha256'], {}), '(hmac_key, message, hashlib.sha256)\n', (697, 732), False, 'import hmac\n'), ((474, 485), ... |
from django.conf import settings
from django.db import models
from django.utils import timezone
class Post(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField... | [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.FileField",
"django.db.models.BooleanField",
"django.utils.timezone.now",
"django.db.models.ImageField",
"django.db.models.BigIntegerField",
"django.db.models.DateTimeField",
"django.... | [((136, 205), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE'}), '(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)\n', (153, 205), False, 'from django.db import models\n'), ((218, 250), 'django.db.models.CharField', 'models.CharField', ([], {'max_len... |
import socket
import threading
import sys
import subprocess
from colorama import init
from colorama import Fore, Back, Style
init()
def sending(server):
while True:
user = subprocess.run(['whoami'], stdout=subprocess.PIPE).stdout.decode('utf-8').strip().split('\\')[-1]
print(Fore.GREEN + user + ":... | [
"socket.socket",
"subprocess.run",
"sys.exit",
"threading.Thread",
"colorama.init"
] | [((125, 131), 'colorama.init', 'init', ([], {}), '()\n', (129, 131), False, 'from colorama import init\n'), ((787, 802), 'socket.socket', 'socket.socket', ([], {}), '()\n', (800, 802), False, 'import socket\n'), ((1003, 1052), 'threading.Thread', 'threading.Thread', ([], {'target': 'listening', 'args': '[server]'}), '(... |
#!/usr/bin/env python
# coding: utf-8
import random
import matplotlib.pyplot as plt
cities = ['A', 'B', 'C', 'E', 'M', 'S']
# Represent the Graph as a dictionary of dictionaries
A = {'A':0, 'B':10, 'C':15, 'E':14, 'M':11, 'S':10}
B = {'A':10, 'B':0, 'C':8, 'E':13 ,'M':15 ,'S':9}
C = {'A':15, 'B':8, 'C':0, 'E':11,'M'... | [
"random.sample",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((2439, 2463), 'random.sample', 'random.sample', (['cities', '(6)'], {}), '(cities, 6)\n', (2452, 2463), False, 'import random\n'), ((4586, 4624), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_fsol', '"""b"""'], {'label': '"""fSol"""'}), "(x, y_fsol, 'b', label='fSol')\n", (4594, 4624), True, 'import matplotlib.pypl... |
from django.contrib import admin
from . import models
class SightingAdmin(admin.ModelAdmin):
list_display = ('superhero', 'power', 'location', 'sighted_on')
date_hierarchy = 'sighted_on'
search_fields = ['superhero']
ordering = ['superhero']
admin.site.register(models.Origin)
admin.site.register(mod... | [
"django.contrib.admin.site.register"
] | [((262, 296), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Origin'], {}), '(models.Origin)\n', (281, 296), False, 'from django.contrib import admin\n'), ((297, 333), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Location'], {}), '(models.Location)\n', (316, 333), False... |
print ('Split the mp4 into video and audio')
import os
import datetime
import ffmpeg
from pydub import AudioSegment
from ConfigX import ConfigX
# Search the string from the right for the marker character and return the text to its right
# @param string s  target string
# @param $mark  marker character
# @return  substring to the right of the marker character
def stringRightRev(s, mark):
a =s.rfind(mark)
res = s[a+len(mark):]
return res
# From the string ... | [
"os.path.exists",
"ffmpeg.output",
"ffmpeg.input",
"ffmpeg.run",
"datetime.datetime.now",
"ffmpeg.probe",
"pydub.AudioSegment.from_file",
"pydub.AudioSegment.silent",
"ConfigX.ConfigX",
"os.remove"
] | [((1905, 1928), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1926, 1928), False, 'import datetime\n'), ((1964, 1973), 'ConfigX.ConfigX', 'ConfigX', ([], {}), '()\n', (1971, 1973), False, 'from ConfigX import ConfigX\n'), ((2645, 2671), 'ffmpeg.probe', 'ffmpeg.probe', (['input_mp4_fp'], {}), '(in... |
"""
Module:
unicon.plugins.nd
Authors:
<NAME> (<EMAIL>)
Description:
This subpackage implements ND
"""
# from unicon.plugins.linux import LinuxConnection
from unicon.plugins.linux import LinuxConnection,LinuxServiceList
from unicon.plugins.linux.statemachine import LinuxStateMachine
from unicon.plugins.l... | [
"unicon.plugins.linux.settings.LinuxSettings"
] | [((908, 923), 'unicon.plugins.linux.settings.LinuxSettings', 'LinuxSettings', ([], {}), '()\n', (921, 923), False, 'from unicon.plugins.linux.settings import LinuxSettings\n')] |
"""Test responses from Denon/Marantz."""
from pyavreceiver.denon.response import DenonMessage
def test_separate(message_none):
"""Test separation of messages."""
assert message_none.separate("PWON") == ("PW", None, "ON")
assert message_none.separate("PWSTANDBY") == ("PW", None, "STANDBY")
assert mess... | [
"pyavreceiver.denon.response.DenonMessage"
] | [((4813, 4847), 'pyavreceiver.denon.response.DenonMessage', 'DenonMessage', (['"""PWON"""', 'command_dict'], {}), "('PWON', command_dict)\n", (4825, 4847), False, 'from pyavreceiver.denon.response import DenonMessage\n'), ((4993, 5027), 'pyavreceiver.denon.response.DenonMessage', 'DenonMessage', (['"""MV75"""', 'comman... |
import numpy as np
from scipy.spatial.distance import pdist, squareform
from scipy import exp
from scipy.linalg import eigh
def rbf_kernel_pca(X, gamma, n_components):
"""
RBF kernel PCA implementation.
Parameters
------------
X: {NumPy ndarray}, shape = [n_samples, n_features]
gamma: float
... | [
"scipy.linalg.eigh",
"scipy.spatial.distance.squareform",
"numpy.sqrt",
"numpy.ones",
"scipy.exp",
"scipy.spatial.distance.pdist",
"numpy.dot"
] | [((660, 683), 'scipy.spatial.distance.pdist', 'pdist', (['X', '"""sqeuclidean"""'], {}), "(X, 'sqeuclidean')\n", (665, 683), False, 'from scipy.spatial.distance import pdist, squareform\n'), ((759, 779), 'scipy.spatial.distance.squareform', 'squareform', (['sq_dists'], {}), '(sq_dists)\n', (769, 779), False, 'from scip... |
# Generated by Django 3.0.4 on 2020-03-26 14:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('business', '0030_auto_20200326_1533'),
]
operations = [
migrations.AddField(
model_name='request',
name='lang',
... | [
"django.db.models.CharField"
] | [((334, 378), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""en"""', 'max_length': '(2)'}), "(default='en', max_length=2)\n", (350, 378), False, 'from django.db import migrations, models\n')] |
"""The test for the sensibo update platform."""
from __future__ import annotations
from datetime import timedelta
from unittest.mock import patch
from pysensibo.model import SensiboData
from pytest import MonkeyPatch
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STAT... | [
"homeassistant.util.dt.utcnow",
"datetime.timedelta",
"unittest.mock.patch"
] | [((1097, 1220), 'unittest.mock.patch', 'patch', (['"""homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data"""'], {'return_value': 'get_data'}), "(\n 'homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data'\n , return_value=get_data)\n", (1102, 1220), False, ... |
import subprocess
from flask import Flask
from os import environ
BOT_START_FILE = 'run_bot.py'
# for start PYTHON_PROCESS = 'python3'
# for testing
PYTHON_PROCESS = r"C:\Python3.7\python.exe"
app = Flask(__name__)
@app.route("/", methods=["GET"])
def index():
return "Bot is On"
print(f"Running {BOT_START_FIL... | [
"subprocess.Popen",
"flask.Flask"
] | [((201, 216), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (206, 216), False, 'from flask import Flask\n'), ((339, 389), 'subprocess.Popen', 'subprocess.Popen', (['[PYTHON_PROCESS, BOT_START_FILE]'], {}), '([PYTHON_PROCESS, BOT_START_FILE])\n', (355, 389), False, 'import subprocess\n')] |
__all__ = [
"BoltQuoteHelper",
"rewrite_traceback",
"fake_traceback",
"internal",
"INTERNAL_CODE",
"SAFE_BUILTINS",
]
from bisect import bisect
from dataclasses import dataclass, field
from types import CodeType, TracebackType
from typing import Dict, List, Set, TypeVar
from mecha.utils impor... | [
"bisect.bisect",
"dataclasses.field",
"typing.TypeVar"
] | [((368, 380), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (375, 380), False, 'from typing import Dict, List, Set, TypeVar\n'), ((1271, 1375), 'dataclasses.field', 'field', ([], {'default_factory': "(lambda : {'\\\\\\\\': '\\\\', '\\\\f': '\\x0c', '\\\\n': '\\n', '\\\\r': '\\r', '\\\\t': '\\t'})"}), "(def... |
#!/bin/python
import os
import js2py
from pathlib import Path
from configStrategies import cS
from configIndicators import cI
class _settings:
def __init__(self, **entries):
'''
print(entries)
def iterate(self, DATA):
for W in DATA.keys():
if type(DATA[W]) == ... | [
"pathlib.Path.home",
"js2py.eval_js"
] | [((634, 645), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (643, 645), False, 'from pathlib import Path\n'), ((4901, 4920), 'js2py.eval_js', 'js2py.eval_js', (['text'], {}), '(text)\n', (4914, 4920), False, 'import js2py\n')] |
import sys
import hypothesis.strategies as st
from hypothesis import given
def is_pytest():
return "pytest" in sys.modules
def pytest_configure(config):
# Workaround for Hypothesis bug causing flaky tests if they use characters
# or text: https://github.com/HypothesisWorks/hypothesis/issues/2108
@gi... | [
"hypothesis.strategies.text"
] | [((324, 333), 'hypothesis.strategies.text', 'st.text', ([], {}), '()\n', (331, 333), True, 'import hypothesis.strategies as st\n')] |
#!/usr/bin/env python3
#Basic Diffie Hellman
import random, numpy, sys, argparse
if "-h" in sys.argv or "--help" in sys.argv:
print("Usage:")
print("./Diffie_Hellman.py")
print("./Diffie_Hellman.py XOR")
print()
print("XOR option replaces (mod) with ^, and outputs how often it works.")
exit()... | [
"random.choice",
"random.randrange"
] | [((663, 689), 'random.choice', 'random.choice', (['LargePrimes'], {}), '(LargePrimes)\n', (676, 689), False, 'import random, numpy, sys, argparse\n'), ((809, 835), 'random.randrange', 'random.randrange', (['(3)', '(p - 1)'], {}), '(3, p - 1)\n', (825, 835), False, 'import random, numpy, sys, argparse\n')] |
from collections.abc import Container, Sequence
from ipaddress import (IPv4Address, IPv4Network, IPv6Address, IPv6Network,
ip_address, ip_network)
from .exceptions import IncorrectIPCount, UntrustedIP
MSG = ("Trusted list should be a sequence of sets "
"with either addresses or networks.... | [
"ipaddress.ip_network",
"ipaddress.ip_address"
] | [((1141, 1157), 'ipaddress.ip_address', 'ip_address', (['item'], {}), '(item)\n', (1151, 1157), False, 'from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network, ip_address, ip_network\n'), ((1259, 1275), 'ipaddress.ip_network', 'ip_network', (['item'], {}), '(item)\n', (1269, 1275), False, 'from ipaddr... |
from django.db.models.signals import post_save
from django.dispatch import receiver
from communique.utils.utils_signals import generate_notifications
from user.models import NotificationRegistration
from .models import Program
@receiver(post_save, sender=Program)
def post_program_save_callback(sender, **kwargs):
... | [
"django.dispatch.receiver",
"communique.utils.utils_signals.generate_notifications"
] | [((231, 266), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Program'}), '(post_save, sender=Program)\n', (239, 266), False, 'from django.dispatch import receiver\n'), ((452, 517), 'communique.utils.utils_signals.generate_notifications', 'generate_notifications', (['NotificationRegistration.PROGRAM... |
import os
from smsapi.client import SmsApiPlClient
access_token = os.getenv('SMSAPI_ACCESS_TOKEN')
client = SmsApiPlClient(access_token=access_token)
def send_push():
r = client.push.send(app_id='app id', alert='push notification text')
print(r.id, r.date_created, r.scheduled_date, r.summary.points, r.... | [
"smsapi.client.SmsApiPlClient",
"os.getenv"
] | [((70, 102), 'os.getenv', 'os.getenv', (['"""SMSAPI_ACCESS_TOKEN"""'], {}), "('SMSAPI_ACCESS_TOKEN')\n", (79, 102), False, 'import os\n'), ((114, 155), 'smsapi.client.SmsApiPlClient', 'SmsApiPlClient', ([], {'access_token': 'access_token'}), '(access_token=access_token)\n', (128, 155), False, 'from smsapi.client import... |
from collections import deque
import string
from torch.utils.data import DataLoader
import torch
import torch.nn as nn
import torch.optim as optim
from datasets import TextDataset
from model import LSTMAE
from generate import generate
import numpy as np
data_root = 'Data'
max_length = 50
batch_size = 50
num_epochs = 1... | [
"collections.deque",
"torch.nn.CrossEntropyLoss",
"model.LSTMAE",
"torch.utils.data.DataLoader",
"datasets.TextDataset"
] | [((377, 411), 'datasets.TextDataset', 'TextDataset', (['data_root', 'max_length'], {}), '(data_root, max_length)\n', (388, 411), False, 'from datasets import TextDataset\n'), ((425, 481), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(True)'}), '(dataset, batch_size... |
# %%
import timeit
import tqdm
from os import path
import inspect
import numpy as np
import dill
import init
import fastg3.crisp as g3crisp
from plot_utils import plot_bench
from constants import N_REPEATS, N_STEPS, DILL_FOLDER
from number_utils import format_number
from dataset_utils import AVAILABLE_DATASETS, load_d... | [
"init.gen_sampling_benchmark",
"number_utils.format_number",
"inspect.stack",
"tqdm.tqdm",
"os.path.join",
"os.path.isfile",
"dataset_utils.load_dataset",
"timeit.timeit",
"plot_utils.plot_bench",
"numpy.arange",
"init.gen_time_benchmark"
] | [((718, 743), 'init.gen_time_benchmark', 'init.gen_time_benchmark', ([], {}), '()\n', (741, 743), False, 'import init\n'), ((757, 780), 'tqdm.tqdm', 'tqdm.tqdm', (['frac_samples'], {}), '(frac_samples)\n', (766, 780), False, 'import tqdm\n'), ((1367, 1396), 'init.gen_sampling_benchmark', 'init.gen_sampling_benchmark', ... |
from __future__ import unicode_literals
import frappe
from frappe import msgprint, _
import datetime
from frappe.utils import flt
from erpnext.accounts.utils import get_balance_on
from frappe.utils import (flt, getdate, get_url, now,
nowtime, get_time, today, get_datetime, add_days)
def execute(filters=None):
col... | [
"datetime.timedelta",
"frappe.db.sql",
"frappe.utils.getdate",
"frappe._"
] | [((1963, 2512), 'frappe.db.sql', 'frappe.db.sql', (['"""\n SELECT\n `tabAccount`.name as account\n FROM\n `tabAccount`\n WHERE\n `tabAccount`.disabled = 0\n and `tabAccount`.is_group = 0\n and `tabAccount`.accoun... |
"""
Contabo API
The version of the OpenAPI document: 1.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import unittest
import pfruck_contabo
from pfruck_contabo.api.instance_actions_audits_api import InstanceActionsAuditsApi # noqa: E501
class TestInstanceActionsAuditsApi(u... | [
"unittest.main",
"pfruck_contabo.api.instance_actions_audits_api.InstanceActionsAuditsApi"
] | [((777, 792), 'unittest.main', 'unittest.main', ([], {}), '()\n', (790, 792), False, 'import unittest\n'), ((431, 457), 'pfruck_contabo.api.instance_actions_audits_api.InstanceActionsAuditsApi', 'InstanceActionsAuditsApi', ([], {}), '()\n', (455, 457), False, 'from pfruck_contabo.api.instance_actions_audits_api import ... |
import prime
MAX = 28124
prime._refresh(MAX/2)
abundants = [n for n in range(1, MAX) if sum(prime.all_factors(n)) > n+n]
abundants_dict = dict.fromkeys(abundants, 1)
total = 0
for n in range(1, MAX):
sum_of_abundants = 0
for a in abundants:
if a > n: break
if abundants_dict.get(n - a):... | [
"prime._refresh",
"prime.all_factors"
] | [((27, 50), 'prime._refresh', 'prime._refresh', (['(MAX / 2)'], {}), '(MAX / 2)\n', (41, 50), False, 'import prime\n'), ((95, 115), 'prime.all_factors', 'prime.all_factors', (['n'], {}), '(n)\n', (112, 115), False, 'import prime\n')] |
# We often don't use all members of all the pyuv callbacks
# pylint: disable=unused-argument
import sys, hashlib
import logging
import os
import pickle
import signal
import argparse
import re
import pyuv
from ..__main__ import getObjectFileHash
class HashCache:
def __init__(self, loop, excludePatterns, disableWat... | [
"logging.basicConfig",
"pyuv.fs.FSEvent",
"logging.debug",
"argparse.ArgumentParser",
"pickle.dumps",
"pyuv.Signal",
"os.path.join",
"pyuv.Pipe",
"pyuv.Loop.default_loop",
"sys.exit",
"os.stat",
"os.path.normcase",
"logging.info",
"re.search"
] | [((7319, 7365), 'logging.info', 'logging.info', (['"""Ctrl+C detected, shutting down"""'], {}), "('Ctrl+C detected, shutting down')\n", (7331, 7365), False, 'import logging\n'), ((7429, 7473), 'logging.info', 'logging.info', (['"""Server was killed by SIGTERM"""'], {}), "('Server was killed by SIGTERM')\n", (7441, 7473... |
from dataclasses import dataclass
from pathlib import PurePath
from typing import Optional
@dataclass(frozen=True)
class EnvironmentSettings:
working_dir: PurePath
project_dir: Optional[PurePath]
app_dir: PurePath
cache_dir: PurePath
catalog_url: str
| [
"dataclasses.dataclass"
] | [((94, 116), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (103, 116), False, 'from dataclasses import dataclass\n')] |
#-*- coding: utf-8 -*-
import xlrd
import xlwt
import re
# Check whether the application requirements are met
def check(row_value):
zy = row_value[11]
if not checkZY(zy):
return False
xw = row_value[13]
if not checkXW(xw):
return False
if checkSpecial(row_value):
return False
return True
# Check whether the major requirement is met
def ... | [
"re.search",
"xlrd.open_workbook",
"xlwt.Workbook",
"re.compile"
] | [((346, 374), 're.compile', 're.compile', (['u"""不限|限制|生物工程|化工"""'], {}), "(u'不限|限制|生物工程|化工')\n", (356, 374), False, 'import re\n'), ((382, 403), 're.search', 're.search', (['pat', 'value'], {}), '(pat, value)\n', (391, 403), False, 'import re\n'), ((487, 508), 're.compile', 're.compile', (['u"""学士|不|无"""'], {}), "(u'学... |
from typing import List, Optional
from athenian.api.models.web.base_model_ import AllOf, Model
from athenian.api.models.web.jira_epic_issue_common import JIRAEpicIssueCommon
from athenian.api.models.web.pull_request import PullRequest
class _JIRAIssueSpecials(Model):
"""Details specific to JIRA issues."""
o... | [
"athenian.api.models.web.base_model_.AllOf"
] | [((2827, 2913), 'athenian.api.models.web.base_model_.AllOf', 'AllOf', (['JIRAEpicIssueCommon', '_JIRAIssueSpecials'], {'name': '"""JIRAIssue"""', 'module': '__name__'}), "(JIRAEpicIssueCommon, _JIRAIssueSpecials, name='JIRAIssue', module=\n __name__)\n", (2832, 2913), False, 'from athenian.api.models.web.base_model_... |
'''
##vpc-isolate
What it does: turn off dns resource
change network acl to new empty one with deny all
add iam policy, to all users in the account, which limits vpc use: ec2 and sg use in the vpc
Usage: AUTO: vpc_isolate
Limitation: None
'''
import boto3
from botocore.exception... | [
"json.dumps"
] | [((3585, 3608), 'json.dumps', 'json.dumps', (['deny_policy'], {}), '(deny_policy)\n', (3595, 3608), False, 'import json\n')] |