Dataset columns:

code: string, lengths 22 to 1.05M
apis: list, lengths 1 to 3.31k
extract_api: string, lengths 75 to 3.25M
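Each record below pairs a Python source snippet (code) with the list of fully qualified library calls it makes (apis) and a per-call breakdown (extract_api). As a minimal sketch of how records of this shape could be produced, assuming the extraction walks the Python AST and resolves callee names through the file's imports (the actual tool behind this dump is not named), the helper below is illustrative only and records a simplified subset of the stored fields:

```python
import ast

def extract_api_calls(source: str):
    """Map imported names to qualified prefixes, then record every call
    whose callee resolves through an import. Requires Python 3.8+ for
    ast.get_source_segment."""
    tree = ast.parse(source)
    # local name -> (qualified prefix, was_aliased, import statement text)
    aliases = {}
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            for a in node.names:
                aliases[a.asname or a.name] = (
                    a.name, a.asname is not None,
                    ast.get_source_segment(source, node))
        elif isinstance(node, ast.ImportFrom) and node.module:
            for a in node.names:
                aliases[a.asname or a.name] = (
                    node.module + "." + a.name, a.asname is not None,
                    ast.get_source_segment(source, node))
    records = []
    for node in ast.walk(tree):
        if not isinstance(node, ast.Call):
            continue
        # Rebuild the dotted callee, e.g. pd.read_csv ends at Name('pd')
        parts, cur = [], node.func
        while isinstance(cur, ast.Attribute):
            parts.append(cur.attr)
            cur = cur.value
        if isinstance(cur, ast.Name) and cur.id in aliases:
            prefix, aliased, import_stmt = aliases[cur.id]
            qualified = ".".join([prefix] + list(reversed(parts)))
            records.append((qualified,
                            ast.get_source_segment(source, node),
                            aliased, import_stmt))
    return records
```

Run on the short flask snippet further down, this returns [('flask.Flask', 'flask.Flask(__name__)', False, 'import flask')], which lines up with that record's stored fields; the full records additionally carry character spans and the parsed argument lists.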
from django.shortcuts import render, redirect from django.db.models import F from pathlib import Path import os from HAProxyManager import settings from Users import models as user_models from django.http import HttpResponseRedirect from django.contrib.auth.models import User as system_users # Create your views here....
[ "django.shortcuts.render", "Users.models.User.objects.get", "django.db.models.F", "Users.models.User.objects.create", "django.contrib.auth.models.User.objects.get", "django.contrib.auth.models.User.objects.create_user" ]
[((570, 613), 'django.shortcuts.render', 'render', (['request', '"""Users/main.html"""', 'context'], {}), "(request, 'Users/main.html', context)\n", (576, 613), False, 'from django.shortcuts import render, redirect\n'), ((678, 721), 'django.contrib.auth.models.User.objects.get', 'system_users.objects.get', ([], {'usern...
from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('annotation', '0017_auto_20160516_0030'), ] operations = [ migrations.RunSQL(''' -- migrate initial observations to an event insert into anno...
[ "django.db.migrations.RunSQL" ]
[((222, 1238), 'django.db.migrations.RunSQL', 'migrations.RunSQL', (['"""\n -- migrate initial observations to an event\n insert into annotation_event\n (\n create_datetime,\n last_modified_datetime,\n observation_id,\n event_time,\n ...
# -*- coding: utf-8 -*- """Classes for validating data passed to views.""" from __future__ import unicode_literals import copy import jsonschema from jsonschema.exceptions import best_match class ValidationError(Exception): pass class JSONSchema(object): """ Validate data according to a Draft 4 JSON S...
[ "jsonschema.Draft4Validator", "jsonschema.FormatChecker", "copy.deepcopy" ]
[((502, 528), 'jsonschema.FormatChecker', 'jsonschema.FormatChecker', ([], {}), '()\n', (526, 528), False, 'import jsonschema\n'), ((554, 624), 'jsonschema.Draft4Validator', 'jsonschema.Draft4Validator', (['self.schema'], {'format_checker': 'format_checker'}), '(self.schema, format_checker=format_checker)\n', (580, 624...
"""cv URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based view...
[ "django.urls.path" ]
[((1469, 1514), 'django.urls.path', 'path', (['"""dl/<str:filepath>/"""', 'views.download_cv'], {}), "('dl/<str:filepath>/', views.download_cv)\n", (1473, 1514), False, 'from django.urls import path, include\n'), ((1597, 1667), 'django.urls.path', 'path', (['"""my_messages/<pk>/read"""', 'views.message_read'], {'name':...
#!/usr/bin/python3.6 import os, subprocess, json, argparse def Parser(): parser = argparse.ArgumentParser(description='Process metrix from system') parser.add_argument('-C', metavar='Command', type=str, help='Command which process started, delimiter is ":::", e.g. command:command', required=True) return p...
[ "subprocess.run", "json.dumps", "argparse.ArgumentParser" ]
[((88, 153), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process metrix from system"""'}), "(description='Process metrix from system')\n", (111, 153), False, 'import os, subprocess, json, argparse\n'), ((618, 636), 'json.dumps', 'json.dumps', (['proces'], {}), '(proces)\n', (628, 636)...
# Generated by Django 2.0.13 on 2019-05-06 17:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('manufacturer', '0001_move_manufacturer_and_rename'), ] operations = [ migrations.AddField( model_name='manufacturer', ...
[ "django.db.models.URLField" ]
[((357, 388), 'django.db.models.URLField', 'models.URLField', ([], {'default': '"""none"""'}), "(default='none')\n", (372, 388), False, 'from django.db import migrations, models\n')]
from math import ceil from objects import * global VERBOSE_OUT VERBOSE_OUT = False # Converts a series of bytes from a list into a String by interpreting them as ASCII values def asciiBytesToString(headerBytes, byteStart, byteEnd): string = "" for i in range(byteStart, byteEnd): string += chr(header...
[ "math.ceil" ]
[((11606, 11630), 'math.ceil', 'ceil', (['(givenCount / 86400)'], {}), '(givenCount / 86400)\n', (11610, 11630), False, 'from math import ceil\n')]
import responses from urllib.parse import urlencode from tests.util import random_str from tests.util import mock_http_response from binance.futures import Futures as Client from binance.error import ParameterRequiredError, ClientError mock_item = {"key_1": "value_1", "key_2": "value_2"} mock_exception = {"code": -11...
[ "urllib.parse.urlencode", "tests.util.mock_http_response", "tests.util.random_str", "binance.futures.Futures" ]
[((372, 384), 'tests.util.random_str', 'random_str', ([], {}), '()\n', (382, 384), False, 'from tests.util import random_str\n'), ((394, 406), 'tests.util.random_str', 'random_str', ([], {}), '()\n', (404, 406), False, 'from tests.util import random_str\n'), ((600, 726), 'tests.util.mock_http_response', 'mock_http_resp...
# <https:// # Prints all built-in property names into a file from archicad import ACConnection conn = ACConnection.connect() assert conn acc = conn.commands built_ins = acc.GetAllPropertyNames() with open('built_ins_list.txt', 'w') as f: print(built_ins, file=f)
[ "archicad.ACConnection.connect" ]
[((104, 126), 'archicad.ACConnection.connect', 'ACConnection.connect', ([], {}), '()\n', (124, 126), False, 'from archicad import ACConnection\n')]
# really dumb example of using tree transformations w/asp import asp.codegen.ast_tools as ast_tools import asp.codegen.python_ast as ast import asp.codegen.cpp_ast as cpp #import asp.codegen.ast_explorer as ast_explorer class Converter(ast_tools.ConvertAST): pass class ArrayMap(object): def __init__(self): ...
[ "asp.codegen.ast_tools.parse_method", "asp.codegen.templating.template.Template", "asp.jit.asp_module.ASPModule" ]
[((413, 451), 'asp.codegen.ast_tools.parse_method', 'ast_tools.parse_method', (['self.operation'], {}), '(self.operation)\n', (435, 451), True, 'import asp.codegen.ast_tools as ast_tools\n'), ((665, 744), 'asp.codegen.templating.template.Template', 'template.Template', ([], {'filename': '"""templates/map_template.mako"...
# -*- coding: utf-8 -*- """ Created on Sat Apr 25 17:31:47 2020 @author: Gaurav """ import pandas as pd import numpy as np df=pd.read_csv("E:/Kaggel compitiion/House Prices Advanced Regression Techniques/After analizing/train.csv") df1=pd.read_csv("E:/Kaggel compitiion/House Prices Advanced Regression Techn...
[ "sklearn.model_selection.GridSearchCV", "sklearn.linear_model.Lasso", "pandas.read_csv", "xgboost.XGBRegressor", "pandas.DataFrame" ]
[((137, 252), 'pandas.read_csv', 'pd.read_csv', (['"""E:/Kaggel compitiion/House Prices Advanced Regression Techniques/After analizing/train.csv"""'], {}), "(\n 'E:/Kaggel compitiion/House Prices Advanced Regression Techniques/After analizing/train.csv'\n )\n", (148, 252), True, 'import pandas as pd\n'), ((248, 3...
import krippendorff import pandas as pd import numpy as np from . import utils def r_to_z(r): return np.arctanh(r) def z_to_r(z): return np.tanh(z) def confidence_interval(r, conf_level=95, stat=np.mean): z = r_to_z(r) ci = utils.bootstrap_ci(z, stat=stat, conf_level=conf_level) ci = z_to_r(c...
[ "numpy.arctanh", "pandas.Series", "numpy.tanh", "krippendorff.alpha" ]
[((107, 120), 'numpy.arctanh', 'np.arctanh', (['r'], {}), '(r)\n', (117, 120), True, 'import numpy as np\n'), ((149, 159), 'numpy.tanh', 'np.tanh', (['z'], {}), '(z)\n', (156, 159), True, 'import numpy as np\n'), ((334, 371), 'pandas.Series', 'pd.Series', (["{'lo': ci[0], 'hi': ci[1]}"], {}), "({'lo': ci[0], 'hi': ci[1...
from typing import Optional, Callable import os import torch from torch.utils.data import DataLoader from torch import Tensor from torch.optim.optimizer import Optimizer from torch.optim.lr_scheduler import _LRScheduler from overrides import overrides, EnforceOverrides from ..common.config import Config from ..commo...
[ "os.path.join" ]
[((1910, 1969), 'os.path.join', 'os.path.join', (['self._plotsdir', '"""EP{train_metrics.epoch:03d}"""'], {}), "(self._plotsdir, 'EP{train_metrics.epoch:03d}')\n", (1922, 1969), False, 'import os\n')]
# -*- coding: utf-8 -*- """Test suite for pytan3.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import pytest import pytan3 import re def test_encrypt_decrypt(): """Test encrypt / decrypt with valid key.""" ...
[ "pytan3.utils.crypt.decrypt", "re.match", "pytan3.utils.crypt.encrypt", "pytest.raises" ]
[((397, 443), 'pytan3.utils.crypt.encrypt', 'pytan3.utils.crypt.encrypt', ([], {'data': 'data', 'key': 'key'}), '(data=data, key=key)\n', (423, 443), False, 'import pytan3\n'), ((455, 488), 're.match', 're.match', (['"""\\\\d+\\\\$\\\\d+\\\\$"""', 'crypt'], {}), "('\\\\d+\\\\$\\\\d+\\\\$', crypt)\n", (463, 488), False,...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import cv2 from PIL import Image targetImg = "test-img02.png" aruco = cv2.aruco dictionary = aruco.getPredefinedDictionary(aruco.DICT_4X4_50) outputImg = "edit01-" + targetImg[0:-4]+".png" def arReader( argTargetImg , argOutputImg ): img = cv2.imread( argTargetI...
[ "PIL.Image.fromarray", "cv2.imread", "cv2.cvtColor" ]
[((298, 322), 'cv2.imread', 'cv2.imread', (['argTargetImg'], {}), '(argTargetImg)\n', (308, 322), False, 'import cv2\n'), ((335, 371), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (347, 371), False, 'import cv2\n'), ((554, 574), 'PIL.Image.fromarray', 'Image.fromarra...
import flask import config # The main flask app that will run the whole show flask_app: flask.Flask = flask.Flask(__name__) # Load the configuration from the Config class flask_app.config.from_object(config.LocalConfig)
[ "flask.Flask" ]
[((103, 124), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (114, 124), False, 'import flask\n')]
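The extract_api tuples follow a fixed layout. It is not documented in this dump, but the short untruncated records (such as the flask.Flask one just above) suggest the following reading, annotated here as a Python literal; the field meanings are inferred from the visible rows, not authoritative:

```python
(
    (103, 124),          # character span of the whole call inside `code`
    'flask.Flask',       # fully qualified API name
    'flask.Flask',       # callee exactly as written in the source
    (['__name__'], {}),  # positional and keyword arguments, as strings
    '(__name__)\n',      # normalized argument text
    (114, 124),          # span of just the argument list, after the callee
    False,               # apparently True when the import is aliased, e.g. `import numpy as np`
    'import flask\n',    # the import statement that binds the callee
)
```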
import re from haystack import indexes from genesets.models import Geneset try: from celery_haystack.indexes import CelerySearchIndex as SearchIndex except ImportError: from haystack.indexes import SearchIndex NONWORD = re.compile('\W+') class GenesetIndex(SearchIndex, indexes.Indexable): text = inde...
[ "haystack.indexes.CharField", "re.compile" ]
[((233, 251), 're.compile', 're.compile', (['"""\\\\W+"""'], {}), "('\\\\W+')\n", (243, 251), False, 'import re\n'), ((316, 367), 'haystack.indexes.CharField', 'indexes.CharField', ([], {'document': '(True)', 'use_template': '(True)'}), '(document=True, use_template=True)\n', (333, 367), False, 'from haystack import in...
#!/usr/bin/env python3 import http.server import socketserver from urllib.parse import parse_qsl as parse_query, urlparse as parse_url from abc import abstractmethod, ABC as abstractclass import json class ContentEncoder(abstractclass): @abstractmethod def get_type(): pass @abstractmethod def encode(self, conten...
[ "json.dumps", "socketserver.TCPServer", "urllib.parse.urlparse" ]
[((2026, 2071), 'socketserver.TCPServer', 'socketserver.TCPServer', (["('', port)", 'MyHandler'], {}), "(('', port), MyHandler)\n", (2048, 2071), False, 'import socketserver\n'), ((459, 478), 'json.dumps', 'json.dumps', (['content'], {}), '(content)\n', (469, 478), False, 'import json\n'), ((1647, 1667), 'urllib.parse....
import os import importlib, inspect import logging from ..Config import Config class Handler: _protocol = None _description = None _args = {} @classmethod def describe(cls): return { 'protocol': cls.protocol, 'description': cls.description, 'args': cls....
[ "os.listdir", "inspect.getmembers", "logging.debug", "importlib.import_module", "logging.exception", "logging.exeption", "os.path.abspath", "inspect.isclass", "logging.info" ]
[((1584, 1597), 'os.listdir', 'os.listdir', (['p'], {}), '(p)\n', (1594, 1597), False, 'import os\n'), ((1541, 1566), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1556, 1566), False, 'import os\n'), ((1985, 2011), 'inspect.getmembers', 'inspect.getmembers', (['module'], {}), '(module)\n', ...
import functools import inspect import pandas as pd import awkward1 as ak from .series import AwkwardSeries from .dtype import AwkardType funcs = [n for n in dir(ak) if inspect.isfunction(getattr(ak, n))] @pd.api.extensions.register_series_accessor("ak") class AwkwardAccessor: def __init__(self, pandas_obj): ...
[ "functools.wraps", "pandas.api.extensions.register_series_accessor", "awkward1.cartesian" ]
[((209, 257), 'pandas.api.extensions.register_series_accessor', 'pd.api.extensions.register_series_accessor', (['"""ak"""'], {}), "('ak')\n", (251, 257), True, 'import pandas as pd\n'), ((1758, 1779), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (1773, 1779), False, 'import functools\n'), ((1450, 1...
# -*- coding: utf-8 -*- # dcf # --- # A Python library for generating discounted cashflows. # # Author: sonntagsgesicht, based on a fork of Deutsche Postbank [pbrisk] # Version: 0.4, copyright Saturday, 10 October 2020 # Website: https://github.com/sonntagsgesicht/dcf # License: Apache License 2.0 (see LICENSE f...
[ "dcf.plans.annuity", "dcf.plans.amortize", "dcf.plans.same", "dcf.plans.bullet", "dcf.plans.outstanding" ]
[((651, 658), 'dcf.plans.same', 'same', (['n'], {}), '(n)\n', (655, 658), False, 'from dcf.plans import DEFAULT_AMOUNT, FIXED_RATE, same, bullet, amortize, annuity, consumer, outstanding\n'), ((832, 841), 'dcf.plans.bullet', 'bullet', (['n'], {}), '(n)\n', (838, 841), False, 'from dcf.plans import DEFAULT_AMOUNT, FIXED...
import numpy as np #we use numpy alot def main(): i = 0 #declare i = 0 n = 10 #declare n = 10 x = 119.0 #float x, these have a . #we can use numpy to quickly make arrays y = np.zeros(n, dtype=float) #declares 10 zeros #we can use for loops to iterate through a variable for i in range(n): #i in r...
[ "numpy.zeros" ]
[((189, 213), 'numpy.zeros', 'np.zeros', (['n'], {'dtype': 'float'}), '(n, dtype=float)\n', (197, 213), True, 'import numpy as np\n')]
# bot modules from bot.question.emails import EmailQuestion from bot.question.issues import IssueQuestion from bot.question.comments import CommentQuestion import bot.config as config # general python import re import nltk from nltk.tokenize import PunktSentenceTokenizer class QuestionDetector: """Utilizes regex...
[ "re.compile", "bot.question.emails.EmailQuestion", "bot.question.issues.IssueQuestion", "nltk.tokenize.punkt.PunktSentenceTokenizer", "bot.question.comments.CommentQuestion" ]
[((716, 760), 'nltk.tokenize.punkt.PunktSentenceTokenizer', 'nltk.tokenize.punkt.PunktSentenceTokenizer', ([], {}), '()\n', (758, 760), False, 'import nltk\n'), ((791, 826), 're.compile', 're.compile', (['"""[A-Z][a-z][^A-Z]*[?]$"""'], {}), "('[A-Z][a-z][^A-Z]*[?]$')\n", (801, 826), False, 'import re\n'), ((866, 977), ...
#! /usr/bin/python3 # # Copyright (C) 2015 VMware, Inc. All rights reserved. # publishtool for working with photonpublish # # Author(s): <NAME> # import sys import getopt from photonpublish import photonPublish from publishconst import publishConst const = publishConst() class publishTool: def __init__...
[ "getopt.getopt", "publishconst.publishConst", "photonpublish.photonPublish", "sys.exit" ]
[((269, 283), 'publishconst.publishConst', 'publishConst', ([], {}), '()\n', (281, 283), False, 'from publishconst import publishConst\n'), ((393, 415), 'photonpublish.photonPublish', 'photonPublish', (['context'], {}), '(context)\n', (406, 415), False, 'from photonpublish import photonPublish\n'), ((3867, 3970), 'geto...
from tensorflow.keras.layers import (Conv2D, Dense, Flatten, MaxPooling2D, TimeDistributed) def VGG16(inputs): x = Conv2D(64,(3,3),activation = 'relu',padding = 'same',name = 'block1_conv1')(inputs) x = Conv2D(64,(3,3),activation = 'relu',padding = 'same', name = 'bl...
[ "tensorflow.keras.layers.MaxPooling2D", "tensorflow.keras.layers.Flatten", "tensorflow.keras.layers.Dense", "tensorflow.keras.layers.Conv2D" ]
[((163, 237), 'tensorflow.keras.layers.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""', 'padding': '"""same"""', 'name': '"""block1_conv1"""'}), "(64, (3, 3), activation='relu', padding='same', name='block1_conv1')\n", (169, 237), False, 'from tensorflow.keras.layers import Conv2D, Dense, Flatten, M...
import numpy as np import matplotlib as plt from collections import Counter from math import log import sys import time class ListQueue: def __init__(self, capacity): self.__capacity = capacity self.__data = [None] * self.__capacity self.__size = 0 self.__front = 0 ...
[ "numpy.unique", "collections.Counter", "numpy.argsort", "numpy.array", "numpy.log2", "numpy.genfromtxt" ]
[((7410, 7495), 'numpy.genfromtxt', 'np.genfromtxt', (['"""train.csv"""'], {'dtype': 'np.float64', 'encoding': '"""utf-8-sig"""', 'delimiter': '""","""'}), "('train.csv', dtype=np.float64, encoding='utf-8-sig',\n delimiter=',')\n", (7423, 7495), True, 'import numpy as np\n'), ((4406, 4438), 'numpy.unique', 'np.uniqu...
# -*- coding: utf-8 -*- """ OpenNebula Driver for Linstor Copyright 2018 LINBIT USA LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required...
[ "xml.etree.ElementTree.fromstring" ]
[((794, 812), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['xml'], {}), '(xml)\n', (807, 812), True, 'import xml.etree.ElementTree as ET\n')]
#!/usr/bin/env python import sys, os, subprocess, string, re, math def tc2frames(tc, Framerate): frames = int(tc.split(':')[3]) frames += int(tc.split(':')[2]) * Framerate frames += int(tc.split(':')[1]) * Framerate * 60 frames += int(tc.split(':')[0]) * Framerate * 60 * 60 return frames def frame...
[ "math.floor" ]
[((512, 554), 'math.floor', 'math.floor', (['(frames / (Framerate * 60 * 60))'], {}), '(frames / (Framerate * 60 * 60))\n', (522, 554), False, 'import sys, os, subprocess, string, re, math\n'), ((650, 691), 'math.floor', 'math.floor', (['(framesleft / (Framerate * 60))'], {}), '(framesleft / (Framerate * 60))\n', (660,...
import json import logging from os import environ from os.path import basename, getsize, getmtime, splitext, exists import arrow from .settings import config def env_to_json(): """convert current env variables to json text.""" return json.dumps(dict(environ)) def get_default_run_name(): """try to get ...
[ "os.path.exists", "os.path.getsize", "os.path.splitext", "os.environ.get", "os.path.basename", "os.path.getmtime" ]
[((420, 453), 'os.environ.get', 'environ.get', (['"""JOB_NAME"""', 'run_name'], {}), "('JOB_NAME', run_name)\n", (431, 453), False, 'from os import environ\n'), ((607, 637), 'os.environ.get', 'environ.get', (['"""BUILD_URL"""', 'None'], {}), "('BUILD_URL', None)\n", (618, 637), False, 'from os import environ\n'), ((549...
#!/usr/bin/env python3 # coding: utf-8 __author__ = 'ChenyangGao <https://chenyanggao.github.io/>' __version__ = (0, 0, 3) __all__ = ['TkinterXMLConfigParser'] # Reference: # - [python > docs > tkinter](docs.python.org/3/library/tkinter.html) # - [Tk tutorial](https://tk-tutorial.readthedocs.io/en/latest/) # ...
[ "textwrap.dedent", "weakref.WeakValueDictionary", "importlib.import_module", "types.MappingProxyType", "types.ModuleType", "collections.ChainMap", "copy.copy", "xml.etree.ElementTree.fromstring", "typing.cast" ]
[((3484, 3504), 'typing.cast', 'cast', (['TokenInfo', 'tok'], {}), '(TokenInfo, tok)\n', (3488, 3504), False, 'from typing import cast, Callable, Container, Dict, Final, Generator, Iterable, List, Mapping, MutableMapping, NamedTuple, Optional, Tuple, Union\n'), ((2640, 2665), 'typing.cast', 'cast', (['str', 'curr.lastg...
import re import lisp_parser as lp def cons(expression): """ cons Is used to compose larger s-expressions from smaller expressions. (cons a b), expects b to be a list and returns a new list with a as the first element followed by all the elements of b :param string :return: string """ ...
[ "re.sub" ]
[((379, 409), 're.sub', 're.sub', (['"""[^a-zA-Z0-9]+"""', '""""""', '_'], {}), "('[^a-zA-Z0-9]+', '', _)\n", (385, 409), False, 'import re\n'), ((1695, 1725), 're.sub', 're.sub', (['"""[^a-zA-Z0-9]+"""', '""""""', '_'], {}), "('[^a-zA-Z0-9]+', '', _)\n", (1701, 1725), False, 'import re\n'), ((943, 973), 're.sub', 're....
import os import io import base64 import os.path from zipfile import ZipFile from odoo import api, fields, models class ExportNfe(models.TransientModel): _name = 'wizard.export.nfe' _description = "Exporta NF-e" start_date = fields.Date(string=u"Data Inicial", required=True) end_date = fields.Date(st...
[ "zipfile.ZipFile", "odoo.fields.Binary", "os.makedirs", "odoo.fields.Date", "io.BytesIO", "os.path.join", "odoo.fields.Many2one", "odoo.fields.Selection", "base64.decodestring", "odoo.fields.Char" ]
[((240, 290), 'odoo.fields.Date', 'fields.Date', ([], {'string': 'u"""Data Inicial"""', 'required': '(True)'}), "(string=u'Data Inicial', required=True)\n", (251, 290), False, 'from odoo import api, fields, models\n'), ((306, 354), 'odoo.fields.Date', 'fields.Date', ([], {'string': 'u"""Data Final"""', 'required': '(Tr...
from copy import copy import sqlite3 import pandas as pd import pandas_to_sql from pandas_to_sql.testing.utils.fake_data_creation import create_fake_dataset from pandas_to_sql.conventions import flatten_grouped_dataframe # table_name = 'random_data' # df, _ = create_fake_dataset() # df_ = pandas_to_sql.wrap_df(df, tab...
[ "pandas_to_sql.wrap_pd", "pandas.read_sql_query", "sqlite3.connect", "pandas.read_csv", "copy.copy", "pandas_to_sql.wrap_df", "pandas_to_sql.testing.utils.asserters.assert_dataframes_equals" ]
[((509, 599), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/mwaskom/seaborn-data/master/iris.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/mwaskom/seaborn-data/master/iris.csv')\n", (520, 599), True, 'import pandas as pd\n'), ((632, 660), 'sqlite3.connect', 'sqlite3.connect', (['...
"""Home Assistant Python 3 API wrapper for Moving Intelligence.""" import datetime import logging from .utils import Utils _LOGGER = logging.getLogger("pymovingintelligence_ha") class MovingIntelligence: """Class for communicating with the Moving Intelligence API.""" def __init__( self, use...
[ "logging.getLogger", "datetime.datetime.fromtimestamp" ]
[((135, 179), 'logging.getLogger', 'logging.getLogger', (['"""pymovingintelligence_ha"""'], {}), "('pymovingintelligence_ha')\n", (152, 179), False, 'import logging\n'), ((5459, 5497), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['stamp'], {}), '(stamp)\n', (5490, 5497), False, 'import dateti...
from flask_restful import Api class ViewInjector: def __init__(self, app=None): if app is not None: self.init_app(app) def init_app(self, app): from app.views.blockchain import Node, Chain, Mine, Transaction api = Api(app) api.add_resource(Node, '/node') ...
[ "flask_restful.Api" ]
[((262, 270), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (265, 270), False, 'from flask_restful import Api\n')]
import os import pytest import ngage.cli this_dir = os.path.dirname(__file__) data_dir = os.path.join(this_dir, "data") @pytest.fixture() def ctx(): return ngage.cli.Context(home=os.path.join(data_dir, "config", "tst0")) host_tst0 = "tst0.example.com" host_tst1 = "tst1.example.com" host_tst2 = "tst2.example....
[ "pytest.fixture", "os.path.dirname", "os.path.join" ]
[((55, 80), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (70, 80), False, 'import os\n'), ((92, 122), 'os.path.join', 'os.path.join', (['this_dir', '"""data"""'], {}), "(this_dir, 'data')\n", (104, 122), False, 'import os\n'), ((126, 142), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n...
from django.contrib import admin # Register your models here. from import_export.admin import ImportExportModelAdmin from .models import StudentDetails,RegisterdStudents @admin.register(StudentDetails) class StudentDetailsAdmin(ImportExportModelAdmin): pass admin.site.register(RegisterdStudents)
[ "django.contrib.admin.register", "django.contrib.admin.site.register" ]
[((174, 204), 'django.contrib.admin.register', 'admin.register', (['StudentDetails'], {}), '(StudentDetails)\n', (188, 204), False, 'from django.contrib import admin\n'), ((266, 304), 'django.contrib.admin.site.register', 'admin.site.register', (['RegisterdStudents'], {}), '(RegisterdStudents)\n', (285, 304), False, 'f...
# --- # jupyter: # jupytext: # text_representation: # extension: .py # format_name: light # format_version: '1.5' # jupytext_version: 1.11.3 # kernelspec: # display_name: Python 3 # name: python3 # --- # + [markdown] id="view-in-github" colab_type="text" # <a href="https://colab...
[ "hps.Hyperparams", "jax.local_devices", "google.colab.auth.authenticate_user", "numpy.array", "data.set_up_data", "train_helpers.setup_save_dirs", "train.get_sample_for_visualization", "jax.random.PRNGKey", "dataclasses.asdict", "argparse.ArgumentParser", "numpy.asarray", "flax.jax_utils.unrep...
[((849, 873), 'google.colab.auth.authenticate_user', 'auth.authenticate_user', ([], {}), '()\n', (871, 873), False, 'from google.colab import auth\n'), ((2613, 2632), 'jax.local_devices', 'jax.local_devices', ([], {}), '()\n', (2630, 2632), False, 'import jax\n'), ((2927, 2940), 'hps.Hyperparams', 'Hyperparams', ([], {...
"""Performs face alignment and stores face thumbnails in the output directory.""" # MIT License # # Copyright (c) 2016 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restr...
[ "os.path.exists", "numpy.minimum", "numpy.power", "scipy.misc.imsave", "os.path.join", "numpy.asarray", "os.path.splitext", "os.path.split", "numpy.squeeze", "numpy.argmax", "scipy.misc.imread", "numpy.zeros", "numpy.vstack", "scipy.misc.imresize", "numpy.maximum", "facenet.to_rgb" ]
[((1824, 1870), 'os.path.join', 'os.path.join', (['curr_dir', "(filename + '_face.jpg')"], {}), "(curr_dir, filename + '_face.jpg')\n", (1836, 1870), False, 'import os\n'), ((1902, 1933), 'os.path.exists', 'os.path.exists', (['output_filename'], {}), '(output_filename)\n', (1916, 1933), False, 'import os\n'), ((1966, 1...
# -*- coding: utf-8 -*- """ Shell functions for the module. """ import pathlib import re import os import sys import subprocess def run_in_shell_with_output(cmd): """ Runs a given command in a subshell and prints the ouput. :param cmd: Command to run. :returns: Returns the exit code of the subproces...
[ "pathlib.Path", "subprocess.Popen", "os.path.join", "os.path.split", "re.findall" ]
[((341, 427), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess\n .PIPE)\n', (357, 427), False, 'import subprocess\n'), ((768, 854), 'subprocess.Popen', 'subprocess.Popen', (['...
""" Convnet classifier for MNIST data using TensorFlow. """ import tensorflow as tf import mnist.mnist as mnist import pandas as pd DEFAULT_INPUT_DIMENSIONS = 784 DEFAULT_OUTPUT_DIMENSIONS = 10 DEFAULT_LEARNING_RATE = 0.1 DEFAULT_BATCH_SIZE = 50 DEFAULT_KEEP_PROB = 0.5 class CNNClassifier(...
[ "tensorflow.nn.conv2d", "tensorflow.nn.max_pool", "tensorflow.initialize_all_variables", "tensorflow.Variable", "tensorflow.log", "tensorflow.placeholder", "tensorflow.Session", "mnist.mnist.fetch_data", "tensorflow.argmax", "tensorflow.constant", "tensorflow.nn.dropout", "tensorflow.matmul", ...
[((975, 1013), 'tensorflow.truncated_normal', 'tf.truncated_normal', (['shape'], {'stddev': '(0.1)'}), '(shape, stddev=0.1)\n', (994, 1013), True, 'import tensorflow as tf\n'), ((1025, 1045), 'tensorflow.Variable', 'tf.Variable', (['initial'], {}), '(initial)\n', (1036, 1045), True, 'import tensorflow as tf\n'), ((1088...
#!/usr/bin/python3 # # Copyright 2010 Google Inc. # All Rights Reserved. """Diff Speed Test """ __author__ = "<EMAIL> (<NAME>)" import imp import gc import sys import time import diff_match_patch as dmp_module # Force a module reload. Allows one to edit the DMP module and rerun the test # without leaving the Pytho...
[ "imp.reload", "time.time", "diff_match_patch.diff_match_patch", "gc.collect" ]
[((335, 357), 'imp.reload', 'imp.reload', (['dmp_module'], {}), '(dmp_module)\n', (345, 357), False, 'import imp\n'), ((460, 489), 'diff_match_patch.diff_match_patch', 'dmp_module.diff_match_patch', ([], {}), '()\n', (487, 489), True, 'import diff_match_patch as dmp_module\n'), ((597, 609), 'gc.collect', 'gc.collect', ...
from tester.ni_usb_6211 import NiUsb6211 import numpy as np OUTPUT_READ_CHANNEL = "ai0" VCC_READ_CHANNEL = "ai1" TOLERANCE = 0.001 def test_find_devices(): devices = NiUsb6211.find_devices() assert type(devices) == list, "Not a list!" if len(devices) > 0: assert type(devices[0]) == str, "An eleme...
[ "tester.ni_usb_6211.NiUsb6211", "numpy.all", "tester.ni_usb_6211.NiUsb6211.find_devices" ]
[((172, 196), 'tester.ni_usb_6211.NiUsb6211.find_devices', 'NiUsb6211.find_devices', ([], {}), '()\n', (194, 196), False, 'from tester.ni_usb_6211 import NiUsb6211\n'), ((382, 472), 'tester.ni_usb_6211.NiUsb6211', 'NiUsb6211', ([], {'output_read_channel': 'OUTPUT_READ_CHANNEL', 'vcc_read_channel': 'VCC_READ_CHANNEL'}),...
from app.main import main from flask import render_template @main.app_errorhandler(404) def page_not_found(): return render_template('404.html'), 404 @main.app_errorhandler(500) def internal_server_error(e): return render_template('500.html'), 500
[ "flask.render_template", "app.main.main.app_errorhandler" ]
[((63, 89), 'app.main.main.app_errorhandler', 'main.app_errorhandler', (['(404)'], {}), '(404)\n', (84, 89), False, 'from app.main import main\n'), ((159, 185), 'app.main.main.app_errorhandler', 'main.app_errorhandler', (['(500)'], {}), '(500)\n', (180, 185), False, 'from app.main import main\n'), ((123, 150), 'flask.r...
from layers import MLP, DotInteraction import tensorflow as tf from tensorflow import keras class DeepFM(keras.Model): def __init__(self, embedding_size, vocab_size, num_int_fea, num_cat_fea, mlp_units): super().__init__() self.embedding_size = embedding_size self.embedding_layer = keras.l...
[ "tensorflow.reduce_sum", "layers.DotInteraction", "tensorflow.keras.layers.Embedding", "tensorflow.concat", "tensorflow.nn.sigmoid", "tensorflow.reshape", "tensorflow.expand_dims", "layers.MLP", "tensorflow.squeeze", "tensorflow.square" ]
[((313, 363), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['vocab_size', 'embedding_size'], {}), '(vocab_size, embedding_size)\n', (335, 363), False, 'from tensorflow import keras\n'), ((461, 498), 'layers.MLP', 'MLP', (['mlp_units'], {'final_activation': 'None'}), '(mlp_units, final_activation=None...
from django import forms class ParsePageForm(forms.Form): url = forms.URLField(label='Enter page link here', required=True)
[ "django.forms.URLField" ]
[((70, 129), 'django.forms.URLField', 'forms.URLField', ([], {'label': '"""Enter page link here"""', 'required': '(True)'}), "(label='Enter page link here', required=True)\n", (84, 129), False, 'from django import forms\n')]
""" An example showing how to create fields on a new class. """ from jawa import ClassFile if __name__ == '__main__': cf = ClassFile.create('HelloWorld') # Creating a field from a field name and descriptor field = cf.fields.create('BeerCount', 'I') # A convienience shortcut for creating static fields...
[ "jawa.ClassFile.create" ]
[((128, 158), 'jawa.ClassFile.create', 'ClassFile.create', (['"""HelloWorld"""'], {}), "('HelloWorld')\n", (144, 158), False, 'from jawa import ClassFile\n')]
import time import mxnet as mx benchmark_dataiter = mx.io.ImageRecordIter( path_imgrec="../data/test.rec", data_shape=(1, 28, 28), batch_size=64, mean_r=128, scale=0.00390625, ) mod = mx.mod.Module.load('mnist_lenet', 35, context=mx.gpu(2)) mod.bind( data_shapes=benchmark_dataiter.provide_data...
[ "mxnet.io.ImageRecordIter", "time.time", "mxnet.gpu" ]
[((53, 180), 'mxnet.io.ImageRecordIter', 'mx.io.ImageRecordIter', ([], {'path_imgrec': '"""../data/test.rec"""', 'data_shape': '(1, 28, 28)', 'batch_size': '(64)', 'mean_r': '(128)', 'scale': '(0.00390625)'}), "(path_imgrec='../data/test.rec', data_shape=(1, 28, 28\n ), batch_size=64, mean_r=128, scale=0.00390625)\n...
import typing import matplotlib import matplotlib.colors as mcolors import matplotlib.pyplot as plt import networkx as nx from PyQt5.QtWidgets import QWidget from rpasdt.gui.analysis.models import AnalysisData from rpasdt.gui.mathplotlib_components import NetworkxGraphPanel matplotlib.use("Qt5Agg") class Centralit...
[ "matplotlib.use", "matplotlib.pyplot.gcf", "matplotlib.colors.SymLogNorm" ]
[((278, 302), 'matplotlib.use', 'matplotlib.use', (['"""Qt5Agg"""'], {}), "('Qt5Agg')\n", (292, 302), False, 'import matplotlib\n'), ((1106, 1161), 'matplotlib.colors.SymLogNorm', 'mcolors.SymLogNorm', ([], {'linthresh': '(0.01)', 'linscale': '(1)', 'base': '(10)'}), '(linthresh=0.01, linscale=1, base=10)\n', (1124, 11...
import numpy as np from keras import layers from keras import Model from keras import backend from ConfigSpace import ConfigurationSpace from ConfigSpace import UniformIntegerHyperparameter, CategoricalHyperparameter from alphaml.engine.components.models.base_dl_model import BaseImageClassificationModel from alphaml.u...
[ "ConfigSpace.UniformIntegerHyperparameter", "keras.layers.Conv2D", "keras.backend.image_data_format", "keras.layers.MaxPooling2D", "keras.Model", "keras.layers.add", "keras.layers.Input", "keras.layers.Activation", "keras.layers.SeparableConv2D", "alphaml.engine.components.models.base_dl_model.Bas...
[((2153, 2184), 'keras.layers.Input', 'layers.Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (2165, 2184), False, 'from keras import layers\n'), ((3848, 3873), 'keras.layers.add', 'layers.add', (['[x, residual]'], {}), '([x, residual])\n', (3858, 3873), False, 'from keras import layers\n'), ((4897, 49...
import mysql.connector from fixture.orm import ORMFixture from model.group import Group db = ORMFixture(host="127.0.0.1", database = "addressbook", user = "root", password = "") #connection = mysql.connector.connect(host="127.0.0.1", database = "addressbook", user = "root", password = "") try: l = db.get_group_li...
[ "model.group.Group", "fixture.orm.ORMFixture" ]
[((94, 172), 'fixture.orm.ORMFixture', 'ORMFixture', ([], {'host': '"""127.0.0.1"""', 'database': '"""addressbook"""', 'user': '"""root"""', 'password': '""""""'}), "(host='127.0.0.1', database='addressbook', user='root', password='')\n", (104, 172), False, 'from fixture.orm import ORMFixture\n'), ((411, 426), 'model.g...
from wikipedia.spiders import WikipediaSpider from scrapy.crawler import CrawlerProcess import networkx as nx import matplotlib.pyplot as plt import urllib.parse if __name__ == "__main__": crawl_depth = 2 process = CrawlerProcess({ 'LOG_LEVEL': 'ERROR', 'DEPTH_LIMIT': crawl_depth }) pro...
[ "networkx.astar_path", "scrapy.crawler.CrawlerProcess", "networkx.Graph" ]
[((224, 290), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', (["{'LOG_LEVEL': 'ERROR', 'DEPTH_LIMIT': crawl_depth}"], {}), "({'LOG_LEVEL': 'ERROR', 'DEPTH_LIMIT': crawl_depth})\n", (238, 290), False, 'from scrapy.crawler import CrawlerProcess\n'), ((603, 613), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (611, 6...
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. """Defines a class for the COMPAS dataset.""" import pandas as pd import numpy as np from .base_wrapper import BasePerformanceDatasetWrapper from tempeh.constants import FeatureType, Tasks, DataTypes, ClassVars, CompasDatase...
[ "pandas.get_dummies", "numpy.delete", "numpy.unique", "pandas.read_csv" ]
[((768, 888), 'pandas.read_csv', 'pd.read_csv', (['"""https://raw.githubusercontent.com/propublica/compas-analysis/master/compas-scores-two-years.csv"""'], {}), "(\n 'https://raw.githubusercontent.com/propublica/compas-analysis/master/compas-scores-two-years.csv'\n )\n", (779, 888), True, 'import pandas as pd\n')...
#!/usr/bin/python # Python Dependencies import os from datetime import datetime # Python 3rd Party dependencies from werkzeug.utils import secure_filename # Flask dependecies from flask import Flask, flash, render_template, request, redirect, url_for import flask_login # SQL alchemy dependencies from sqlalchemy imp...
[ "flask.render_template", "flask_login.LoginManager", "flask.Flask", "werkzeug.utils.secure_filename", "os.remove", "sqlalchemy.orm.sessionmaker", "flask.flash", "sqlalchemy.create_engine", "flask.request.form.get", "registering.register.RegistrationLogic", "flask.redirect", "os.path.isfile", ...
[((891, 906), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (896, 906), False, 'from flask import Flask, flash, render_template, request, redirect, url_for\n'), ((917, 956), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///userblogs.db"""'], {}), "('sqlite:///userblogs.db')\n", (930, 956), Fal...
import numpy as np from scipy.interpolate import CubicSpline class WaypointTraj(object): """ """ def __init__(self, points): """ This is the constructor for the Trajectory object. A fresh trajectory object will be constructed before each mission. For a waypoint trajectory, ...
[ "numpy.reshape", "scipy.interpolate.CubicSpline", "numpy.zeros", "numpy.linalg.norm", "numpy.shape" ]
[((1920, 1934), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (1928, 1934), True, 'import numpy as np\n'), ((1954, 1968), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (1962, 1968), True, 'import numpy as np\n'), ((1988, 2002), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (1996, 2002), True,...
import frappe from frappe.utils import today, getdate, cint, now, add_days, parse_val,add_to_date,nowdate from frappe.utils.safe_exec import get_safe_globals def create_task_for_event(doc, method): try: if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_inst...
[ "frappe.utils.parse_val", "frappe.get_traceback", "frappe.utils.cint", "frappe.db.has_column", "frappe.utils.today", "frappe.whitelist", "frappe.utils.safe_exec.get_safe_globals", "frappe.utils.nowdate", "frappe.get_doc", "frappe.db.commit", "frappe.safe_eval", "frappe.get_all", "frappe.util...
[((5698, 5716), 'frappe.whitelist', 'frappe.whitelist', ([], {}), '()\n', (5714, 5716), False, 'import frappe\n'), ((1664, 1829), 'frappe.get_all', 'frappe.get_all', (['self.ref_doctype'], {'fields': '"""name"""', 'filters': "[{self.date_changed: ('>=', reference_date_start)}, {self.date_changed: (\n '<=', reference...
"""Inference/predict code for simple_sequence dataset model must be trained before inference, train_simple_sequence.py must be executed beforehand. """ from __future__ import print_function import argparse import os import matplotlib import numpy as np from simple_sequence.simple_sequence_dataset import N_VOCABULA...
[ "argparse.ArgumentParser", "matplotlib.use", "chainer.cuda.get_device", "chainer.cuda.to_cpu", "chainer.serializers.load_npz" ]
[((351, 372), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (365, 372), False, 'import matplotlib\n'), ((774, 845), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""simple_sequence RNN predict code"""'}), "(description='simple_sequence RNN predict code')\n", (797, 8...
# Record mono 16bits samples from the audio device and send them to stdout. # Assume the sampling rate is compatible. # Use a small queue size to minimize delays. import al, sys import AL BUFSIZE = 2000 QSIZE = 4000 def main(): c = al.newconfig() c.setchannels(AL.MONO) c.setqueuesize(QSIZE) p = al.openport('', '...
[ "sys.exit", "al.newconfig", "al.openport", "sys.stdout.write" ]
[((236, 250), 'al.newconfig', 'al.newconfig', ([], {}), '()\n', (248, 250), False, 'import al, sys\n'), ((303, 326), 'al.openport', 'al.openport', (['""""""', '"""r"""', 'c'], {}), "('', 'r', c)\n", (314, 326), False, 'import al, sys\n'), ((369, 391), 'sys.stdout.write', 'sys.stdout.write', (['data'], {}), '(data)\n', ...
from gym.envs.registration import register def register_custom_env(): # finite time convergence test suite config = { 'robot_base': 'xmls/point.xml', # dt in xml, default 0.002s for point # finite time convergence test suite modification 'robot_placements': None, # Robot placements list (defa...
[ "gym.envs.registration.register" ]
[((3781, 3877), 'gym.envs.registration.register', 'register', ([], {'id': 'env_id', 'entry_point': '"""safety_gym.envs.mujoco:Engine"""', 'kwargs': "{'config': config}"}), "(id=env_id, entry_point='safety_gym.envs.mujoco:Engine', kwargs={\n 'config': config})\n", (3789, 3877), False, 'from gym.envs.registration impo...
# Question 07, Lab 07 # AB Satyaprakash, 180123062 # imports import pandas as pd import numpy as np # functions def f(t, y): return y - t**2 + 1 def F(t): return (t+1)**2 - 0.5*np.exp(t) def RungeKutta4(t, y, h): k1 = f(t, y) k2 = f(t+h/2, y+h*k1/2) k3 = f(t+h/2, y+h*k2/2) k4 = f(t+h, y+...
[ "pandas.DataFrame", "numpy.exp", "pandas.Series" ]
[((1063, 1077), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1075, 1077), True, 'import pandas as pd\n'), ((1120, 1132), 'pandas.Series', 'pd.Series', (['y'], {}), '(y)\n', (1129, 1132), True, 'import pandas as pd\n'), ((1154, 1169), 'pandas.Series', 'pd.Series', (['yact'], {}), '(yact)\n', (1163, 1169), True...
import numpy as np import cwrapping GurobiEnv = cwrapping.gurobicpy.GurobiEnv def make_float64(lists): newlists = [] for e in lists: newlists.append(np.float64(e)) return newlists def check_feasibility(A, b, solution): RHS = np.dot(A, solution) if np.sum(RHS - (1.0 - 1e-10) * b > 1e-5) >= 1: return False ...
[ "numpy.sum", "numpy.dot", "numpy.float64" ]
[((235, 254), 'numpy.dot', 'np.dot', (['A', 'solution'], {}), '(A, solution)\n', (241, 254), True, 'import numpy as np\n'), ((259, 298), 'numpy.sum', 'np.sum', (['(RHS - (1.0 - 1e-10) * b > 1e-05)'], {}), '(RHS - (1.0 - 1e-10) * b > 1e-05)\n', (265, 298), True, 'import numpy as np\n'), ((155, 168), 'numpy.float64', 'np...
import math import vmath from vmathlib import vcolor, vutil import vmathlib import toy import keycodes import drawutil from unit_manager import Unit import mathutil class Shadowman(Unit): def __init__(self, world, unit_id, param): super().__init__(world, unit_id, param) self.camera_transform...
[ "toy.app.light_manager.get_sun", "toy.app.input_manager.get_key_down", "drawutil.draw_transform", "drawutil.draw_perspective", "toy.app.camera_manager.get_camera", "vmathlib.vcolor.hue", "mathutil.perspective_to_bounding_orthographic", "vmath.Transform", "toy.app.camera_manager.get_camera_controller...
[((323, 340), 'vmath.Transform', 'vmath.Transform', ([], {}), '()\n', (338, 340), False, 'import vmath\n'), ((422, 471), 'toy.app.input_manager.get_key_down', 'toy.app.input_manager.get_key_down', (['keycodes.VK_T'], {}), '(keycodes.VK_T)\n', (456, 471), False, 'import toy\n'), ((1195, 1226), 'toy.app.light_manager.get...
import numpy as np import torch from torch.utils.data import Dataset class DSpritesDataset(Dataset): """dSprites dataset.""" def __init__(self, npz_file:str, transform=None): """ Args: npz_file: Path to the npz file. root_dir: Directory with all the images. ...
[ "numpy.load" ]
[((449, 504), 'numpy.load', 'np.load', (['npz_file'], {'allow_pickle': '(True)', 'encoding': '"""latin1"""'}), "(npz_file, allow_pickle=True, encoding='latin1')\n", (456, 504), True, 'import numpy as np\n')]
import torch import torch.nn as nn class STConvLSTMCell(nn.Module): """ Spatio-Temporal Convolutional LSTM Cell Implementation. """ def __init__(self, input_size, input_dim, hidden_dim, kernel_size, bias, forget_bias=1.0, layer_norm=True): super(STConvLSTMCell, self).__init__() self....
[ "torch.tanh", "torch.nn.BatchNorm2d", "torch.split", "torch.sigmoid", "torch.nn.Conv2d", "torch.nn.init.orthogonal_", "torch.tensor", "torch.cat" ]
[((653, 796), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'self.input_dim', 'out_channels': '(7 * self.hidden_dim)', 'kernel_size': 'self.kernel_size', 'padding': 'self.padding', 'bias': 'self.bias'}), '(in_channels=self.input_dim, out_channels=7 * self.hidden_dim,\n kernel_size=self.kernel_size, padding=se...
import seaborn as sns import matplotlib.pyplot as plt def dfSummary(data): try: return data.describe() #statistical summary except: print("Unable to provide a statistical summary") return False def colBoxPlot(data): boxplot_inputs = [] for col in range(0, len(data.colu...
[ "seaborn.boxplot", "matplotlib.pyplot.show" ]
[((676, 723), 'seaborn.boxplot', 'sns.boxplot', (['data[1]'], {'orient': '"""v"""', 'linewidth': '(2.5)'}), "(data[1], orient='v', linewidth=2.5)\n", (687, 723), True, 'import seaborn as sns\n'), ((808, 818), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (816, 818), True, 'import matplotlib.pyplot as plt\n')]
# -*- coding: utf-8 -*- from tflearn.data_utils import * from os.path import join import numpy as np from skimage import io, transform from keras.models import load_model from skimage.color import rgb2lab, lab2rgb import time from functools import wraps import warnings from tensorflow.python.ops.image_ops import rgb_to...
[ "numpy.uint8", "tensorflow.shape", "tensorflow.transpose", "tensorflow.slice", "numpy.mean", "skimage.color.lab2rgb", "keras.backend.square", "functools.wraps", "keras.backend.var", "tensorflow.extract_image_patches", "tensorflow.size", "keras.backend.abs", "tensorflow.stack", "keras.backe...
[((380, 413), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (403, 413), False, 'import warnings\n'), ((451, 462), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (456, 462), False, 'from functools import wraps\n'), ((513, 524), 'time.time', 'time.time', ([], {}), ...
# !/usr/bin/env python # -*- coding: utf-8 -*- """Database""" import os from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from app.config import get_config_name, get_config from app.models import Base def get_engine(config_name=None): if config_name is None: config_name = get_c...
[ "sqlalchemy.orm.sessionmaker", "app.config.get_config_name", "sqlalchemy.create_engine", "sqlalchemy.ext.compiler.compiles", "app.models.Base.metadata.drop_all", "app.config.get_config", "app.models.Base.metadata.create_all" ]
[((1031, 1064), 'sqlalchemy.ext.compiler.compiles', 'compiles', (['DropTable', '"""postgresql"""'], {}), "(DropTable, 'postgresql')\n", (1039, 1064), False, 'from sqlalchemy.ext.compiler import compiles\n'), ((346, 369), 'app.config.get_config', 'get_config', (['config_name'], {}), '(config_name)\n', (356, 369), False,...
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, ...
[ "jax.lax.pmax", "jax.lax.pmean", "jax.lax.psum", "jax.lax.pmin" ]
[((870, 892), 'jax.lax.pmax', 'lax.pmax', (['x', 'axis_name'], {}), '(x, axis_name)\n', (878, 892), False, 'from jax import lax\n'), ((1071, 1094), 'jax.lax.pmean', 'lax.pmean', (['x', 'axis_name'], {}), '(x, axis_name)\n', (1080, 1094), False, 'from jax import lax\n'), ((1271, 1293), 'jax.lax.pmin', 'lax.pmin', (['x',...
from flask import jsonify,request from app import app import base64 import sys,os #import sgxwraper to get access to the enclave sys.path.insert(0,'../SGX_lib/') import sgxwrapper @app.route('/') @app.route('/index') def index(): return "<h1>My Inventory List</h1>" @app.route('/api/v1.0/image_verify',methods=['P...
[ "sys.path.insert", "flask.jsonify", "base64.b64decode", "flask.request.get_json", "app.app.route", "os.remove" ]
[((130, 163), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../SGX_lib/"""'], {}), "(0, '../SGX_lib/')\n", (145, 163), False, 'import sys, os\n'), ((183, 197), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (192, 197), False, 'from app import app\n'), ((199, 218), 'app.app.route', 'app.route', (['"""/...
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __a...
[ "pulumi.getter", "pulumi.set", "pulumi.ResourceOptions", "pulumi.get" ]
[((1128, 1158), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""parentId"""'}), "(name='parentId')\n", (1141, 1158), False, 'import pulumi\n'), ((1455, 1485), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""pathPart"""'}), "(name='pathPart')\n", (1468, 1485), False, 'import pulumi\n'), ((1789, 1820), 'pulumi.ge...
import pytest from veras import router @pytest.mark.parametrize("origin, destination, expected", [ ("KLAX", "KSFO", "SUMMR2 STOKD SERFR SERFR4"), ]) def test_find_route(origin, destination, expected): assert router.find_route(origin, destination) == expected
[ "pytest.mark.parametrize", "veras.router.find_route" ]
[((42, 151), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""origin, destination, expected"""', "[('KLAX', 'KSFO', 'SUMMR2 STOKD SERFR SERFR4')]"], {}), "('origin, destination, expected', [('KLAX', 'KSFO',\n 'SUMMR2 STOKD SERFR SERFR4')])\n", (65, 151), False, 'import pytest\n'), ((218, 256), 'veras.rout...
import argparse def merge(infiles, outfile): setReads = set() for infile in infiles: with open(infile, "r") as fileIn: for strLine in fileIn: if strLine.startswith('@'): continue strSplit = strLine.split("\t") if strSplit[...
[ "argparse.ArgumentParser" ]
[((545, 570), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (568, 570), False, 'import argparse\n')]
# -*- coding: utf-8 -*- # Copyright 2018 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by appli...
[ "cloudkittyclient.tests.utils.FakeRequest" ]
[((6162, 6338), 'cloudkittyclient.tests.utils.FakeRequest', 'utils.FakeRequest', ([], {'cost': '"""Bad value"""', 'value': '"""Bad value"""', 'service_id': '"""Bad value"""', 'field_id': '"""Bad value"""', 'tenant_id': '"""Bad value"""', 'type': '"""Bad value"""', 'mapping_id': '"""mapping_id"""'}), "(cost='Bad value',...
"""Do requests.""" import asyncio import http from random import random import sentry_sdk from asgi_tools._compat import aio_sleep from httpx import ( HTTPError, ConnectError, TimeoutException, NetworkError, AsyncClient, Response, HTTPStatusError) from . import config as global_config, logger async def pro...
[ "random.random", "http.HTTPStatus", "asgi_tools._compat.aio_sleep", "sentry_sdk.capture_exception" ]
[((826, 858), 'http.HTTPStatus', 'http.HTTPStatus', (['res.status_code'], {}), '(res.status_code)\n', (841, 858), False, 'import http\n'), ((1647, 1680), 'sentry_sdk.capture_exception', 'sentry_sdk.capture_exception', (['exc'], {}), '(exc)\n', (1675, 1680), False, 'import sentry_sdk\n'), ((1984, 2017), 'sentry_sdk.capt...
## the noise masks of funcSize are not binarized, this script is to binarize them import os, json import nibabel as nib import numpy as np from scipy import ndimage # initalize data work_dir = '/mindhive/saxelab3/anzellotti/forrest/output_denoise/' all_subjects = ['sub-01', 'sub-02', 'sub-03', 'sub-04', 'sub-05', 'sub...
[ "nibabel.Nifti1Image", "numpy.zeros", "nibabel.save", "nibabel.load" ]
[((938, 956), 'nibabel.load', 'nib.load', (['mask_dir'], {}), '(mask_dir)\n', (946, 956), True, 'import nibabel as nib\n'), ((1089, 1115), 'numpy.zeros', 'np.zeros', (['mask_union.shape'], {}), '(mask_union.shape)\n', (1097, 1115), True, 'import numpy as np\n'), ((1427, 1496), 'nibabel.Nifti1Image', 'nib.Nifti1Image', ...
import time from wait_until import wait_until import pytest def some_function_that_cannot_work(): raise ValueError("I cannot work!") def test_wait_until_exception_raised(): with pytest.raises(TimeoutError) as err: wait_until(some_function_that_cannot_work, timeout=1) assert "Timeou...
[ "pytest.raises", "wait_until.wait_until", "time.time" ]
[((652, 688), 'wait_until.wait_until', 'wait_until', (['dum.is_loaded'], {'timeout': '(2)'}), '(dum.is_loaded, timeout=2)\n', (662, 688), False, 'from wait_until import wait_until\n'), ((200, 227), 'pytest.raises', 'pytest.raises', (['TimeoutError'], {}), '(TimeoutError)\n', (213, 227), False, 'import pytest\n'), ((245...
import json import pandas as pd from sqlalchemy import create_engine class SqlClient: def __init__(self, host, port, username, password, db): self.host = host self.port = port self.username = username self.password = password self.db = db self._conn = None ...
[ "pandas.read_sql", "sqlalchemy.create_engine", "json.dumps" ]
[((505, 532), 'sqlalchemy.create_engine', 'create_engine', (['_conn_string'], {}), '(_conn_string)\n', (518, 532), False, 'from sqlalchemy import create_engine\n'), ((575, 605), 'pandas.read_sql', 'pd.read_sql', (['query', 'self._conn'], {}), '(query, self._conn)\n', (586, 605), True, 'import pandas as pd\n'), ((2218, ...
from argparse import ArgumentTypeError, FileType import sys class BoundedInt: def __init__(self, mini=None, maxi=None, clamp=False): self.mini = mini self.maxi = maxi self.clamp = clamp def __call__(self, str_value): try: i = int(str_value) except Exception...
[ "argparse.ArgumentTypeError" ]
[((345, 365), 'argparse.ArgumentTypeError', 'ArgumentTypeError', (['e'], {}), '(e)\n', (362, 365), False, 'from argparse import ArgumentTypeError, FileType\n'), ((515, 578), 'argparse.ArgumentTypeError', 'ArgumentTypeError', (["('%d cannot be less than %d' % (i, self.mini))"], {}), "('%d cannot be less than %d' % (i, s...
## # # File: testCcdcSearch.py # Author: <NAME> # Date: 13-Dec-2020 # Version: 0.001 # # Updated: # ## """ Test cases for chemical component search against the CCDC local Python API - """ __docformat__ = "restructuredtext en" __author__ = "<NAME>" __email__ = "<EMAIL>" __license__ = "Apache 2.0" import glob im...
[ "logging.basicConfig", "unittest.TestSuite", "logging.getLogger", "resource.getrusage", "rcsb.utils.ccdc.CcdcSearch.CcdcSearch", "os.path.join", "os.path.splitext", "os.path.split", "os.path.dirname", "platform.system", "time.localtime", "time.time", "unittest.TextTestRunner" ]
[((627, 746), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s [%(levelname)s]-%(module)s.%(funcName)s: %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s [%(levelname)s]-%(module)s.%(funcName)s: %(message)s')\n", (646, 746), False, 'import logging\n...
from collections import Counter STRING = "abeceda" # Varianta 1 - bez specialnich znalosti cetnosti = {} for pismeno in STRING: if pismeno in cetnosti: #cetnosti[pismeno] = cetnosti[pismeno] + 1 cetnosti[pismeno] += 1 else: cetnosti[pismeno] = 1 print(cetnosti) # Varianta 2 - vyuziti metody setdefault u slovn...
[ "collections.Counter" ]
[((473, 488), 'collections.Counter', 'Counter', (['STRING'], {}), '(STRING)\n', (480, 488), False, 'from collections import Counter\n')]
# This is a comparison for the CSSP algorithms on real datasets. # This is a test for subsampling functions: ## * Projection DPPs ## * Volume sampling ## * Pivoted QR ## * Double Phase ## * Largest leverage scores ## import sys sys.path.insert(0, '..') from CSSPy.dataset_tools import * from CSSPy.volume_sampler impor...
[ "matplotlib.pyplot.boxplot", "matplotlib.pyplot.setp", "sys.path.insert", "matplotlib.pyplot.savefig", "timeit.Timer", "pandas.read_csv", "matplotlib.pyplot.xticks", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.gca", "matplotlib.pyplot.figure", "matplotlib.pyplot.yticks", "numpy.savetxt", ...
[((230, 254), 'sys.path.insert', 'sys.path.insert', (['(0)', '""".."""'], {}), "(0, '..')\n", (245, 254), False, 'import sys\n'), ((803, 875), 'timeit.Timer', 'timeit.Timer', (['"""char in text"""'], {'setup': '"""text = "sample string"; char = "g\\""""'}), '(\'char in text\', setup=\'text = "sample string"; char = "g"...
# -*- coding: utf-8 -*- """ Created on Sun Jun 5 15:54:03 2016 @author: waffleboy """ from flask import Flask, render_template import requests import ast from datetime import datetime from datetime import timedelta import pandas as pd import pickle,json from pandas_highcharts.core import serialize from collections im...
[ "flask.render_template", "pandas.isnull", "json.loads", "collections.OrderedDict", "pandas.DataFrame.from_records", "pandas.read_csv", "flask.Flask", "json.dumps", "pickle.load", "requests.get", "ast.literal_eval", "datetime.datetime.now", "pandas.DataFrame", "datetime.timedelta" ]
[((344, 359), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (349, 359), False, 'from flask import Flask, render_template\n'), ((575, 651), 'flask.render_template', 'render_template', (['"""main.html"""'], {'masterDic': 'masterDic', 'summaryStats': 'summaryStats'}), "('main.html', masterDic=masterDic, summ...
from copusher import Copusher

app = Copusher()
app.run()
[ "copusher.Copusher" ]
[((37, 47), 'copusher.Copusher', 'Copusher', ([], {}), '()\n', (45, 47), False, 'from copusher import Copusher\n')]
from django.shortcuts import render
from django.http import HttpResponseRedirect
from .models import Message, MessageForm
from .forms import QueryForm


# Create your views here.
def forms_home(request):
    if request.method == 'POST':
        post_form = MessageForm(request.POST)
        if post_form.is_valid():
            ...
[ "django.shortcuts.render", "django.http.HttpResponseRedirect" ]
[((941, 977), 'django.shortcuts.render', 'render', (['request', '"""forms/thanks.html"""'], {}), "(request, 'forms/thanks.html')\n", (947, 977), False, 'from django.shortcuts import render\n'), ((445, 483), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/forms/thanks/"""'], {}), "('/forms/thanks/')\n...
print('The documentation for the pyNetSocket library')
print('This covers all the information you need to start')
print('')
print('Topics:', 'server', 'client', 'callbacks', sep='\n\t')
print('To view information:', 'import pyNetSocket.docs.TOPIC', sep='\n')
print('')
print('You can ...
[ "webbrowser.open" ]
[((517, 578), 'webbrowser.open', 'wb.open', (['"""https://github.com/DrSparky-2007/PyNetSocket/wiki/"""'], {}), "('https://github.com/DrSparky-2007/PyNetSocket/wiki/')\n", (524, 578), True, 'import webbrowser as wb\n')]
# -*- coding: utf-8 -*- """ Remove transcription sites in the FISH image. """ import os import argparse import time import datetime import sys import bigfish.stack as stack import numpy as np from utils import Logger from loader import (get_metadata_directory, generate_filename_base, images_gene...
[ "numpy.savez", "bigfish.stack.remove_transcription_site", "argparse.ArgumentParser", "os.path.join", "bigfish.stack.read_image", "utils.Logger", "datetime.datetime.now", "os.path.isdir", "loader.generate_filename_base", "os.path.basename", "loader.get_metadata_directory", "numpy.load", "time...
[((404, 429), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (427, 429), False, 'import argparse\n'), ((1453, 1495), 'os.path.join', 'os.path.join', (['output_directory', '"""nuc_mask"""'], {}), "(output_directory, 'nuc_mask')\n", (1465, 1495), False, 'import os\n'), ((1517, 1565), 'os.path.joi...
""" Methods to search an ImageCollection with brute force, exhaustive search. """ import cgi import abc import cPickle import numpy as np from sklearn.decomposition import PCA from sklearn.metrics.pairwise import \ manhattan_distances, euclidean_distances, additive_chi2_kernel import pyflann from scipy.spatial imp...
[ "scipy.spatial.cKDTree", "sklearn.metrics.pairwise.manhattan_distances", "sklearn.decomposition.PCA", "sklearn.metrics.pairwise.euclidean_distances", "util.histogram_colors_smoothed", "pyflann.set_distance_type", "sklearn.metrics.pairwise.additive_chi2_kernel", "numpy.argsort", "pyflann.FLANN", "r...
[((408, 416), 'rayleigh.util.TicToc', 'TicToc', ([], {}), '()\n', (414, 416), False, 'from rayleigh.util import TicToc\n'), ((2377, 2427), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'self.num_dimensions', 'whiten': '(True)'}), '(n_components=self.num_dimensions, whiten=True)\n', (2380, 2427), False, 'fro...
#!/usr/bin/env python3
import os
import argparse

import numpy as np
from sklearn import preprocessing
from sklearn import datasets
from tqdm import tqdm


class Network(object):
    def __init__(self):
        self.linear1 = Linear(64, 128)
        self.relu1 = ReLU()
        self.linear2 = Linear(128, 64)
        se...
[ "numpy.random.normal", "numpy.clip", "argparse.ArgumentParser", "ipdb.set_trace", "sklearn.datasets.load_digits", "numpy.max", "sklearn.preprocessing.StandardScaler", "numpy.sum", "numpy.zeros", "numpy.split", "numpy.arange", "numpy.random.shuffle" ]
[((1714, 1751), 'sklearn.datasets.load_digits', 'datasets.load_digits', ([], {'return_X_y': '(True)'}), '(return_X_y=True)\n', (1734, 1751), False, 'from sklearn import datasets\n'), ((1766, 1790), 'numpy.arange', 'np.arange', (['data.shape[0]'], {}), '(data.shape[0])\n', (1775, 1790), True, 'import numpy as np\n'), ((...
""" University of Minnesota Aerospace Engineering and Mechanics - UAV Lab Copyright 2019 Regents of the University of Minnesota See: LICENSE.md for complete license details Author: <NAME> Analysis for Huginn (mAEWing2) FLT03 and FLT04 """ #%% # Import Libraries import numpy as np import matplotlib.pyplot as plt # H...
[ "Core.AirData.ApplyCalibration", "matplotlib.pyplot.grid", "Core.AirData.Airspeed2NED", "Core.AirDataCalibration.EstCalib", "numpy.array", "Core.OpenData.Decimate", "numpy.linalg.norm", "numpy.repeat", "matplotlib.pyplot.plot", "numpy.asarray", "numpy.linspace", "Core.Loader.Log_RAPTRS", "sy...
[((811, 868), 'sys.path.join', 'path.join', (['"""/home"""', '"""rega0051"""', '"""FlightArchive"""', '"""Huginn"""'], {}), "('/home', 'rega0051', 'FlightArchive', 'Huginn')\n", (820, 868), False, 'from sys import path, argv\n'), ((1059, 1118), 'sys.path.join', 'path.join', (['pathBase', "('Huginn' + flt)", "('Huginn' ...
from setuptools import setup, find_packages

setup_requires = [
]

install_requires = [
]

dependency_links = [
]

setup(
    name='things3-wrapper',
    version='0.1',
    description='Things 3 app python wrapper using URL scheme',
    author='jkkwoen',
    author_email='<EMAIL>',
    packages=find_package...
[ "setuptools.find_packages" ]
[((308, 323), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (321, 323), False, 'from setuptools import setup, find_packages\n')]
#! /usr/bin/env python

import Queue
from Queue import Empty
from ListThread import ListThread

BUF_SIZE = 10
q = Queue.Queue(BUF_SIZE)


class ProducerThread(ListThread):
    def __init__(self, myself, master, slave, slave_of_slave, master_of_master, logger, settings, name):
        ListThread.__init__(self, myself, m...
[ "Queue.Queue", "ListThread.ListThread.__init__" ]
[((114, 135), 'Queue.Queue', 'Queue.Queue', (['BUF_SIZE'], {}), '(BUF_SIZE)\n', (125, 135), False, 'import Queue\n'), ((285, 395), 'ListThread.ListThread.__init__', 'ListThread.__init__', (['self', 'myself', 'master', 'slave', 'slave_of_slave', 'master_of_master', 'logger', 'settings', 'name'], {}), '(self, myself, mas...
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import pytest
from base64 import b64decode
from cs import CloudStackApiException
from datetime import datetime, timedelta
from time import sleep
from exoscale.api import ResourceNotFoundError
from exoscale.api.compute import *
from .conftest import _random_str


class Tes...
[ "base64.b64decode", "time.sleep", "datetime.datetime.now", "pytest.raises", "datetime.timedelta" ]
[((16357, 16371), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (16369, 16371), False, 'from datetime import datetime, timedelta\n'), ((1936, 1972), 'pytest.raises', 'pytest.raises', (['ResourceNotFoundError'], {}), '(ResourceNotFoundError)\n', (1949, 1972), False, 'import pytest\n'), ((2242, 2278), 'pytes...
from optparse import make_option
import datetime

from django.core.management import BaseCommand

from corehq.apps.accounting.tasks import generate_invoices


class Command(BaseCommand):
    help = ("Generate missing invoices based on the given date in YYYY-MM-DD "
            "format")
    option_list = BaseCommand.opt...
[ "optparse.make_option" ]
[((341, 431), 'optparse.make_option', 'make_option', (['"""--create"""'], {'action': '"""store_true"""', 'default': '(False)', 'help': '"""Generate invoices"""'}), "('--create', action='store_true', default=False, help=\n 'Generate invoices')\n", (352, 431), False, 'from optparse import make_option\n')]
from collections import defaultdict

s = """
0 <-> 2
1 <-> 1
2 <-> 0, 3, 4
3 <-> 2, 4
4 <-> 2, 3, 6
5 <-> 6
6 <-> 4, 5
"""

s = """
0 <-> 659, 737
1 <-> 1, 1433
2 <-> 982, 1869
3 <-> 306, 380, 1462, 1827
4 <-> 1076
5 <-> 794, 1451
6 <-> 146, 1055
7 <-> 834, 1557
8 <-> 1333
9 <-> 849, 906, 1863
10 <-> 362, 505
11 <-> 33...
[ "collections.defaultdict" ]
[((35624, 35640), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (35635, 35640), False, 'from collections import defaultdict\n')]
from setuptools import setup, find_packages

setup(
    name="elasticfeed",
    version="0.1.1",
    include_package_data=True,
    packages=find_packages(),
    author="<NAME>",
    author_email="<EMAIL>",
    description="Export csv data into Elasticsearch",
    license="MIT",
    url="https://github.com/AliyevH/elk_...
[ "setuptools.find_packages" ]
[((141, 156), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (154, 156), False, 'from setuptools import setup, find_packages\n')]
# -*- coding: utf-8 -*- """ Created on Wed Jan 13 19:02:19 2021 @author: <NAME> @version: 1.0.0 """ #%% IMPORTS import os import sys SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(os.path.dirname(SCRIPT_DIR)) from sklearn.pipeline import Pipeline import multiprocessing as mp import numpy as n...
[ "matplotlib.pyplot.ylabel", "sklearn.metrics.auc", "multiprocessing.cpu_count", "sklearn.metrics.roc_curve", "localPkg.datmgmt.DataManager.load_obj", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "matplotlib.pyplot.ylim", "localPkg.preproc.ProcessPipe.mainLoopTest", "sklearn.metrics.confus...
[((700, 717), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (707, 717), False, 'from os.path import dirname, join, abspath\n'), ((876, 899), 'os.path.join', 'join', (['cfpath', '"""saveBin"""'], {}), "(cfpath, 'saveBin')\n", (880, 899), False, 'from os.path import dirname, join, abspath\n'), ((1438,...
from solid import *
from math import *
from functools import reduce
from random import randint
import operator

# Copyright (c) 2017 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software w...
[ "functools.reduce", "random.randint" ]
[((5576, 5603), 'functools.reduce', 'reduce', (['operator.add', 'items'], {}), '(operator.add, items)\n', (5582, 5603), False, 'from functools import reduce\n'), ((7779, 7806), 'functools.reduce', 'reduce', (['operator.add', 'items'], {}), '(operator.add, items)\n', (7785, 7806), False, 'from functools import reduce\n'...
import sys

# TODO
# need to normalize this path for Windows users
sys.path.insert(0, '../../../NeuralNet')

from NeuralNet import NN
from ActivationFunction.AF import *

trainingPairs = list()
testPairs = list()

with open('../data/trainingDigits.data', 'r') as trainingDigitsFile:
    for line in list(trainingDigitsFi...
[ "sys.path.insert", "NeuralNet.NN" ]
[((66, 106), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../../../NeuralNet"""'], {}), "(0, '../../../NeuralNet')\n", (81, 106), False, 'import sys\n'), ((1183, 1273), 'NeuralNet.NN', 'NN', (['[1024, 30, 10]', 'sigmoid', 'sigmoidDiff', '"""../data/digitsWeights-sgd-1024-500-10.data"""'], {}), "([1024, 30, 10], s...
#! /usr/bin/env python
########################################################################
#                                                                      #
#   Resums the non-global logarithms, needs ngl_resum.py               #
#                                                                      #
#   If...
[ "numpy.sqrt", "argparse.ArgumentParser", "ngl_resum.FourVector", "ngl_resum.Shower", "ngl_resum.Event", "ngl_resum.OutsideRegion", "numpy.random.seed", "time.time" ]
[((838, 1158), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""This code shows how to use ngl_resum to shower a single dipole aligned with the z-axis, both legs with velocity b. The outside region is defined by the symmetric rapidity gap from -y to y. This code was used to produce some of...
import gc

from torch.autograd import Variable
import torch
import torch.autograd as ag
import torch.nn as nn
import torch.nn.functional as F
import numpy as np

from .context import Context
from .nested import *


class Function(object):
    def __call__(self, *args, **kwargs):
        raise NotImplementedError


class ...
[ "torch.autograd.grad", "gc.collect" ]
[((4887, 4899), 'gc.collect', 'gc.collect', ([], {}), '()\n', (4897, 4899), False, 'import gc\n'), ((4517, 4559), 'torch.autograd.grad', 'ag.grad', (['[c_phi_z]', '[x]'], {'retain_graph': '(True)'}), '([c_phi_z], [x], retain_graph=True)\n', (4524, 4559), True, 'import torch.autograd as ag\n'), ((4609, 4652), 'torch.aut...