Column        Type     Lengths
code          string   22–1.05M
apis          list     1–3.31k
extract_api   string   75–3.25M
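
Each record below occupies three consecutive cells: a code cell with the (truncated) source of one Python file, an apis cell listing the fully qualified callables that file uses, and an extract_api cell holding the repr of a list of tuples, one per call site. Judging from the samples, each tuple carries the character span of the full call expression in the code, the fully qualified API name, the name as written in the source, a pair of (positional args, keyword args), the source text of the argument list, the span of that argument list, a flag that appears to mark imports bound with an "as" alias, and the originating import statement. The sketch below decodes a record under that assumed layout; the row keys and field order are inferred from the visible samples, not from an official schema.

    import ast

    def iter_calls(row):
        """Yield (qualified_name, call_text) pairs from one record.

        Assumed tuple layout (inferred from the rows below, not documented):
        (call_span, qualified_name, name_in_source, (args, kwargs),
         arg_source, arg_span, uses_import_alias, import_statement)
        """
        code = row["code"]
        # extract_api stores the repr of a list of plain Python literals,
        # so ast.literal_eval can safely rebuild it.
        for entry in ast.literal_eval(row["extract_api"]):
            (start, end), qualified_name = entry[0], entry[1]
            # The first span indexes the full call expression in the code.
            yield qualified_name, code[start:end]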
from itertools import chain from django.conf import settings from django.contrib.gis.db import models as gis_models from django.db import models, router, transaction from django.utils import timezone from django.utils.translation import gettext_lazy as _ from ..fields import CleaningJsonField from ..validators import...
[ "django.db.models.Index", "django.db.models.ForeignKey", "django.db.transaction.atomic", "django.utils.translation.gettext_lazy", "django.db.models.BooleanField", "django.utils.timezone.now", "django.db.models.DateTimeField", "django.db.models.CharField" ]
[((646, 742), 'django.db.models.ForeignKey', 'models.ForeignKey', (['EnforcementDomain'], {'on_delete': 'models.PROTECT', 'related_name': '"""permit_areas"""'}), "(EnforcementDomain, on_delete=models.PROTECT, related_name\n ='permit_areas')\n", (663, 742), False, 'from django.db import models, router, transaction\n'...
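
A quick sanity check of the assumed layout against this first record: the outer span (646, 742) would cover the whole models.ForeignKey(...) call and the inner span (663, 742) just its parenthesized argument list, which is consistent with the offsets shown:

    # Hypothetical offset check for the record above: the callee name
    # occupies 646..663, the argument list 663..742.
    assert 646 + len("models.ForeignKey") == 663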
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Tests for the union find data structure. """ try: from ..unionfind import UnionFind except ValueError: pass def test_unionfind_basics(): """ Test the basic properties of unionfind. """ u = UnionFind([1, 2, 3]) assert u.in_same_set(1,...
[ "pytest.main" ]
[((2112, 2162), 'pytest.main', 'pytest.main', ([], {'args': "['.', '--doctest-modules', '-v']"}), "(args=['.', '--doctest-modules', '-v'])\n", (2123, 2162), False, 'import pytest\n')]
# Generated by Django 3.2.8 on 2021-11-25 17:50 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('products', '0009_auto_20211125_1846'), ] operations = [ migrations.RemoveField( model_name='product', name='updated_at', ...
[ "django.db.migrations.RemoveField" ]
[((228, 291), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""product"""', 'name': '"""updated_at"""'}), "(model_name='product', name='updated_at')\n", (250, 291), False, 'from django.db import migrations\n')]
from builtins import str from builtins import range from builtins import object import logging import inspect import os class CustomAttr(object): """This type handles non-flat data-types like int, str, bool. """ def __init__(self, key, value): self._value = value self._key = key ...
[ "builtins.range", "logging.info", "builtins.str" ]
[((1296, 1323), 'builtins.range', 'range', (['limits[0]', 'limits[1]'], {}), '(limits[0], limits[1])\n', (1301, 1323), False, 'from builtins import range\n'), ((1444, 1531), 'logging.info', 'logging.info', (["('Skipping key: %s, value: %s due tovalidation failure' % (key, value))"], {}), "('Skipping key: %s, value: %s ...
import gdb class TypeCache(object): def __init__(self): self.cache = {} self.intptr_type = False def clear(self): self.cache = {} self.intptr_type = False def get_type(self, typename): if typename in self.cache: return self.cache[typename] ...
[ "gdb.lookup_type", "gdb.parse_and_eval" ]
[((361, 386), 'gdb.lookup_type', 'gdb.lookup_type', (['typename'], {}), '(typename)\n', (376, 386), False, 'import gdb\n'), ((534, 573), 'gdb.parse_and_eval', 'gdb.parse_and_eval', (["('(%s*)0' % typename)"], {}), "('(%s*)0' % typename)\n", (552, 573), False, 'import gdb\n')]
import os import sys directory = sys.argv[1] outfile = open("key_phrases.csv","w") files = {} for filename in os.listdir(directory): text=[] with open(os.path.join(directory, filename)) as f: text=[l.strip() for l in f if len(l.strip())>2] data='' for t in text: if len(t.split()) > 1: data = data+'. '+t.s...
[ "rake.Rake", "os.listdir", "os.path.join", "pprint.PrettyPrinter" ]
[((112, 133), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (122, 133), False, 'import os\n'), ((535, 636), 'rake.Rake', 'rake.Rake', (['"""/home/ashutosh/Sudeshna/RAKE-tutorial/data/stoplists/SmartStoplist.txt"""', '(3)', '(3)', '(1)'], {}), "(\n '/home/ashutosh/Sudeshna/RAKE-tutorial/data/stopl...
"""Unit tests for helper utilities in :mod:`dftinputgen.utils`.""" import os import pytest from ase import io as ase_io from dftinputgen.utils import get_elem_symbol from dftinputgen.utils import read_crystal_structure from dftinputgen.utils import get_kpoint_grid_from_spacing from dftinputgen.utils import DftInputG...
[ "pytest.approx", "dftinputgen.utils.get_elem_symbol", "dftinputgen.utils.read_crystal_structure", "os.path.join", "dftinputgen.utils.get_kpoint_grid_from_spacing", "os.path.dirname", "pytest.raises", "ase.io.read" ]
[((357, 382), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (372, 382), False, 'import os\n'), ((399, 458), 'os.path.join', 'os.path.join', (['test_base_dir', '"""qe"""', '"""files"""', '"""feo_conv.vasp"""'], {}), "(test_base_dir, 'qe', 'files', 'feo_conv.vasp')\n", (411, 458), False, 'impo...
from django.contrib.auth.models import User from django.db.models import (Model, TextField, DateTimeField, ForeignKey, CASCADE) from asgiref.sync import async_to_sync from channels.layers import get_channel_layer from django.db import models import json class MessageModel(Model): ""...
[ "json.loads", "django.db.models.TextField", "django.db.models.ForeignKey", "json.dumps", "channels.layers.get_channel_layer", "django.db.models.DateTimeField", "django.db.models.CharField", "asgiref.sync.async_to_sync" ]
[((443, 545), 'django.db.models.ForeignKey', 'ForeignKey', (['User'], {'on_delete': 'CASCADE', 'verbose_name': '"""user"""', 'related_name': '"""from_user"""', 'db_index': '(True)'}), "(User, on_delete=CASCADE, verbose_name='user', related_name=\n 'from_user', db_index=True)\n", (453, 545), False, 'from django.db.mo...
# -*- coding: utf-8 -*- """CNN.ipynb Automatically generated by Colaboratory. Original file is located at https://colab.research.google.com/drive/1Tq6HUya2PrC0SmyOIFo2c_eVtguRED2q """ import torch import torch.nn as nn import torch.optim as optim import torch.nn.functional as F from torch.utils.data import DataL...
[ "torch.nn.CrossEntropyLoss", "torch.nn.Conv2d", "torch.nn.MaxPool2d", "torch.cuda.is_available", "torch.nn.Linear", "torch.utils.data.DataLoader", "torch.no_grad", "torchvision.transforms.ToTensor", "torch.randn" ]
[((1138, 1166), 'torch.randn', 'torch.randn', (['(64, 1, 28, 28)'], {}), '((64, 1, 28, 28))\n', (1149, 1166), False, 'import torch\n'), ((1482, 1544), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'train_dataset', 'batch_size': '(64)', 'shuffle': '(True)'}), '(dataset=train_dataset, batch_size=64, shuff...
"""Treadmill hierarchical scheduler. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import abc import collections import datetime import heapq import itertools import logging import operator import sys import tim...
[ "logging.getLogger", "itertools.chain", "six.itervalues", "heapq.merge", "numpy.array", "datetime.date.fromtimestamp", "six.moves.xrange", "datetime.timedelta", "numpy.subtract", "numpy.maximum", "six.moves.zip", "six.viewvalues", "numpy.finfo", "datetime.date.today", "time.time", "tim...
[((377, 404), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (394, 404), False, 'import logging\n'), ((551, 594), 'time.mktime', 'time.mktime', (['(2014, 1, 1, 0, 0, 0, 0, 0, 0)'], {}), '((2014, 1, 1, 0, 0, 0, 0, 0, 0))\n', (562, 594), False, 'import time\n'), ((8874, 8904), 'six.add_meta...
import os import sys import re import json import logging import torch from transformers import ( HfArgumentParser, set_seed, AutoTokenizer, AutoConfig, EvalPrediction, ) from src.model.ca_mtl import CaMtl, CaMtlArguments from src.utils.misc import MultiTaskDataArguments, Split from src.mtl_traine...
[ "logging.getLogger", "logging.basicConfig", "src.model.ca_mtl.CaMtl.get_base_model", "transformers.HfArgumentParser", "src.data.mtl_dataset.MultiTaskDataset", "os.path.isdir", "os.path.abspath", "transformers.set_seed", "src.data.task_dataset.TaskDataset" ]
[((480, 507), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (497, 507), False, 'import logging\n'), ((548, 752), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(levelname)s - %(name)s - %(message)s"""', 'datefmt': '"""%m/%d/%Y %H:%M:%S"""', 'level': '(...
#!/usr/bin/env python3 """Read data in CSV format from websocket """ import sys import asyncio import websockets # read url from command line if len(sys.argv) >= 2: uri = sys.argv[1] else: # host url and port uri = "ws://localhost:8314" print("*==* ", sys.argv[0], " Lese Daten von url ", uri) async def ...
[ "asyncio.get_event_loop", "websockets.connect", "sys.exit" ]
[((394, 437), 'websockets.connect', 'websockets.connect', (['uri'], {'ping_interval': 'None'}), '(uri, ping_interval=None)\n', (412, 437), False, 'import websockets\n'), ((978, 1002), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1000, 1002), False, 'import asyncio\n'), ((880, 891), 'sys.exit',...
#! /usr/bin/env python import tensorflow as tf import numpy as np import os import time import datetime from tensorflow.contrib import learn from input_helpers import InputHelper # Parameters # ================================================== # Eval Parameters tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (...
[ "tensorflow.flags.DEFINE_string", "tensorflow.Graph", "tensorflow.ConfigProto", "tensorflow.initialize_all_variables", "numpy.mean", "tensorflow.flags.DEFINE_boolean", "tensorflow.Session", "numpy.concatenate", "tensorflow.flags.DEFINE_integer", "input_helpers.InputHelper" ]
[((265, 334), 'tensorflow.flags.DEFINE_integer', 'tf.flags.DEFINE_integer', (['"""batch_size"""', '(64)', '"""Batch Size (default: 64)"""'], {}), "('batch_size', 64, 'Batch Size (default: 64)')\n", (288, 334), True, 'import tensorflow as tf\n'), ((335, 425), 'tensorflow.flags.DEFINE_string', 'tf.flags.DEFINE_string', (...
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from...
[ "pulumi.get", "pulumi.getter", "pulumi.set", "pulumi.InvokeOptions", "pulumi.runtime.invoke" ]
[((2494, 2524), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""bundleId"""'}), "(name='bundleId')\n", (2507, 2524), False, 'import pulumi\n'), ((2632, 2665), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""directoryId"""'}), "(name='directoryId')\n", (2645, 2665), False, 'import pulumi\n'), ((2886, 2935), 'pul...
#!/usr/bin/env python ############################################################################### # # # manifestManager.py # # ...
[ "hashlib.sha256", "os.listdir", "os.path.getsize", "os.makedirs", "os.path.join", "os.path.splitext", "os.path.isfile", "os.path.isdir", "os.path.abspath", "os.path.islink", "fileEntity.FileEntity" ]
[((3939, 3960), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (3954, 3960), False, 'import os\n'), ((3979, 4008), 'fileEntity.FileEntity', 'FE', (['"""root"""', '"""."""', 'None', '"""-"""', '(0)'], {}), "('root', '.', None, '-', 0)\n", (3981, 4008), True, 'from fileEntity import FileEntity as FE\n'...
import pytest from flask_resty import Api from flask_resty.testing import assert_response # ----------------------------------------------------------------------------- @pytest.fixture(autouse=True) def routes(app): api = Api(app, "/api") api.add_ping("/ping") # ------------------------------------------...
[ "pytest.fixture", "flask_resty.testing.assert_response", "flask_resty.Api" ]
[((175, 203), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (189, 203), False, 'import pytest\n'), ((231, 247), 'flask_resty.Api', 'Api', (['app', '"""/api"""'], {}), "(app, '/api')\n", (234, 247), False, 'from flask_resty import Api\n'), ((430, 460), 'flask_resty.testing.assert_r...
import os from subprocess import call files = ['000002b66c9c498e.jpg', '000002b97e5471a0.jpg', '000002c707c9895e.jpg', '0000048549557964.jpg', '000004f4400f6ec5.jpg', '0000071d71a0a6f6.jpg', '000013ba71c12506.jpg', '000018acd19b4ad3.jpg', '00001bc2c4027449.jpg', '00001bcc92282a38.jpg', '0000201cd362f303.jpg', '0000207...
[ "os.path.exists", "subprocess.call" ]
[((631, 671), 'os.path.exists', 'os.path.exists', (["('train/' + file + '.jpg')"], {}), "('train/' + file + '.jpg')\n", (645, 671), False, 'import os\n'), ((741, 780), 'subprocess.call', 'call', (["['gsutil', 'cp', spath, 'train/']"], {}), "(['gsutil', 'cp', spath, 'train/'])\n", (745, 780), False, 'from subprocess imp...
""" SNMP_FRAMEWORK_MIB """ from collections import OrderedDict from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64 from ydk.filters import YFilter from ydk.errors import YError, YModelError from ydk.errors.error_handler import handle_type_er...
[ "collections.OrderedDict", "ydk.types.YLeaf", "ydk.types.Enum.YLeaf" ]
[((541, 570), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(1)', '"""noAuthNoPriv"""'], {}), "(1, 'noAuthNoPriv')\n", (551, 570), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((589, 616), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['...
from aiogram.utils.markdown import hide_link from aiogram.types import CallbackQuery from loader import dp from utils import ( get_object, get_attributes_of_object ) from keyboards import ( anime_choose_safe_category, anime_sfw_categories, anime_nsfw_categories, animals_categories, ...
[ "keyboards.anime_sfw_categories", "loader.dp.callback_query_handler", "keyboards.control_buttons", "utils.get_object", "keyboards.anime_nsfw_categories", "aiogram.utils.markdown.hide_link", "keyboards.animals_categories", "utils.get_attributes_of_object", "keyboards.anime_choose_safe_category", "k...
[((377, 415), 'loader.dp.callback_query_handler', 'dp.callback_query_handler', ([], {'text': '"""menu"""'}), "(text='menu')\n", (402, 415), False, 'from loader import dp\n'), ((798, 837), 'loader.dp.callback_query_handler', 'dp.callback_query_handler', ([], {'text': '"""anime"""'}), "(text='anime')\n", (823, 837), Fals...
#!c:\users\hooma\documents\github\spinesegmentation\segmentation_test\scripts\python.exe """ Execute a graph cut on a voxel image based on some foreground and background markers. Copyright (C) 2013 <NAME> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Publi...
[ "os.path.exists", "medpy.io.load", "argparse.ArgumentParser", "scipy.zeros", "medpy.core.Logger.getInstance", "medpy.graphcut.wrapper.split_marker", "medpy.io.header.get_pixel_spacing", "medpy.graphcut.graph_from_voxels", "medpy.core.ArgumentError" ]
[((3039, 3059), 'medpy.core.Logger.getInstance', 'Logger.getInstance', ([], {}), '()\n', (3057, 3059), False, 'from medpy.core import ArgumentError, Logger\n'), ((5157, 5179), 'medpy.io.load', 'load', (['args.badditional'], {}), '(args.badditional)\n', (5161, 5179), False, 'from medpy.io import load, save, header\n'), ...
author = '<NAME>' email = '<EMAIL>' project = 'Astronaut Training Program' description = 'Astronaut Training Program' extensions = [ 'sphinx.ext.todo', 'sphinx.ext.imgmath', ] todo_emit_warnings = False todo_include_todos = True exclude_patterns = [] # --------------------------------------------------------...
[ "subprocess.Popen", "os.path.dirname", "datetime.datetime.now", "re.sub", "sys.path.append" ]
[((1499, 1528), 're.sub', 're.sub', (['"""[\\\\W]+"""', '""""""', 'project'], {}), "('[\\\\W]+', '', project)\n", (1505, 1528), False, 'import re\n'), ((1664, 1678), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1676, 1678), False, 'from datetime import datetime\n'), ((1908, 1939), 'sys.path.append', 'sys...
from __future__ import absolute_import, division, print_function, unicode_literals import tensorflow as tf from keras import regularizers from tensorflow.keras.models import Sequential from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D from tensorflow.keras.preprocessing.image import Ima...
[ "matplotlib.pyplot.imshow", "os.listdir", "tensorflow.keras.layers.Conv2D", "tensorflow.keras.layers.MaxPooling2D", "tensorflow.keras.layers.Flatten", "matplotlib.pyplot.plot", "os.path.join", "tensorflow.keras.preprocessing.image.ImageDataGenerator", "keras.preprocessing.image.load_img", "tensorf...
[((834, 861), 'os.path.join', 'os.path.join', (['PATH', '"""train"""'], {}), "(PATH, 'train')\n", (846, 861), False, 'import os\n'), ((879, 911), 'os.path.join', 'os.path.join', (['PATH', '"""validation"""'], {}), "(PATH, 'validation')\n", (891, 911), False, 'import os\n'), ((923, 949), 'os.path.join', 'os.path.join', ...
#client.py #!/usr/bin/python # This is client.py file import socket # Import socket module s = socket.socket() # Create a socket object host = socket.gethostname() # Get local machine name port = 12352 ...
[ "socket.gethostname", "socket.socket" ]
[((162, 177), 'socket.socket', 'socket.socket', ([], {}), '()\n', (175, 177), False, 'import socket\n'), ((238, 258), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (256, 258), False, 'import socket\n')]
"""Consts for Kaiterra integration.""" from datetime import timedelta from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, Platform, ) DOMAIN = "kaite...
[ "datetime.timedelta" ]
[((1666, 1687), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(30)'}), '(seconds=30)\n', (1675, 1687), False, 'from datetime import timedelta\n')]
from datetime import datetime from werkzeug.security import generate_password_hash from flask import Blueprint, jsonify, request from sqlalchemy.orm import joinedload from flask_login import login_required from app.models import db, User, Type from app.forms import UpdateUserForm from .auth_routes import authenticate, ...
[ "app.models.db.session.delete", "app.models.db.session.query", "app.forms.UpdateUserForm", "datetime.datetime.now", "werkzeug.security.generate_password_hash", "sqlalchemy.orm.joinedload", "app.models.db.session.commit", "app.models.User.query.get", "flask.Blueprint", "flask.jsonify" ]
[((371, 399), 'flask.Blueprint', 'Blueprint', (['"""users"""', '__name__'], {}), "('users', __name__)\n", (380, 399), False, 'from flask import Blueprint, jsonify, request\n'), ((773, 791), 'app.models.User.query.get', 'User.query.get', (['id'], {}), '(id)\n', (787, 791), False, 'from app.models import db, User, Type\n...
"""Machine Learning""" import importlib import numpy as np import pandas as pd import json from jsonschema import validate from sklearn.pipeline import make_pipeline from timeflux.core.node import Node from timeflux.core.exceptions import ValidationError, WorkerInterrupt from timeflux.helpers.background import Task fr...
[ "timeflux.core.exceptions.WorkerInterrupt", "timeflux.helpers.clock.max_time", "pandas.infer_freq", "numpy.array", "timeflux.helpers.clock.now", "timeflux.core.exceptions.ValidationError", "pandas.date_range", "timeflux.helpers.background.Task", "timeflux.helpers.port.make_event", "numpy.asarray",...
[((3357, 3382), 'pandas.Timedelta', 'pd.Timedelta', (['buffer_size'], {}), '(buffer_size)\n', (3369, 3382), True, 'import pandas as pd\n'), ((8422, 8455), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.datetime64'}), '([], dtype=np.datetime64)\n', (8430, 8455), True, 'import numpy as np\n'), ((10524, 10576), 'sklear...
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-22 15:45 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('content', '0002_auto_20170122_1509'), ('cms', '0005...
[ "django.db.models.OneToOneField" ]
[((487, 637), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""posts1_imagen"""', 'to': '"""content.ImageContent"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, re...
from pug.nlp.db import representation from django.db import models class RepresentationMixin(models.Model): """Produce a meaningful string representation of a model with `str(model.objects.all[0])`.""" __unicode__ = representation class Meta: abstract = True class DateMixin(models.Model): ""...
[ "django.db.models.DateTimeField" ]
[((479, 514), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (499, 514), False, 'from django.db import models\n'), ((529, 568), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (549, 568), F...
from django.shortcuts import render from django.views.generic.edit import FormView from django.views.generic.edit import View from . import forms # Again, thanks to django for the ready-made authentication form. from django.contrib.auth.forms import AuthenticationForm from django.contrib.auth import logout from django...
[ "django.shortcuts.render", "django.contrib.auth.forms.AuthenticationForm", "django.contrib.auth.login", "django.contrib.auth.logout" ]
[((698, 730), 'django.contrib.auth.forms.AuthenticationForm', 'AuthenticationForm', (['request.POST'], {}), '(request.POST)\n', (716, 730), False, 'from django.contrib.auth.forms import AuthenticationForm\n'), ((749, 828), 'django.shortcuts.render', 'render', (['request', '"""MainApp/homepage.html"""', "{'form': form1,...
""" Django settings for imagetagger project. Generated by 'django-admin startproject' using Django 1.10.3. For more information on this file, see https://docs.djangoproject.com/en/1.10/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.10/ref/settings/ """ impor...
[ "os.path.abspath", "os.path.join" ]
[((3683, 3713), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data"""'], {}), "(BASE_DIR, 'data')\n", (3695, 3713), False, 'import os\n'), ((3728, 3760), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""images"""'], {}), "(BASE_DIR, 'images')\n", (3740, 3760), False, 'import os\n'), ((499, 524), 'os.path.abspath',...
#encoding:utf-8 from utils import weighted_random_subreddit t_channel = '@news756' subreddit = weighted_random_subreddit({ 'politics': 0.5, 'news': 0.5 }) def send_post(submission, r2t): return r2t.send_simple(submission, text='{title}\n\n{self_text}\n\n/r/{subreddit_name}\n{short_link}', ...
[ "utils.weighted_random_subreddit" ]
[((98, 155), 'utils.weighted_random_subreddit', 'weighted_random_subreddit', (["{'politics': 0.5, 'news': 0.5}"], {}), "({'politics': 0.5, 'news': 0.5})\n", (123, 155), False, 'from utils import weighted_random_subreddit\n')]
''' This module contains the classes which represent XCB data types. ''' from xcbgen.expr import Field, Expression import __main__ class Type(object): ''' Abstract base class for all XCB data types. Contains default fields, and some abstract methods. ''' def __init__(self, name): ''' ...
[ "xcbgen.expr.Expression", "xcbgen.expr.Field" ]
[((2353, 2409), 'xcbgen.expr.Field', 'Field', (['self', 'field_type', 'field_name', 'visible', 'wire', 'auto'], {}), '(self, field_type, field_name, visible, wire, auto)\n', (2358, 2409), False, 'from xcbgen.expr import Field, Expression\n'), ((17293, 17349), 'xcbgen.expr.Field', 'Field', (['self', 'field_type', 'field...
# Copyright 2020 by <NAME>, Solis-Lemus Lab, WID. # All rights reserved. # This file is part of the BioKlustering Website. import pandas as pd from Bio import SeqIO from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.cluster import KMeans from sklearn.decomposition import PCA from sklearn.cluster ...
[ "pandas.Series", "sklearn.cluster.KMeans", "sklearn.decomposition.PCA", "os.path.join", "warnings.catch_warnings", "numpy.array", "sklearn.feature_extraction.text.TfidfVectorizer", "warnings.simplefilter", "Bio.SeqIO.parse", "pandas.DataFrame", "sklearn.cluster.MeanShift", "sklearn.preprocessi...
[((546, 563), 'pandas.DataFrame', 'pd.DataFrame', (['[d]'], {}), '([d])\n', (558, 563), True, 'import pandas as pd\n'), ((573, 602), 'pandas.Series', 'pd.Series', (['d'], {'name': '"""Sequence"""'}), "(d, name='Sequence')\n", (582, 602), True, 'import pandas as pd\n'), ((658, 673), 'pandas.DataFrame', 'pd.DataFrame', (...
# -*- coding: utf-8 -*- from src import icons, __version__ from src.actions import HOST_URL from src.actions.configure import ConfigureWorkflowAction from src.actions.help import HelpWorkflowAction from src.actions.index import IndexWorkflowAction from src.actions.projects import ProjectWorkflowAction from src.actions....
[ "src.util.workflow", "src.util.call_alfred" ]
[((992, 1028), 'src.util.call_alfred', 'call_alfred', (['"""stash:config sethost """'], {}), "('stash:config sethost ')\n", (1003, 1028), False, 'from src.util import workflow, call_alfred\n'), ((1498, 1508), 'src.util.workflow', 'workflow', ([], {}), '()\n', (1506, 1508), False, 'from src.util import workflow, call_al...
from itertools import count from _pandigital_tools import is_pandigital def pand_products(): """ Returns the sum of all numbers n which have a factorization a * b = n such that a, b, n are (cumulatively) 1 through 9 pandigital. """ total = set() for a in range(2, 100): for b in count(...
[ "itertools.count", "_pandigital_tools.is_pandigital" ]
[((314, 322), 'itertools.count', 'count', (['a'], {}), '(a)\n', (319, 322), False, 'from itertools import count\n'), ((417, 443), '_pandigital_tools.is_pandigital', 'is_pandigital', (['a', 'b', '(a * b)'], {}), '(a, b, a * b)\n', (430, 443), False, 'from _pandigital_tools import is_pandigital\n')]
from django.db import models from openstates.data.models import Bill class LegacyBillMapping(models.Model): legacy_id = models.CharField(max_length=20, primary_key=True) bill = models.ForeignKey( Bill, related_name="legacy_mapping", on_delete=models.CASCADE )
[ "django.db.models.CharField", "django.db.models.ForeignKey" ]
[((126, 175), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'primary_key': '(True)'}), '(max_length=20, primary_key=True)\n', (142, 175), False, 'from django.db import models\n'), ((187, 272), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Bill'], {'related_name': '"""legacy_mapp...
from __future__ import print_function from troposphere import ( Template, Parameter, Ref, Condition, Equals, And, Or, Not, If ) from troposphere import ec2 parameters = { "One": Parameter( "One", Type="String", ), "Two": Parameter( "Two", Type="String", ), "Thr...
[ "troposphere.Parameter", "troposphere.Ref", "troposphere.If", "troposphere.Condition", "troposphere.Template" ]
[((2017, 2027), 'troposphere.Template', 'Template', ([], {}), '()\n', (2025, 2027), False, 'from troposphere import Template, Parameter, Ref, Condition, Equals, And, Or, Not, If\n'), ((189, 220), 'troposphere.Parameter', 'Parameter', (['"""One"""'], {'Type': '"""String"""'}), "('One', Type='String')\n", (198, 220), Fal...
# (c) Copyright IBM Corporation 2020. # LICENSE: Apache License 2.0 (Apache-2.0) # http://www.apache.org/licenses/LICENSE-2.0 import logging from lrtc_lib.data_access import single_dataset_loader from lrtc_lib.data_access.processors.dataset_part import DatasetPart from lrtc_lib.oracle_data_access import gold_labels_...
[ "logging.basicConfig", "lrtc_lib.data_access.single_dataset_loader.load_dataset", "lrtc_lib.data_access.single_dataset_loader.clear_all_saved_files", "lrtc_lib.oracle_data_access.gold_labels_loader.clear_gold_labels_file", "lrtc_lib.oracle_data_access.gold_labels_loader.load_gold_labels", "logging.info" ]
[((328, 448), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')\n", (347, 448), False, 'import logging...
from .component import Component, using_scope import tensorflow.compat.v1 as tf tf.disable_v2_behavior() class EmbeddingLayer(Component): def __init__(self, input_size, output_size, name='embedding'): Component.__init__(self, name=name) self.input_size = input_size self.output_size = out...
[ "tensorflow.compat.v1.disable_v2_behavior", "tensorflow.compat.v1.nn.embedding_lookup", "tensorflow.compat.v1.get_variable" ]
[((80, 104), 'tensorflow.compat.v1.disable_v2_behavior', 'tf.disable_v2_behavior', ([], {}), '()\n', (102, 104), True, 'import tensorflow.compat.v1 as tf\n'), ((577, 625), 'tensorflow.compat.v1.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['self.embedding_matrix', 'x'], {}), '(self.embedding_matrix, x)\n', (599, 62...
''' Copyright (C) 2021 CG Cookie http://cgcookie.com <EMAIL> Created by <NAME>, <NAME> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your op...
[ "os.path.exists", "inspect.getsourcefile", "re.compile", "inspect.currentframe", "os.path.join", "functools.wraps", "os.path.basename", "os.path.abspath", "time.time" ]
[((1455, 1497), 'os.path.basename', 'os.path.basename', (['frame.f_code.co_filename'], {}), '(frame.f_code.co_filename)\n', (1471, 1497), False, 'import os\n'), ((1379, 1401), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (1399, 1401), False, 'import inspect\n'), ((2087, 2098), 'time.time', 'time.ti...
import numpy as np import pytest from pandas import ( DataFrame, Series, concat, ) import pandas._testing as tm @pytest.mark.parametrize("func", ["cov", "corr"]) def test_ewm_pairwise_cov_corr(func, frame): result = getattr(frame.ewm(span=10, min_periods=5), func)() result = result.loc[(slice(Non...
[ "pandas.Series", "pandas._testing.assert_series_equal", "pytest.mark.parametrize", "pandas._testing.assert_equal", "pytest.raises", "numpy.isnan", "pandas.DataFrame", "numpy.random.randn", "numpy.arange" ]
[((128, 176), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""func"""', "['cov', 'corr']"], {}), "('func', ['cov', 'corr'])\n", (151, 176), False, 'import pytest\n'), ((520, 568), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""name"""', "['cov', 'corr']"], {}), "('name', ['cov', 'corr'])\n", (5...
# TODO finish implementing query import math from pyspark import SparkContext # from genex.cluster import sim_between_seq from brainex.op.query_op import sim_between_seq from brainex.parse import strip_function, remove_trailing_zeros from .classes import Sequence from brainex.database import genexengine def query(...
[ "brainex.op.query_op.sim_between_seq", "brainex.parse.strip_function" ]
[((6638, 6692), 'brainex.op.query_op.sim_between_seq', 'sim_between_seq', (['query_sequence', 'cluster_sequence.data'], {}), '(query_sequence, cluster_sequence.data)\n', (6653, 6692), False, 'from brainex.op.query_op import sim_between_seq\n'), ((1456, 1473), 'brainex.parse.strip_function', 'strip_function', (['x'], {}...
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. from typing import Callable, Dict import torch import torch.nn as nn from nni.retiarii import model_wrapper from nni.retiarii.nn.pytorch import NasBench201Cell __all__ = ['NasBench201'] OPS_WITH_STRIDE = { 'none': lambda C_in, C_out, st...
[ "torch.nn.BatchNorm2d", "torch.nn.ReLU", "torch.nn.ModuleList", "torch.nn.Conv2d", "torch.nn.MaxPool2d", "torch.nn.AdaptiveAvgPool2d", "torch.nn.Linear", "torch.nn.AvgPool2d", "torch.nn.ConstantPad2d", "torch.nn.Identity", "nni.retiarii.nn.pytorch.NasBench201Cell" ]
[((3476, 3498), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(False)'}), '(inplace=False)\n', (3483, 3498), True, 'import torch.nn as nn\n'), ((3909, 3930), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['C_out'], {}), '(C_out)\n', (3923, 3930), True, 'import torch.nn as nn\n'), ((6097, 6112), 'torch.nn.ModuleList', 'nn....
#!/usr/bin/env python3 import os from setuptools import setup with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as f: long_description = f.read() setup( name='dialog_py', version='1.0a1', description='Python API for cdialog/linux dialog', long_description=long_de...
[ "os.path.dirname", "setuptools.setup" ]
[((187, 1082), 'setuptools.setup', 'setup', ([], {'name': '"""dialog_py"""', 'version': '"""1.0a1"""', 'description': '"""Python API for cdialog/linux dialog"""', 'long_description': 'long_description', 'url': '"""https://github.com/pasha13666/dialog_py"""', 'author': '"""Pasha__kun"""', 'author_email': '"""<EMAIL>"""'...
# -*- coding: utf-8 -*- from __future__ import unicode_literals import unittest import nose from nose.tools import * from whoswho import who, config from nameparser.config.titles import TITLES as NAMEPARSER_TITLES class TestMatch(unittest.TestCase): def setUp(self): self.name = '<NAME>' def test...
[ "whoswho.who.match", "nose.main", "whoswho.who.ratio" ]
[((7315, 7326), 'nose.main', 'nose.main', ([], {}), '()\n', (7324, 7326), False, 'import nose\n'), ((505, 530), 'whoswho.who.match', 'who.match', (['name', '"""<NAME>"""'], {}), "(name, '<NAME>')\n", (514, 530), False, 'from whoswho import who, config\n'), ((552, 577), 'whoswho.who.match', 'who.match', (['name', '"""<N...
import json from flask import request from flask_restful import Resource, abort, reqparse from models.User import User """ POST Creates a new resource. GET Retrieves a resource. PUT Updates an existing resource. DELETE Deletes a resource. """ class UserEndpoint(Resource)...
[ "models.User.User.objects.get", "flask_restful.reqparse.RequestParser", "models.User.User", "flask.request.get_json", "flask_restful.abort" ]
[((354, 372), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (370, 372), False, 'from flask import request\n'), ((1006, 1072), 'models.User.User', 'User', ([], {'name': 'name', 'username': 'username', 'email': 'email', 'password': 'password'}), '(name=name, username=username, email=email, password=pass...
import datetime from enum import Enum from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, cast import attr import ciso8601 import structlog from attr import converters from . import enums from .utils import as_json_dict, to_snake_case logger = structlog.get_logger() class Omitted(Enum): ...
[ "structlog.get_logger", "attr.attrs", "attr.converters.optional", "attr.attrib", "typing.cast", "typing.TypeVar" ]
[((273, 295), 'structlog.get_logger', 'structlog.get_logger', ([], {}), '()\n', (293, 295), False, 'import structlog\n'), ((645, 679), 'typing.TypeVar', 'TypeVar', (['"""U"""'], {'bound': '"""BaseAAEntity"""'}), "('U', bound='BaseAAEntity')\n", (652, 679), False, 'from typing import Any, Callable, Dict, List, Optional,...
from datetime import datetime from jsonschema_serialize_fork import NO_DEFAULT from pyramid.security import effective_principals from pyramid.threadlocal import get_current_request from string import ( digits, ascii_uppercase, ) import random import uuid from snovault.schema_utils import server_default A...
[ "pyramid.threadlocal.get_current_request", "random.choice", "datetime.datetime.utcnow", "pyramid.security.effective_principals", "uuid.uuid4", "pyramid.path.DottedNameResolver" ]
[((766, 787), 'pyramid.threadlocal.get_current_request', 'get_current_request', ([], {}), '()\n', (785, 787), False, 'from pyramid.threadlocal import get_current_request\n'), ((805, 834), 'pyramid.security.effective_principals', 'effective_principals', (['request'], {}), '(request)\n', (825, 834), False, 'from pyramid....
import unittest import os import json import requests import requests_mock from ioapi import api_url, IOService, AuthorizationError, UnexpectedResponseCodeError class APIAccountStateTestCase(unittest.TestCase): def setUp(self): self.service = IOService() @requests_mock.mock() def test_account_st...
[ "os.path.dirname", "requests_mock.mock", "ioapi.IOService" ]
[((276, 296), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (294, 296), False, 'import requests_mock\n'), ((570, 590), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (588, 590), False, 'import requests_mock\n'), ((1088, 1108), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', ...
''' fake posts to bootstrap a development database. Put any interesting cases useful for development in here. ''' from datetime import datetime POST_DATA_1 = [ { "created" : datetime(2015, 10, 1), "published": datetime(2015, 10, 1), "edited": datetime(2015, 10, 1), "rende...
[ "datetime.datetime" ]
[((197, 218), 'datetime.datetime', 'datetime', (['(2015)', '(10)', '(1)'], {}), '(2015, 10, 1)\n', (205, 218), False, 'from datetime import datetime\n'), ((241, 262), 'datetime.datetime', 'datetime', (['(2015)', '(10)', '(1)'], {}), '(2015, 10, 1)\n', (249, 262), False, 'from datetime import datetime\n'), ((285, 306), ...
from __future__ import absolute_import from django.shortcuts import render import simplejson import datetime from django.http import HttpResponse class GenericItemBase(object): ITEM_ATTRS = [] def __init__(self, identifier): self.identifier = identifier def jsonify(self, value): """ ...
[ "simplejson.dumps", "django.shortcuts.render" ]
[((1371, 1393), 'simplejson.dumps', 'simplejson.dumps', (['item'], {}), '(item)\n', (1387, 1393), False, 'import simplejson\n'), ((1548, 1591), 'django.shortcuts.render', 'render', (['self.TEMPLATE', "{'item': self._item}"], {}), "(self.TEMPLATE, {'item': self._item})\n", (1554, 1591), False, 'from django.shortcuts imp...
import math import warnings import numpy as np import pandas as pd from scipy.optimize import minimize import scipy.stats from scipy.stats import norm # edit from scipy.special import log_ndtr from sklearn.linear_model import LinearRegression from sklearn.metrics import mean_squared_error, mean_absolute_error def sp...
[ "scipy.special.log_ndtr", "numpy.sqrt", "numpy.squeeze", "numpy.append", "numpy.exp", "numpy.dot", "numpy.square", "numpy.concatenate", "numpy.finfo", "warnings.warn", "sklearn.linear_model.LinearRegression", "numpy.var" ]
[((3180, 3214), 'numpy.append', 'np.append', (['beta_jac', '(sigma_jac / s)'], {}), '(beta_jac, sigma_jac / s)\n', (3189, 3214), True, 'import numpy as np\n'), ((443, 533), 'warnings.warn', 'warnings.warn', (['"""No censored observations; use regression methods for uncensored data"""'], {}), "(\n 'No censored observ...
import os import sys from conda_build import api from conda_build import render import pytest def test_output_with_noarch_says_noarch(testing_metadata): testing_metadata.meta['build']['noarch'] = 'python' output = api.get_output_file_path(testing_metadata) assert os.path.sep + "noarch" + os.path.sep in o...
[ "conda_build.render._simplify_to_exact_constraints", "conda_build.render.get_pin_from_build", "conda_build.api.get_output_file_path" ]
[((225, 267), 'conda_build.api.get_output_file_path', 'api.get_output_file_path', (['testing_metadata'], {}), '(testing_metadata)\n', (249, 267), False, 'from conda_build import api\n'), ((469, 511), 'conda_build.api.get_output_file_path', 'api.get_output_file_path', (['testing_metadata'], {}), '(testing_metadata)\n', ...
#Writing MOOG parameter file for the parameter, abundance, and error calculations. #The parameter file only needs to be written once, at beginning of the routine, because the output #files are overwritten with each itereation of the routine, only minimal output data are needed. # #The user can choose to have the param...
[ "numpy.array", "numpy.core.records.fromarrays" ]
[((4833, 4850), 'numpy.array', 'np.array', (['new_arr'], {}), '(new_arr)\n', (4841, 4850), True, 'import numpy as np\n'), ((4881, 4931), 'numpy.core.records.fromarrays', 'np.core.records.fromarrays', (['new_arr.T'], {'dtype': 'dtype'}), '(new_arr.T, dtype=dtype)\n', (4907, 4931), True, 'import numpy as np\n')]
#coding=utf-8 from __future__ import division """ # OVERLAY UFOS For anyone looking in here, sorry the code is so messy. This is a standalone version of a script with a lot of dependencies. """ import os from AppKit import * #@PydevCodeAnalysisIgnore from vanilla import * #@PydevCodeAnalysisIgnore from mojo.drawingT...
[ "builtins.chr", "mojo.extensions.setExtensionDefaultColor", "defconAppKit.windows.baseWindow.BaseWindowController.windowCloseCallback", "mojo.extensions.getExtensionDefaultColor", "builtins.str", "mojo.extensions.getExtensionDefault", "lib.tools.drawing.strokePixelPath", "mojo.events.removeObserver", ...
[((1594, 1629), 'mojo.events.addObserver', 'addObserver', (['target', 'method', 'action'], {}), '(target, method, action)\n', (1605, 1629), False, 'from mojo.events import addObserver, removeObserver\n'), ((1693, 1731), 'mojo.events.removeObserver', 'removeObserver', (['target', 'method', 'action'], {}), '(target, meth...
import requests import json import datetime import sys from dateutil.parser import parse as to_datetime try: import pandas as pd except: pass from pyteamup.utils.utilities import * from pyteamup.utils.constants import * from pyteamup.Event import Event class Calendar: def __init__(self, cal_id, api_key):...
[ "pandas.DataFrame.from_records", "pandas.Series", "json.loads", "requests.post", "dateutil.parser.parse", "json.dumps", "pyteamup.Event.Event", "requests.get", "datetime.timedelta", "datetime.date.today" ]
[((3970, 4023), 'requests.get', 'requests.get', (['(self._event_collection_url + parameters)'], {}), '(self._event_collection_url + parameters)\n', (3982, 4023), False, 'import requests\n'), ((4553, 4630), 'requests.post', 'requests.post', (['self._event_collection_url'], {'data': 'payload', 'headers': 'POST_HEADERS'})...
from django.conf.urls import patterns, include, url urlpatterns = patterns('reports.views', url(r'^index/*$', 'index'), url(r'^dashboard/*$', 'dashboard'), url(r'^$', 'index'), url(r'^detail/(?P<serial>[^/]+)$', 'detail'), url(r'^detailpkg/(?P<serial>[^/]+)/(?P<manifest_name>[^/]+)$', 'detail_pkg')...
[ "django.conf.urls.url" ]
[((97, 122), 'django.conf.urls.url', 'url', (['"""^index/*$"""', '"""index"""'], {}), "('^index/*$', 'index')\n", (100, 122), False, 'from django.conf.urls import patterns, include, url\n'), ((129, 162), 'django.conf.urls.url', 'url', (['"""^dashboard/*$"""', '"""dashboard"""'], {}), "('^dashboard/*$', 'dashboard')\n",...
#!/usr/bin/env python # -*- coding: utf-8 -*- import time import multiprocessing import pytest import socket import signal import os import logging try: from urllib2 import URLError, urlopen except ImportError: from urllib.error import URLError from urllib.request import urlopen from flask import _request...
[ "os.kill", "socket.socket", "multiprocessing.Process", "time.sleep", "pytest.fixture", "logging.error" ]
[((3608, 3640), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (3622, 3640), False, 'import pytest\n'), ((5242, 5298), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['application/json', 'text/html']"}), "(params=['application/json', 'text/html'])\n", (5256, 5298), ...
from .exceptions import ( ServerResponseError, InternalServerError, NonXMLResponseError, EndpointUnavailableError, ) from functools import wraps from xml.etree.ElementTree import ParseError from ..query import QuerySet import logging try: from distutils2.version import NormalizedVersion as Version ...
[ "logging.getLogger", "distutils.version.LooseVersion", "warnings.warn", "functools.wraps" ]
[((408, 445), 'logging.getLogger', 'logging.getLogger', (['"""tableau.endpoint"""'], {}), "('tableau.endpoint')\n", (425, 445), False, 'import logging\n'), ((5527, 5538), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (5532, 5538), False, 'from functools import wraps\n'), ((6540, 6551), 'functools.wraps', 'wra...
import unittest from freeplane_importer.importer import Importer from mock import Mock from mock import MagicMock from mock import call from freeplane_importer.model_not_found_exception import ModelNotFoundException class TestImporter(unittest.TestCase): def setUp(self): self.mock_collection = Mock() ...
[ "mock.Mock", "mock.call", "mock.MagicMock", "freeplane_importer.importer.Importer" ]
[((311, 317), 'mock.Mock', 'Mock', ([], {}), '()\n', (315, 317), False, 'from mock import Mock\n'), ((345, 356), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (354, 356), False, 'from mock import MagicMock\n'), ((457, 468), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (466, 468), False, 'from mock import MagicMo...
import argparse import numpy as np from scipy.stats import linregress import matplotlib.pyplot as plt parser = argparse.ArgumentParser() parser.add_argument("--plot", action="store_const", default=False, const=True) args = parser.parse_args() data = np.loadtxt("../data/data.csv", skiprows=1, usecols=list(range(1,8)),...
[ "scipy.stats.linregress", "numpy.mean", "argparse.ArgumentParser", "matplotlib.pyplot.gca", "numpy.log", "matplotlib.pyplot.plot", "numpy.exp", "numpy.sum", "matplotlib.pyplot.show" ]
[((112, 137), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (135, 137), False, 'import argparse\n'), ((435, 449), 'numpy.log', 'np.log', (['deaths'], {}), '(deaths)\n', (441, 449), True, 'import numpy as np\n'), ((487, 515), 'scipy.stats.linregress', 'linregress', (['xdays', 'logdeaths'], {}),...
import sys import os import re import json import pickle import tempfile import itertools import contextlib import xml.etree.cElementTree as cET from copy import deepcopy from xml.etree import ElementTree as ET from pprint import pformat from .constants import ( MARKER_COLOR, MARKER_DURATION, MARKER_NAME, ...
[ "xml.etree.ElementTree.fromstringlist", "re.compile", "xml.etree.cElementTree.ElementTree", "sys.platform.startswith", "copy.deepcopy", "flame.execute_shortcut", "os.remove", "openpype.api.run_subprocess", "flame.duplicate", "adsk.libwiretapPythonClientAPI.WireTapStr", "json.dumps", "os.path.i...
[((403, 439), 'openpype.api.Logger.get_logger', 'openpype.Logger.get_logger', (['__name__'], {}), '(__name__)\n', (429, 439), True, 'import openpype.api as openpype\n'), ((457, 488), 're.compile', 're.compile', (['"""[\\\\._](\\\\d+)[\\\\.]"""'], {}), "('[\\\\._](\\\\d+)[\\\\.]')\n", (467, 488), False, 'import re\n'), ...
import sys import numpy as np import pandas as pd import matplotlib.pyplot as plt import seaborn as sns if len(sys.argv) != 3: print('usage: python plot_performances.py <group_csv> <indiv_csv>') exit() group_file = sys.argv[1] indiv_file = sys.argv[2] # Load the data df_group = pd.read_csv(group_file) df_i...
[ "seaborn.set", "matplotlib.pyplot.savefig", "pandas.read_csv", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.gca", "matplotlib.pyplot.xlabel", "seaborn.boxplot", "matplotlib.pyplot.axhline", "matplotlib.pyplot.figure", "matplotlib.pyplot.tight_layout", "pandas.concat", "numpy.arange", "matp...
[((292, 315), 'pandas.read_csv', 'pd.read_csv', (['group_file'], {}), '(group_file)\n', (303, 315), True, 'import pandas as pd\n'), ((327, 350), 'pandas.read_csv', 'pd.read_csv', (['indiv_file'], {}), '(indiv_file)\n', (338, 350), True, 'import pandas as pd\n'), ((356, 398), 'pandas.concat', 'pd.concat', (['[df_group, ...
from collections import OrderedDict import pytest import vcr try: # Python 2.7 # requests's ``json()`` function returns strings as unicode (as per the # JSON spec). In 2.7, those are of type unicode rather than str. basestring # was created to help with that. # https://docs.python.org/2/library/func...
[ "vcr.use_cassette" ]
[((524, 571), 'vcr.use_cassette', 'vcr.use_cassette', (['"""public_API/europe_show.yaml"""'], {}), "('public_API/europe_show.yaml')\n", (540, 571), False, 'import vcr\n'), ((783, 830), 'vcr.use_cassette', 'vcr.use_cassette', (['"""public_API/europe_show.yaml"""'], {}), "('public_API/europe_show.yaml')\n", (799, 830), F...
import os import sys import numpy as np import matplotlib.pyplot as plt import flopy def run(): workspace = os.path.join("lake") # make sure workspace directory exists if not os.path.exists(workspace): os.makedirs(workspace) fext = "png" narg = len(sys.argv) iarg = 0 if narg > 1:...
[ "os.path.exists", "numpy.ones", "flopy.modflow.ModflowPcg", "os.makedirs", "matplotlib.pyplot.clabel", "matplotlib.pyplot.gcf", "os.path.join", "flopy.modflow.ModflowDis", "os.getcwd", "os.chdir", "numpy.linspace", "flopy.modflow.ModflowBas", "numpy.savetxt", "flopy.modflow.ModflowLpf", ...
[((115, 135), 'os.path.join', 'os.path.join', (['"""lake"""'], {}), "('lake')\n", (127, 135), False, 'import os\n'), ((526, 537), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (535, 537), False, 'import os\n'), ((581, 600), 'os.chdir', 'os.chdir', (['workspace'], {}), '(workspace)\n', (589, 600), False, 'import os\n'), (...
# -*- coding: utf-8 -*- ''' Documentation on clustering in Python: http://scikit-learn.org/stable/modules/clustering.html http://www.learndatasci.com/k-means-clustering-algorithms-python-intro/ http://hdbscan.readthedocs.io/en/latest/comparing_clustering_algorithms.html https://joernhees.de...
[ "pandas.read_csv", "sklearn.cluster.MeanShift", "sklearn.cluster.DBSCAN", "seaborn.set", "sklearn.cluster.AgglomerativeClustering", "seaborn.color_palette", "scipy.cluster.hierarchy.ward", "pandas.DataFrame", "prettytable.PrettyTable", "seaborn.clustermap", "seaborn.heatmap", "time.time", "s...
[((751, 760), 'seaborn.set', 'sns.set', ([], {}), '()\n', (758, 760), True, 'import seaborn as sns\n'), ((859, 908), 'pandas.read_csv', 'pd.read_csv', (['"""../mujeres_fecundidad_INE_2018.csv"""'], {}), "('../mujeres_fecundidad_INE_2018.csv')\n", (870, 908), True, 'import pandas as pd\n'), ((2256, 2292), 'matplotlib.py...
""" preprocess of (single lead) ecg signal: band pass --> remove baseline --> find rpeaks --> denoise (mainly deal with motion artefact) TODO: 1. motion artefact detection, and slice the signal into continuous (no motion artefact within) segments 2. to add References: ----------- [1] https://github...
[ "scipy.ndimage.filters.median_filter", "os.makedirs", "numpy.where", "os.path.join", "multiprocessing.cpu_count", "numpy.append", "easydict.EasyDict", "numpy.array", "multiprocessing.Pool", "copy.deepcopy", "time.time" ]
[((2371, 2391), 'copy.deepcopy', 'deepcopy', (['PreprocCfg'], {}), '(PreprocCfg)\n', (2379, 2391), False, 'from copy import deepcopy\n'), ((3534, 3586), 'easydict.EasyDict', 'ED', (["{'filtered_ecg': filtered_ecg, 'rpeaks': rpeaks}"], {}), "({'filtered_ecg': filtered_ecg, 'rpeaks': rpeaks})\n", (3536, 3586), True, 'fro...
"""Test Manage All-Link Record.""" import unittest from binascii import unhexlify from pyinsteon.address import Address from pyinsteon.constants import AckNak, ManageAllLinkRecordAction, MessageId from pyinsteon.protocol.messages.all_link_record_flags import \ AllLinkRecordFlags from tests import set_log_levels fr...
[ "pyinsteon.constants.AckNak", "tests.utils.hex_to_inbound_message", "tests.set_log_levels", "pyinsteon.address.Address", "pyinsteon.constants.MessageId", "pyinsteon.protocol.messages.all_link_record_flags.AllLinkRecordFlags", "binascii.unhexlify", "pyinsteon.constants.ManageAllLinkRecordAction" ]
[((653, 667), 'pyinsteon.constants.MessageId', 'MessageId', (['(111)'], {}), '(111)\n', (662, 667), False, 'from pyinsteon.constants import AckNak, ManageAllLinkRecordAction, MessageId\n'), ((691, 720), 'pyinsteon.constants.ManageAllLinkRecordAction', 'ManageAllLinkRecordAction', (['(64)'], {}), '(64)\n', (716, 720), F...
from django.shortcuts import render, HttpResponse, HttpResponseRedirect from django.template import loader from django.conf import settings from django.contrib.auth.models import User from rameniaapp.models import ReviewReport, ProfileReport, NoodleReport, Report, Review, Profile, Noodle from django.views.generic impor...
[ "django.contrib.auth.models.User.objects.get", "rameniaapp.models.ProfileReport.objects.get", "rameniaapp.actionhookutils.dispatch_hook", "rameniaapp.models.NoodleReport.objects.get", "rameniaapp.models.Report.objects.get", "rameniaapp.models.Profile.objects.get", "rameniaapp.models.Profile._meta.get_fi...
[((5632, 5670), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""/app/login"""'}), "(login_url='/app/login')\n", (5646, 5670), False, 'from django.contrib.auth.decorators import login_required\n'), ((6324, 6362), 'django.contrib.auth.decorators.login_required', 'login_required',...
import numpy as np class ProjectionMatrix(): """This matrix provides projection distortion. Projection distortion is when things that are far away appear smaller and things that are close appear bigger. This works flawlessly so far. Takes in screen-size and provides near- and far clipping. fov is f...
[ "numpy.sqrt", "numpy.tan", "numpy.array", "numpy.cos", "numpy.sin", "numpy.matrix" ]
[((648, 673), 'numpy.tan', 'np.tan', (['(fov * np.pi / 2.0)'], {}), '(fov * np.pi / 2.0)\n', (654, 673), True, 'import numpy as np\n'), ((746, 948), 'numpy.array', 'np.array', (['[[screen_size[1] / (tanHalfFOV * screen_size[0]), 0, 0, 0], [0, 1.0 /\n tanHalfFOV, 0, 0], [0, 0, (-zNear - zFar) / zRange, 2.0 * zFar * z...
from xml.dom import minidom import pywikibot from api.decorator import time_this SiteMock = pywikibot.Site class PageMock(pywikibot.Page): def __init__(self, *args, **kwargs): super(PageMock, self).__init__(*args, **kwargs) self.filename = "test_data/test_pages_%s.xml" % self.site.lang ...
[ "api.decorator.time_this" ]
[((1109, 1144), 'api.decorator.time_this', 'time_this', (['"""Page.get() method mock"""'], {}), "('Page.get() method mock')\n", (1118, 1144), False, 'from api.decorator import time_this\n')]
import os from functools import partial from io import BytesIO import numpy as np import PIL.Image import scipy.misc import tensorflow as tf graph = tf.Graph() sess = tf.InteractiveSession(graph=graph) model_fn = "./models/tensorflow_inception_graph.pb" with tf.gfile.FastGFile(model_fn, 'rb') as f: graph_def = tf...
[ "tensorflow.shape", "tensorflow.gfile.FastGFile", "tensorflow.gradients", "tensorflow.reduce_mean", "tensorflow.Graph", "tensorflow.placeholder", "tensorflow.GraphDef", "tensorflow.maximum", "tensorflow.square", "tensorflow.nn.conv2d", "numpy.abs", "numpy.eye", "tensorflow.InteractiveSession...
[((151, 161), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (159, 161), True, 'import tensorflow as tf\n'), ((169, 203), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {'graph': 'graph'}), '(graph=graph)\n', (190, 203), True, 'import tensorflow as tf\n'), ((383, 423), 'tensorflow.placeholder', 'tf.p...
import calendar import datetime import logging import os import webapp2 import dbmodel TESTING = os.environ.get('SERVER_SOFTWARE', '').startswith('Development') class ResetHandler(webapp2.RequestHandler): def get(self): timestamp = calendar.timegm(datetime.datetime.utcnow().timetuple()) self.re...
[ "datetime.datetime.utcfromtimestamp", "datetime.datetime.utcnow", "dbmodel.ReportItem.all", "os.environ.get", "calendar.timegm", "webapp2.WSGIApplication", "logging.info", "dbmodel.AggregateItem.all" ]
[((1744, 1822), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/tasks/admin/reset', ResetHandler)]"], {'debug': 'TESTING'}), "([('/tasks/admin/reset', ResetHandler)], debug=TESTING)\n", (1767, 1822), False, 'import webapp2\n'), ((100, 137), 'os.environ.get', 'os.environ.get', (['"""SERVER_SOFTWARE"""', '""...
from dataclasses import dataclass from itertools import cycle from typing import Dict, Union import numpy as np from ...layers.utils.color_transformations import ( transform_color, transform_color_cycle, ) @dataclass(eq=False) class ColorCycle: """A dataclass to hold a color cycle for the fallback_color...
[ "numpy.array_equal", "numpy.allclose", "dataclasses.dataclass" ]
[((219, 238), 'dataclasses.dataclass', 'dataclass', ([], {'eq': '(False)'}), '(eq=False)\n', (228, 238), False, 'from dataclasses import dataclass\n'), ((1165, 1206), 'numpy.array_equal', 'np.array_equal', (['self.values', 'other.values'], {}), '(self.values, other.values)\n', (1179, 1206), True, 'import numpy as np\n'...
"""Abstract class for all toggle buttons""" # Standard library imports import logging from collections import OrderedDict # Third party imports import ipywidgets # Local imports from .abc_toggle_buttons import BaseToggleButtons from .layouts import DICT_LAYOUT_HBOX_ANY LOGGER = logging.getLogger(__name__) class Ba...
[ "logging.getLogger", "ipywidgets.HBox", "collections.OrderedDict", "ipywidgets.ToggleButton", "ipywidgets.Button", "ipywidgets.Layout" ]
[((282, 309), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (299, 309), False, 'import logging\n'), ((1092, 1105), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1103, 1105), False, 'from collections import OrderedDict\n'), ((1151, 1164), 'collections.OrderedDict', 'Ordered...
__author__ = '<NAME> and <NAME>' import Player import Message # input #0 for rock #1 for paper #2 for scissors # past move is array of numbers # our move followed by their move #Our strategy is to look at all past moves #In a large number of games, you would expect # each move to be seen an even number of times #So...
[ "Player.Player.__init__", "Message.Message.get_match_start_message", "Message.Message.get_round_start_message", "Message.Message.get_round_end_message" ]
[((503, 531), 'Player.Player.__init__', 'Player.Player.__init__', (['self'], {}), '(self)\n', (525, 531), False, 'import Player\n'), ((3621, 3669), 'Message.Message.get_match_start_message', 'Message.Message.get_match_start_message', (['players'], {}), '(players)\n', (3660, 3669), False, 'import Message\n'), ((3689, 37...
from tictactoe import TicTacToe import random import csv import os gameNr = 1 gameLimit = 10000 lst_moves_1 = [] lst_moves_2 = [] while gameNr <= gameLimit: print("+++++++++++") print("Game#", gameNr) game = TicTacToe() tmp_moves_1 = [] tmp_moves_2 = [] while game.get_winner() == 0 and game....
[ "csv.writer", "random.randint", "tictactoe.TicTacToe" ]
[((223, 234), 'tictactoe.TicTacToe', 'TicTacToe', ([], {}), '()\n', (232, 234), False, 'from tictactoe import TicTacToe\n'), ((1258, 1271), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1268, 1271), False, 'import csv\n'), ((1392, 1405), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1402, 1405), False, 'impor...
import bitstring import pytest from stixcore.data.test import test_data from stixcore.idb.manager import IDBManager from stixcore.tmtc.packets import ( SOURCE_PACKET_HEADER_STRUCTURE, TC_DATA_HEADER_STRUCTURE, TM_DATA_HEADER_STRUCTURE, SourcePacketHeader, TCPacket, TMDataHeader, TMPacket, ...
[ "stixcore.tmtc.packets.TM_DATA_HEADER_STRUCTURE.keys", "stixcore.tmtc.tm.tm_1.TM_1_1", "pytest.mark.parametrize", "stixcore.tmtc.packets.SOURCE_PACKET_HEADER_STRUCTURE.keys", "stixcore.tmtc.packets.TC_DATA_HEADER_STRUCTURE.keys", "stixcore.tmtc.packets.TCPacket", "stixcore.tmtc.packets.TMPacket", "sti...
[((455, 596), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""class_header"""', '[(SourcePacketHeader, SOURCE_PACKET_HEADER_STRUCTURE), (TMDataHeader,\n TM_DATA_HEADER_STRUCTURE)]'], {}), "('class_header', [(SourcePacketHeader,\n SOURCE_PACKET_HEADER_STRUCTURE), (TMDataHeader, TM_DATA_HEADER_STRUCTURE...
# run local models given a path, default to './mxnet_models/' import os import argparse import time import mxnet as mx import numpy as np file_path = os.path.realpath(__file__) dir_name = os.path.dirname(file_path) os.environ["MXNET_CUDNN_AUTOTUNE_DEFAULT"] = "0" class cuda_profiler_start(): import numba.cuda ...
[ "numpy.multiply", "argparse.ArgumentParser", "mxnet.nd.waitall", "mxnet.random.uniform", "mxnet.cpu", "numpy.average", "numba.cuda.profile_stop", "numpy.min", "numpy.max", "os.path.realpath", "os.path.dirname", "numba.cuda.profile_start", "mxnet.gpu", "mxnet.mod.Module", "mxnet.model.loa...
[((152, 178), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (168, 178), False, 'import os\n'), ((190, 216), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (205, 216), False, 'import os\n'), ((474, 561), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([],...
# Generated by Django 3.1.1 on 2020-12-16 03:07 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Scheduler', fields=[ ('id', models.AutoFiel...
[ "django.db.models.AutoField", "django.db.models.IntegerField" ]
[((305, 398), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (321, 398), False, 'from django.db import migrations, models\...
from sklearn.preprocessing import StandardScaler from sklearn.datasets import load_wine from sklearn.model_selection import train_test_split wine = load_wine() columns_names = wine.feature_names y = wine.target X = wine.data print('Pre scaling X') print(X) scaler = StandardScaler() scaler.fit(X) scaled_features = sc...
[ "sklearn.preprocessing.StandardScaler", "sklearn.model_selection.train_test_split", "sklearn.datasets.load_wine" ]
[((149, 160), 'sklearn.datasets.load_wine', 'load_wine', ([], {}), '()\n', (158, 160), False, 'from sklearn.datasets import load_wine\n'), ((269, 285), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (283, 285), False, 'from sklearn.preprocessing import StandardScaler\n'), ((422, 475), 'skle...
############################################################## #*** MagicDAQ USB DAQ and M&A Board General Demo Script *** ############################################################## #*** Websites *** # MagicDAQ Website: # https://www.magicdaq.com/ # API Docs Website: # https://magicdaq.github.io/magicdaq_docs/ #*...
[ "magicdaq.api_class.MagicDAQDevice", "time.sleep", "sys.exit" ]
[((6139, 6152), 'time.sleep', 'time.sleep', (['(8)'], {}), '(8)\n', (6149, 6152), False, 'import time\n'), ((8952, 8965), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8962, 8965), False, 'import time\n'), ((1263, 1279), 'magicdaq.api_class.MagicDAQDevice', 'MagicDAQDevice', ([], {}), '()\n', (1277, 1279), False...
# Copyright 2022 IBM Inc. All rights reserved # SPDX-License-Identifier: Apache2.0 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless ...
[ "matplotlib.pyplot.ylabel", "cl.orderZ", "matplotlib.ticker.FuncFormatter", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.plot", "matplotlib.pyplot.close", "numpy.dot", "itertools.permutations", "cl.create_basic_problem", "cl.Tableau", "matplotlib.pyplot.savefig", "cl.zeroX_algorithm1_cz", ...
[((2046, 2071), 'cl.ensureDirExists', 'cl.ensureDirExists', (['"""fig"""'], {}), "('fig')\n", (2064, 2071), False, 'import cl\n'), ((2103, 2132), 'cl.create_basic_problem', 'cl.create_basic_problem', (['(7)', '(0)'], {}), '(7, 0)\n', (2126, 2132), False, 'import cl\n'), ((2136, 2180), 'cl.generate_full_rank_weights', '...
""" #;+ #; NAME: #; general #; Version 1.0 #; #; PURPOSE: #; Module for monkeying with files and filenames #; 172Sep-2014 by JXP #;- #;------------------------------------------------------------------------------ """ # Import libraries import numpy as np from astropy.io import fits from astropy.io import as...
[ "os.path.lexists" ]
[((554, 577), 'os.path.lexists', 'os.path.lexists', (['filenm'], {}), '(filenm)\n', (569, 577), False, 'import os, pdb\n'), ((734, 765), 'os.path.lexists', 'os.path.lexists', (["(filenm + '.gz')"], {}), "(filenm + '.gz')\n", (749, 765), False, 'import os, pdb\n')]
# Copyright 2022 Collate # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software...
[ "metadata.great_expectations.utils.ometa_config_handler.render_template", "pytest.mark.parametrize", "unittest.mock.patch.dict", "metadata.great_expectations.action.OpenMetadataValidationAction" ]
[((883, 982), 'pytest.mark.parametrize', 'mark.parametrize', (['"""input,expected"""', "[(None, 'list_entities'), ('service_name', 'get_by_name')]"], {}), "('input,expected', [(None, 'list_entities'), (\n 'service_name', 'get_by_name')])\n", (899, 982), False, 'from pytest import mark\n'), ((1586, 1636), 'unittest.m...
# -*- coding: utf-8 -*- from storyruntime.Containers import Containers from storyruntime.constants.ServiceConstants import ServiceConstants import storyscript def test_containers_format_command(story): """ Ensures a simple resolve can be performed """ story_text = 'alpine echo msg:"foo"\n' story....
[ "storyscript.Api.loads" ]
[((631, 664), 'storyscript.Api.loads', 'storyscript.Api.loads', (['story_text'], {}), '(story_text)\n', (652, 664), False, 'import storyscript\n'), ((1129, 1162), 'storyscript.Api.loads', 'storyscript.Api.loads', (['story_text'], {}), '(story_text)\n', (1150, 1162), False, 'import storyscript\n')]
from django.contrib import admin from django.shortcuts import redirect from django.utils.safestring import mark_safe from django.contrib.admin.widgets import AdminFileWidget class AdminImageWidget(AdminFileWidget): def render(self, name, value, attrs=None, renderer=None): output = [] if value and ...
[ "django.shortcuts.redirect" ]
[((1027, 1100), 'django.shortcuts.redirect', 'redirect', (['f"""/admin/{model._meta.app_label}/{model._meta.model_name}/add/"""'], {}), "(f'/admin/{model._meta.app_label}/{model._meta.model_name}/add/')\n", (1035, 1100), False, 'from django.shortcuts import redirect\n'), ((1132, 1227), 'django.shortcuts.redirect', 'red...
import unittest class Solution: def setZeroes(self, matrix): """ :type matrix: List[List[int]] :rtype: void Do not return anything, modify matrix in-place instead. """ rows = [0] * len(matrix) cols = [0] * len(matrix[0]) for i, row in enumerate(matrix): ...
[ "unittest.main" ]
[((1201, 1216), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1214, 1216), False, 'import unittest\n')]
import argparse import collections import shutil import sys import time from datetime import timedelta from pathlib import Path import torch from torch.nn.parallel import DataParallel, DistributedDataParallel try: # PyTorch >= 1.6 supports mixed precision training from torch.cuda.amp import autocast amp_su...
[ "openunreid.utils.dist_utils.init_dist", "openunreid.models.losses.build_loss", "openunreid.utils.logger.Logger", "datetime.timedelta", "argparse.ArgumentParser", "pathlib.Path", "openunreid.data.build_val_dataloader", "openunreid.utils.config.cfg.TRAIN.datasets.keys", "torch.cuda.amp.autocast", "...
[((5588, 5641), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""SPGAN training"""'}), "(description='SPGAN training')\n", (5611, 5641), False, 'import argparse\n'), ((6342, 6378), 'openunreid.utils.config.cfg_from_yaml_file', 'cfg_from_yaml_file', (['args.config', 'cfg'], {}), '(args.conf...
import logging import requests import multiprocessing import pathlib from typing import List from typing import Optional from typing import Tuple from typing import Dict from joblib import delayed from joblib import Parallel from datetime import date from datetime import timedelta logger = logging.getLogger(__name__) ...
[ "logging.getLogger", "logging.NullHandler", "requests.session", "datetime.date.today", "multiprocessing.cpu_count", "joblib.Parallel", "datetime.date", "joblib.delayed", "datetime.timedelta", "multiprocessing.current_process" ]
[((292, 319), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (309, 319), False, 'import logging\n'), ((338, 359), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (357, 359), False, 'import logging\n'), ((388, 404), 'datetime.date', 'date', (['(2018)', '(3)', '(1)'], {}), '...
import logging import os import shutil import time import torch model_state = 'model_state.pt' trainer_state = 'trainer_state.pt' class Checkpoint(): def __init__(self, step, epoch, model, optim, path=None, opt=None): self.step = step self.epoch = epoch self.model = model self.o...
[ "logging.getLogger", "os.path.exists", "os.listdir", "os.makedirs", "os.path.join", "shutil.rmtree", "time.localtime" ]
[((406, 433), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (423, 433), False, 'import logging\n'), ((677, 704), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (694, 704), False, 'import logging\n'), ((790, 829), 'os.path.join', 'os.path.join', (['opt.ckpt_...
import numpy as np import torch import os import cv2 import importlib from dataset import * from PIL import Image from argparse import ArgumentParser from torch.autograd import Variable from torch.utils.data import DataLoader from torchvision.transforms import Compose, CenterCrop, Normalize, Resize from torchvision.tr...
[ "os.path.exists", "transform.Colorize", "lednet.Net", "torchvision.transforms.ToPILImage", "argparse.ArgumentParser", "torch.unsqueeze", "torch.load", "torch.nn.DataParallel", "os.path.dirname", "torchvision.transforms.Resize", "torch.no_grad", "torchvision.transforms.ToTensor", "torch.autog...
[((590, 602), 'torchvision.transforms.ToPILImage', 'ToPILImage', ([], {}), '()\n', (600, 602), False, 'from torchvision.transforms import ToTensor, ToPILImage\n'), ((974, 990), 'lednet.Net', 'Net', (['NUM_CLASSES'], {}), '(NUM_CLASSES)\n', (977, 990), False, 'from lednet import Net\n'), ((1004, 1032), 'torch.nn.DataPar...
import pytest from pytest import raises from pydantic_jsonapi.resource_linkage import ResourceLinkage from pydantic import BaseModel, ValidationError class ThingWithLinkageData(BaseModel): data: ResourceLinkage class TestResourceLinks: @pytest.mark.parametrize( 'linkage, message', [ ...
[ "pytest.mark.parametrize", "pytest.raises" ]
[((251, 742), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""linkage, message"""', "[(None, 'null is valid for empty to-one relationships'), ([],\n 'empty list valid for empty to-many relationships.'), ({'id': 'abc123',\n 'type': 'item', 'meta': None},\n 'single resource identifier valid for non-e...
import numpy as np from keras.datasets import mnist from keras.models import Sequential from keras.layers.core import Dense, Dropout, Activation from keras.optimizers import SGD from keras.utils import np_utils import keras.callbacks import keras.backend.tensorflow_backend as KTF import tensorflow as tf batch_size = 1...
[ "keras.backend.tensorflow_backend.get_session", "tensorflow.Graph", "keras.backend.tensorflow_backend.set_session", "keras.layers.core.Activation", "keras.datasets.mnist.load_data", "tensorflow.Session", "keras.models.Sequential", "keras.layers.core.Dense", "keras.utils.np_utils.to_categorical", "...
[((458, 475), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (473, 475), False, 'from keras.datasets import mnist\n'), ((831, 875), 'keras.utils.np_utils.to_categorical', 'np_utils.to_categorical', (['y_train', 'nb_classes'], {}), '(y_train, nb_classes)\n', (854, 875), False, 'from keras.utils i...
""" usage requires these additional modules pip install azure-batch azure-storage-blob jsonschema pyyaml && pip install git+https://github.com/microsoft/SparseSC.git@ad4bf27edb28f517508f6934f21eb65d17fb6543 && scgrad start usage: from SparseSC import fit, aggregate_batch_results from SparseSC.utils.azure_batch_clie...
[ "importlib.__import__", "azure.batch.models.ImageReference", "io.BytesIO", "azure.batch.models.OutputFileDestination", "time.sleep", "yaml.load", "datetime.timedelta", "sys.path.append", "azure.batch.models.OutputFileUploadOptions", "pathlib.Path", "azure.batch.batch_service_client.models.Contai...
[((2092, 2112), 'sys.path.append', 'sys.path.append', (['"""."""'], {}), "('.')\n", (2107, 2112), False, 'import sys\n'), ((2113, 2134), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (2128, 2134), False, 'import sys\n'), ((4142, 4196), 'azure.batch.models.OutputFileDestination', 'models.OutputFi...
from flask import Flask, current_app from flask import render_template from flask import jsonify from jieba.analyse import extract_tags import string from DB import chinaSQL from DB import worldSQL app = Flask(__name__, template_folder='../../web', static_folder='../../static') @app.route('/', methods=["get", "post"...
[ "flask.render_template", "DB.chinaSQL.urbanDataOfHubeiProvince", "DB.chinaSQL.existingConfirmedTop20UrbanAreas", "DB.chinaSQL.currentConfirmedDataInAllProvinces", "DB.worldSQL.overseasCountriesWithMoreThan10000ConfirmedCases", "flask.Flask", "DB.chinaSQL.hubeiNonHubeiNationalDailyNew", "DB.chinaSQL.da...
[((205, 279), 'flask.Flask', 'Flask', (['__name__'], {'template_folder': '"""../../web"""', 'static_folder': '"""../../static"""'}), "(__name__, template_folder='../../web', static_folder='../../static')\n", (210, 279), False, 'from flask import Flask, current_app\n'), ((353, 382), 'flask.render_template', 'render_temp...
# Generated by Django 2.0.2 on 2018-02-17 10:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('T2API', '0007_apiuser_deviceuser'), ] operations = [ migrations.AddField( model_name='product', name='weight', ...
[ "django.db.models.IntegerField" ]
[((333, 377), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': 'None', 'null': '(True)'}), '(default=None, null=True)\n', (352, 377), False, 'from django.db import migrations, models\n')]
#!/usr/bin/env python3 # Simple and dumb script to send a message to the #podman IRC channel on freenode # Based on example from: https://pythonspot.com/building-an-irc-bot/ import os import time import random import errno import socket import sys class IRC: response_timeout = 10 # seconds irc = socket.sock...
[ "time.sleep", "time.time", "os.environ.get", "socket.socket" ]
[((309, 324), 'socket.socket', 'socket.socket', ([], {}), '()\n', (322, 324), False, 'import socket\n'), ((489, 538), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (502, 538), False, 'import socket\n'), ((922, 933), 'time.time', 'time.time', ...
import unittest from flydenity import Parser class TestParseIcao24Bit(unittest.TestCase): def setUp(self): self.parser = Parser() def test_parse_simple(self): match = self.parser.parse("3D2591", icao24bit=True) self.assertEqual(match, {"nation": "Germany", "description": "general", ...
[ "unittest.main", "flydenity.Parser" ]
[((916, 931), 'unittest.main', 'unittest.main', ([], {}), '()\n', (929, 931), False, 'import unittest\n'), ((136, 144), 'flydenity.Parser', 'Parser', ([], {}), '()\n', (142, 144), False, 'from flydenity import Parser\n')]
import os def create_project(path): dirs = ['configs', 'module', 'data'] dirs = [os.path.join(path, d) for d in dirs] for d in dirs: os.makedirs(d) train_script = r""" import ever as er def train(trainer_name): trainer = er.trainer.get_trainer(trainer_name)() trainer.run() ...
[ "os.path.join", "os.makedirs" ]
[((91, 112), 'os.path.join', 'os.path.join', (['path', 'd'], {}), '(path, d)\n', (103, 112), False, 'import os\n'), ((155, 169), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (166, 169), False, 'import os\n'), ((342, 372), 'os.path.join', 'os.path.join', (['path', '"""train.py"""'], {}), "(path, 'train.py')\n", (...
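Each record above pairs a source snippet with the list of APIs it touches and an extract_api field of tuples: a character span ((start, end)) into the code string, the resolved dotted name (e.g. 'numpy.tan'), the name as written ('np.tan'), the argument text, and the import line that binds it. As a rough illustration only, the sketch below recovers the span and as-written name of each call with Python's ast module; extract_api_calls is a hypothetical helper, not the pipeline that produced these records, and it omits the argument and import details the real tuples carry.

import ast

def extract_api_calls(source: str):
    """Return ((start, end), name) for every dotted call in `source`.

    Hypothetical helper, for illustration only: the real records above
    also carry the resolved module path, argument reprs, and the
    originating import line, all of which this sketch omits.
    Requires Python 3.8+ for end_lineno/end_col_offset.
    """
    # Map 1-based (line, col) positions to absolute character offsets,
    # since the spans in the records index into the flat code string.
    line_starts = [0]
    for line in source.splitlines(keepends=True):
        line_starts.append(line_starts[-1] + len(line))

    def offset(lineno, col):
        return line_starts[lineno - 1] + col

    def dotted(node):
        # Rebuild "pkg.attr.func" from nested Attribute/Name nodes.
        parts = []
        while isinstance(node, ast.Attribute):
            parts.append(node.attr)
            node = node.value
        if isinstance(node, ast.Name):
            parts.append(node.id)
            return ".".join(reversed(parts))
        return None  # e.g. calls on subscripts or on call results

    calls = []
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, ast.Call):
            name = dotted(node.func)
            if name is not None:
                span = (offset(node.lineno, node.col_offset),
                        offset(node.end_lineno, node.end_col_offset))
                calls.append((span, name))
    return calls

print(extract_api_calls("import numpy as np\nx = np.tan(0.5)\n"))
# -> [((23, 34), 'np.tan')]

Working from absolute character offsets rather than (line, col) pairs matches how the spans in the extract_api tuples above index into the flattened code strings.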