Dataset schema (each row below occupies three lines, in this order):
- code: string, lengths 22 to 1.05M characters; the raw contents of one source file.
- apis: list, 1 to 3.31k entries; the fully qualified names of the APIs the file calls.
- extract_api: string, lengths 75 to 3.25M characters; serialized per-call extraction records.
In this preview, each cell is flattened to a single line and long cells are truncated with "...".
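Each extract_api cell is a Python-literal list of call records, so ast.literal_eval can decode it. The field layout used below is inferred from the rows in this section and is an assumption, not a documented schema; the spans look like character offsets into the row's code string, and the trailing string is the import that brings the API into scope.

import ast

# One record copied from the os.path.getsize row near the end of this
# section. Assumed field layout (inferred, not documented):
#   (call_span, qualified_name, name_in_source, (args, kwargs),
#    arg_source, arg_span, import_uses_alias, import_statement)
row = ("[((35, 62), 'os.path.getsize', 'os.path.getsize', "
       "(['\"\"\"test.txt\"\"\"'], {}), \"('test.txt')\\n\", "
       "(50, 62), False, 'import os\\n')]")

for rec in ast.literal_eval(row):
    (start, end), qual, local, (args, kwargs), arg_src, arg_span, aliased, imp = rec
    # If the spans are offsets into code, code[start:end] recovers the call text.
    print(qual, (start, end), imp.strip())

The boolean field appears to flag aliased imports: it is False for plain imports such as 'import re' and True where the import uses an alias, as in 'from urllib.parse import quote as uriquote' or 'import numpy as np'.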
#from utils import ordinal import re from urllib.parse import quote as uriquote import asyncio from bs4 import BeautifulSoup import collections from utils.context import MoreContext from utils.context import Location from utils.paginator import Paginator from utils.units import units ordinal = lambda n: "%d%s" % (n,"t...
[ "bs4.BeautifulSoup", "re.search", "urllib.parse.quote", "re.compile" ]
[((375, 394), 're.compile', 're.compile', (['"""<.*?>"""'], {}), "('<.*?>')\n", (385, 394), False, 'import re\n'), ((747, 768), 'urllib.parse.quote', 'uriquote', (['search_term'], {}), '(search_term)\n', (755, 768), True, 'from urllib.parse import quote as uriquote\n'), ((1618, 1645), 'bs4.BeautifulSoup', 'BeautifulSou...
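Under the same assumed layout, a row's apis list appears to match the deduplicated qualified names (field index 1) of its extract_api records. A minimal consistency check, assuming that relationship holds and given the full, untruncated field (the preview rows here are cut off):

import ast

def apis_from_extract(extract_api_str):
    # Collect the assumed qualified-name field from every record.
    return sorted({rec[1] for rec in ast.literal_eval(extract_api_str)})

Applied to the first row above, this should reproduce the four names on its apis line, in sorted order rather than the stored order.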
from django.forms import ModelForm, inlineformset_factory, BaseInlineFormSet from . import models class AuthorContainerForm(ModelForm): class Meta: model = models.AuthorContainer exclude = ('id',) class AuthorForm(ModelForm): class Meta: model = models.Author fields = ('first...
[ "django.forms.inlineformset_factory" ]
[((467, 562), 'django.forms.inlineformset_factory', 'inlineformset_factory', (['models.Author', 'models.Book'], {'form': 'BookForm', 'can_delete': '(True)', 'extra': '(0)'}), '(models.Author, models.Book, form=BookForm, can_delete\n =True, extra=0)\n', (488, 562), False, 'from django.forms import ModelForm, inlinefo...
from collections import Counter from math import log from tqdm import tqdm import re from evaluation import evaluateSet def build_model(train_set): hmm_model = {i:Counter() for i in 'SBME'} trans = {'SS':0, 'SB':0, 'BM':0, 'BE':0, 'MM':0, 'ME':0, ...
[ "tqdm.tqdm", "evaluation.evaluateSet", "math.log", "collections.Counter", "re.sub" ]
[((2597, 2630), 're.sub', 're.sub', (['"""\\\\d+\\\\.?\\\\d*"""', '"""0"""', 'rstr'], {}), "('\\\\d+\\\\.?\\\\d*', '0', rstr)\n", (2603, 2630), False, 'import re\n'), ((2661, 2695), 're.sub', 're.sub', (['"""[a-zA-Z]+\\\\/"""', '"""1/"""', 'rstr'], {}), "('[a-zA-Z]+\\\\/', '1/', rstr)\n", (2667, 2695), False, 'import r...
# Copyright 2018 Google Inc. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # #...
[ "third_party.nucleus.io.python.bedgraph_writer.BedGraphWriter.to_file", "third_party.nucleus.io.python.bedgraph_reader.BedGraphReader.from_file" ]
[((3826, 3881), 'third_party.nucleus.io.python.bedgraph_reader.BedGraphReader.from_file', 'bedgraph_reader.BedGraphReader.from_file', (['bedgraph_path'], {}), '(bedgraph_path)\n', (3866, 3881), False, 'from third_party.nucleus.io.python import bedgraph_reader\n'), ((5301, 5352), 'third_party.nucleus.io.python.bedgraph_...
from ConfigSpace import ConfigurationSpace, CategoricalHyperparameter import time import warnings import os import numpy as np import pickle as pkl from sklearn.metrics.scorer import balanced_accuracy_scorer from solnml.utils.logging_utils import get_logger from solnml.components.evaluators.base_evaluator import _Base...
[ "solnml.components.feature_engineering.task_space.get_task_hyperparameter_space", "os.path.exists", "solnml.components.feature_engineering.parse.parse_config", "numpy.mean", "pickle.dump", "warnings.catch_warnings", "pickle.load", "sklearn.model_selection.ShuffleSplit", "solnml.components.utils.clas...
[((1220, 1266), 'solnml.components.utils.class_loader.get_combined_candidtates', 'get_combined_candidtates', (['_regressors', '_addons'], {}), '(_regressors, _addons)\n', (1244, 1266), False, 'from solnml.components.utils.class_loader import get_combined_candidtates\n'), ((1511, 1557), 'solnml.components.utils.class_lo...
# # MLDB-1104-input-data-spec.py # mldb.ai inc, 2015 # This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved. # import unittest import datetime import random from mldb import mldb, ResponseException class InputDataSpecTest(unittest.TestCase): @classmethod def setUpClass(cls): cls...
[ "mldb.mldb.create_dataset", "random.randint", "mldb.mldb.put", "mldb.mldb.run_tests", "datetime.datetime.now", "mldb.mldb.log", "mldb.mldb.get" ]
[((5813, 5829), 'mldb.mldb.run_tests', 'mldb.run_tests', ([], {}), '()\n', (5827, 5829), False, 'from mldb import mldb, ResponseException\n'), ((458, 529), 'mldb.mldb.create_dataset', 'mldb.create_dataset', (["{'type': 'sparse.mutable', 'id': 'kmeans_example'}"], {}), "({'type': 'sparse.mutable', 'id': 'kmeans_example'...
import os.path from pi3d import * from pi3d.Buffer import Buffer from pi3d.Shape import Shape from pi3d.Texture import Texture CUBE_PARTS = ['front', 'right', 'top', 'bottom', 'left', 'back'] BOTTOM_INDEX = 3 def loadECfiles(path, fname, suffix='jpg', nobottom=False): """Helper for loading environment cube faces. ...
[ "pi3d.Texture.Texture", "pi3d.Buffer.Buffer" ]
[((1046, 1056), 'pi3d.Texture.Texture', 'Texture', (['f'], {}), '(f)\n', (1053, 1056), False, 'from pi3d.Texture import Texture\n'), ((3594, 3666), 'pi3d.Buffer.Buffer', 'Buffer', (['self', 'self.vertices', 'self.tex_coords', 'self.indices', 'self.normals'], {}), '(self, self.vertices, self.tex_coords, self.indices, se...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ He copiado y modificado software ajeno. Gran parte de este script es una modificación y/o mejora del original, por ello doy los debidos créditos al autor del software original: MIT License Copyright (c) 2016 - 2017 <NAME> Permission is hereby granted, ...
[ "discord.ext.commands.group", "discord.utils.get", "discord.ext.commands.has_any_role", "discord.ext.commands.command" ]
[((1823, 1926), 'discord.ext.commands.group', 'commands.group', ([], {'hidden': '(True)', 'aliases': "['purgame']", 'description': '"""Elimina mis odiosos mensajes (MOD)"""'}), "(hidden=True, aliases=['purgame'], description=\n 'Elimina mis odiosos mensajes (MOD)')\n", (1837, 1926), False, 'from discord.ext import c...
#!/usr/bin/env python3 import argparse import gc import numpy as np import os import pandas as pd import pysam # Number of SVs to process before resetting pysam (close and re-open file). Avoids a memory leak in pysam. PYSAM_RESET_INTERVAL = 1000 def get_read_depth(df_subset, bam_file_name, mapq, ref_filename=None)...
[ "numpy.mean", "argparse.ArgumentParser", "pysam.AlignmentFile", "os.path.isfile", "numpy.zeros", "pandas.read_table", "numpy.std", "gc.collect" ]
[((899, 971), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['bam_file_name', '"""r"""'], {'reference_filename': 'ref_filename'}), "(bam_file_name, 'r', reference_filename=ref_filename)\n", (918, 971), False, 'import pysam\n'), ((1255, 1295), 'numpy.zeros', 'np.zeros', (['df_subset.shape[0]', 'np.float64'], {}), '(df_...
#!/usr/bin/env python import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() # Create the RenderWindow, Renderer and both Actors ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) iren = vtk.vtkRenderWindowInteractor() iren.SetRen...
[ "vtk.util.misc.vtkGetDataRoot", "vtk.vtkImageLuminance", "vtk.vtkImageActor", "vtk.vtkRenderWindowInteractor", "vtk.vtkRenderWindow", "vtk.vtkRenderer", "vtk.vtkTIFFReader" ]
[((119, 135), 'vtk.util.misc.vtkGetDataRoot', 'vtkGetDataRoot', ([], {}), '()\n', (133, 135), False, 'from vtk.util.misc import vtkGetDataRoot\n'), ((196, 213), 'vtk.vtkRenderer', 'vtk.vtkRenderer', ([], {}), '()\n', (211, 213), False, 'import vtk\n'), ((223, 244), 'vtk.vtkRenderWindow', 'vtk.vtkRenderWindow', ([], {})...
#!/usr/local/anaconda/bin/python #copyright: <NAME>, <EMAIL> from random import random from tkinter import * from copy import deepcopy class Boggle(): ''' @param dict F @param dict T @param int size @param int cellWidth @param array clone @param list soln @param cube ActionNow @...
[ "random.random" ]
[((2303, 2311), 'random.random', 'random', ([], {}), '()\n', (2309, 2311), False, 'from random import random\n')]
# Copyright 2021 The TensorFlow Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to ...
[ "tensorflow.expand_dims", "tensorflow.shape", "tensorflow.transpose", "tensorflow.math.sqrt", "tensorflow.math.square", "tensorflow.keras.utils.register_keras_serializable", "tensorflow.math.maximum", "tensorflow.math.reduce_mean", "tensorflow.math.greater_equal", "tensorflow.math.reduce_sum", "...
[((1521, 1585), 'tensorflow.keras.utils.register_keras_serializable', 'tf.keras.utils.register_keras_serializable', ([], {'package': '"""Similarity"""'}), "(package='Similarity')\n", (1563, 1585), True, 'import tensorflow as tf\n'), ((2697, 2761), 'tensorflow.keras.utils.register_keras_serializable', 'tf.keras.utils.re...
import hashlib import hmac from satang_pro_signer import preparer class Signer: def __init__(self, secret: bytes): self.secret = secret def sign(self, obj) -> bytes: parsed = preparer.Preparer(obj).encode() msg = bytes(parsed, encoding='utf-8') try: # better perf...
[ "satang_pro_signer.preparer.Preparer", "hmac.digest", "hmac.new" ]
[((347, 386), 'hmac.digest', 'hmac.digest', (['self.secret', 'msg', '"""sha512"""'], {}), "(self.secret, msg, 'sha512')\n", (358, 386), False, 'import hmac\n'), ((203, 225), 'satang_pro_signer.preparer.Preparer', 'preparer.Preparer', (['obj'], {}), '(obj)\n', (220, 225), False, 'from satang_pro_signer import preparer\n...
# pylint:disable=missing-module-docstring,missing-class-docstring,missing-function-docstring from django import forms #- from .base import compare_template, SimpleTestCase class DummyForm(forms.Form): choice1 = forms.ChoiceField( required=False, help_text="Optional helper text here") ch...
[ "django.forms.ChoiceField" ]
[((216, 288), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'required': '(False)', 'help_text': '"""Optional helper text here"""'}), "(required=False, help_text='Optional helper text here')\n", (233, 288), False, 'from django import forms\n'), ((328, 609), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], ...
from requests import post from random import randint from json import loads, dumps import asyncio,base64,glob,json,math,urllib3,os,pathlib,random,sys,concurrent.futures,time from tqdm import tqdm from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad urllib3.disable_warnings(urllib3.exceptions.Insecu...
[ "json.loads", "requests.post", "base64.b64encode", "json.dumps", "urllib3.disable_warnings", "Crypto.Cipher.AES.new", "Crypto.Util.Padding.unpad", "random.randint", "random._floor" ]
[((270, 337), 'urllib3.disable_warnings', 'urllib3.disable_warnings', (['urllib3.exceptions.InsecureRequestWarning'], {}), '(urllib3.exceptions.InsecureRequestWarning)\n', (294, 337), False, 'import asyncio, base64, glob, json, math, urllib3, os, pathlib, random, sys, concurrent.futures, time\n'), ((1188, 1228), 'Crypt...
# -*- coding: utf-8 -*- """ Functions to install dependencies for non-standard models (e.g., Centermask2) and get compatible Detectron2 configs for them. """ import sys import subprocess try: from detectron2.config import get_cfg except ModuleNotFoundError: print('WARNING: Detectron2 not installed on (virtual?...
[ "subprocess.run", "swint.add_swint_config", "sys.path.insert", "centermask.config.get_cfg" ]
[((1896, 1905), 'centermask.config.get_cfg', 'get_cfg', ([], {}), '()\n', (1903, 1905), False, 'from centermask.config import get_cfg\n'), ((1910, 1941), 'swint.add_swint_config', 'swint.add_swint_config', (['out_cfg'], {}), '(out_cfg)\n', (1932, 1941), False, 'import swint\n'), ((3067, 3076), 'centermask.config.get_cf...
#!/usr/bin/env python from preprocess import get_negative_samples, get_positive_samples from utils import init_spark from preprocess import get_dataset_df from pyspark.ml.classification import RandomForestClassifier from pyspark.ml.tuning import ParamGridBuilder, TrainValidationSplit, \ Cr...
[ "preprocess.get_positive_samples", "pyspark.ml.Pipeline", "class_weighter.ClassWeighter", "pyspark.ml.classification.RandomForestClassifier", "utils.init_spark", "preprocess.get_negative_samples", "preprocess.get_dataset_df", "random_forest.get_feature_importances" ]
[((532, 544), 'utils.init_spark', 'init_spark', ([], {}), '()\n', (542, 544), False, 'from utils import init_spark\n'), ((613, 640), 'preprocess.get_positive_samples', 'get_positive_samples', (['spark'], {}), '(spark)\n', (633, 640), False, 'from preprocess import get_negative_samples, get_positive_samples\n'), ((725, ...
import torch as th from unittest import TestCase from pro_gan_pytorch import CustomLayers as cL device = th.device("cuda" if th.cuda.is_available() else "cpu") class Test_equalized_conv2d(TestCase): def setUp(self): self.conv_block = cL._equalized_conv2d(21, 3, k_size=(3, 3), pad=1) # print th...
[ "pro_gan_pytorch.CustomLayers._equalized_deconv2d", "torch.isnan", "pro_gan_pytorch.CustomLayers._equalized_linear", "pro_gan_pytorch.CustomLayers.MinibatchStdDev", "torch.cuda.is_available", "pro_gan_pytorch.CustomLayers._equalized_conv2d", "torch.isinf", "pro_gan_pytorch.CustomLayers.PixelwiseNorm",...
[((127, 149), 'torch.cuda.is_available', 'th.cuda.is_available', ([], {}), '()\n', (147, 149), True, 'import torch as th\n'), ((251, 300), 'pro_gan_pytorch.CustomLayers._equalized_conv2d', 'cL._equalized_conv2d', (['(21)', '(3)'], {'k_size': '(3, 3)', 'pad': '(1)'}), '(21, 3, k_size=(3, 3), pad=1)\n', (271, 300), True,...
import os from serif.model.relation_mention_model import RelationMentionModel from serif.theory.enumerated_type import Tense, Modality # Modified from DogFoodFinderRelationMentionModel class AIDARelationMentionModel(RelationMentionModel): '''adds TACRED relations to TACRED entities''' def __init__(self, mapp...
[ "os.path.basename" ]
[((2648, 2676), 'os.path.basename', 'os.path.basename', (['words_file'], {}), '(words_file)\n', (2664, 2676), False, 'import os\n'), ((1895, 1923), 'os.path.basename', 'os.path.basename', (['words_file'], {}), '(words_file)\n', (1911, 1923), False, 'import os\n')]
import math import gmpy2 # How many you want to find MAX_COUNT = 500 K_COUNT = 3.7 # d = 1000 yields ~264 #for parallel C++ K_COST = 4.14 * 1e-11 # d = 5000 takes ~400s K_FILTER_COST = 1.0 * 1e-9 # d = 5000, sieve = 30M takes 10.3s def optimal_sieve(d, expected_cost): non_trivial_a_b = d * 2...
[ "gmpy2.next_prime", "math.sqrt", "gmpy2.mpz", "math.log" ]
[((468, 480), 'gmpy2.mpz', 'gmpy2.mpz', (['(5)'], {}), '(5)\n', (477, 480), False, 'import gmpy2\n'), ((1339, 1351), 'math.log', 'math.log', (['(10)'], {}), '(10)\n', (1347, 1351), False, 'import math\n'), ((2397, 2409), 'math.sqrt', 'math.sqrt', (['d'], {}), '(d)\n', (2406, 2409), False, 'import math\n'), ((570, 601),...
import pandas as pd import numpy as np function2idx = {"negative": 0, "ferritin": 1, "gpcr": 2, "p450": 3, "protease": 4} input_dir = '../data/raw/' data_dir = '../data/processed/' max_seq_len = 800 def read_and_concat_data(): df_cysteine = pd.read_csv(input_dir + 'uniprot-cysteine+protease+AND+reviewed_yes.tab...
[ "pandas.DataFrame", "pandas.concat", "numpy.savetxt", "pandas.read_csv" ]
[((3531, 3598), 'numpy.savetxt', 'np.savetxt', (["(data_dir + 'sequence.txt')", 'df.sequence.values'], {'fmt': '"""%s"""'}), "(data_dir + 'sequence.txt', df.sequence.values, fmt='%s')\n", (3541, 3598), True, 'import numpy as np\n'), ((3599, 3666), 'numpy.savetxt', 'np.savetxt', (["(data_dir + 'function.txt')", 'df.func...
import typing import re from .CD_relations import cardinal_relation, inverse_directions from .regions import Region, region_union from .expression_walker import PatternWalker from .expressions import Constant REFINE_OVERLAPPING = True class RegionSolver(PatternWalker[Region]): type_name = 'Region' def __n...
[ "re.search" ]
[((3071, 3096), 're.search', 're.search', (['regexp', 'k.name'], {}), '(regexp, k.name)\n', (3080, 3096), False, 'import re\n')]
# ----------------------------------------------------------------------------- # # P A G E B O T E X A M P L E S # # Copyright (c) 2017 <NAME> <https://github.com/thomgb> # www.pagebot.io # Licensed under MIT conditions # # Supporting DrawBot, www.drawbot.com # Supporting Flat, xxyxyz.org/flat...
[ "inspect.getmembers", "pagebot.getContext", "pagebot.toolbox.color.Color", "pagebot.fonttoolbox.objects.font.findFont", "inspect.isclass" ]
[((695, 707), 'pagebot.getContext', 'getContext', ([], {}), '()\n', (705, 707), False, 'from pagebot import getContext\n'), ((843, 875), 'pagebot.fonttoolbox.objects.font.findFont', 'findFont', (['"""BungeeInline-Regular"""'], {}), "('BungeeInline-Regular')\n", (851, 875), False, 'from pagebot.fonttoolbox.objects.font ...
import os import enum import hashlib from urllib.parse import urljoin from flask import url_for, current_app as app from mcarch.app import db, get_b2bucket class StoredFile(db.Model): """Represents a file stored in some sort of storage medium.""" __tablename__ = 'stored_file' id = db.Column(db.Integer, p...
[ "hashlib.sha256", "mcarch.app.db.relationship", "mcarch.app.db.String", "os.path.join", "mcarch.app.db.session.add", "urllib.parse.urljoin", "mcarch.app.get_b2bucket", "mcarch.app.db.session.commit", "mcarch.app.db.ForeignKey", "mcarch.app.db.Column" ]
[((297, 336), 'mcarch.app.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (306, 336), False, 'from mcarch.app import db, get_b2bucket\n'), ((543, 566), 'mcarch.app.db.relationship', 'db.relationship', (['"""User"""'], {}), "('User')\n", (558, 566), False, 'from ...
# Generated by Django 2.2 on 2020-03-08 16:45 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('mainapp', '0005_auto_20200308_1735'), ] operations = [ migrations.AlterField( model_name='clue', name='paragraph1', ...
[ "django.db.models.TextField" ]
[((336, 402), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""paragraph1"""'}), "(blank=True, null=True, verbose_name='paragraph1')\n", (352, 402), False, 'from django.db import migrations, models\n'), ((526, 592), 'django.db.models.TextField', 'models.Te...
import subprocess import scipy.io.wavfile as wav import sys import numpy as np # import pyaudio import time import wave import os from pydub import AudioSegment import pafy from youtube_transcript_api import YouTubeTranscriptApi from youtube_dl import YoutubeDL dirname = os.path.dirname(os.path.abspath(__file__)) s...
[ "youtube_transcript_api.YouTubeTranscriptApi.get_transcripts", "youtube_dl.YoutubeDL", "os.path.dirname", "os.path.abspath", "sys.path.append" ]
[((319, 343), 'sys.path.append', 'sys.path.append', (['dirname'], {}), '(dirname)\n', (334, 343), False, 'import sys\n'), ((292, 317), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (307, 317), False, 'import os\n'), ((486, 557), 'youtube_transcript_api.YouTubeTranscriptApi.get_transcripts', ...
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import wagtail.wagtailcore.fields class Migration(migrations.Migration): dependencies = [ ('cms_pages', '0005_auto_20150829_1516'), ] operations = [ migrations.AddField( ...
[ "django.db.models.CharField" ]
[((722, 766), 'django.db.models.CharField', 'models.CharField', ([], {'default': '""""""', 'max_length': '(255)'}), "(default='', max_length=255)\n", (738, 766), False, 'from django.db import migrations, models\n')]
from autorop import PwnState, arutil from pwn import ROP def puts(state: PwnState) -> PwnState: """Leak libc addresses using ``puts``. This function leaks the libc addresses of ``__libc_start_main`` and ``puts`` using ``puts``, placing them in ``state.leaks``. Arguments: state: The current `...
[ "autorop.arutil.align_call", "autorop.arutil.leak_helper" ]
[((1209, 1254), 'autorop.arutil.leak_helper', 'arutil.leak_helper', (['state', 'leaker', 'LEAK_FUNCS'], {}), '(state, leaker, LEAK_FUNCS)\n', (1227, 1254), False, 'from autorop import PwnState, arutil\n'), ((1136, 1177), 'autorop.arutil.align_call', 'arutil.align_call', (['rop', '"""puts"""', '[address]'], {}), "(rop, ...
# Copyright 2013-2014 Massachusetts Open Cloud Contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the # License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicab...
[ "haas.api.node_register_nic", "haas.api.node_register", "haas.api.node_connect_network", "haas.api.project_detach_node", "haas.config.cfg.add_section", "functools.wraps", "haas.config.cfg.set", "haas.api.port_register", "haas.api.project_create", "haas.api.project_connect_node", "haas.api.port_c...
[((1538, 1546), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (1543, 1546), False, 'from functools import wraps\n'), ((1912, 1947), 'haas.api.project_create', 'api.project_create', (['"""anvil-nextgen"""'], {}), "('anvil-nextgen')\n", (1930, 1947), False, 'from haas import model, api\n'), ((2398, 2449), 'haas.api.p...
import numpy as np import matplotlib.pyplot as plt from extract import HurricaneExtraction #npy_file = './Data/NpyData/LIDIA/20172450002.npz' npy_file = './Data/NpyData/IRMA/20172531622.npz' data = HurricaneExtraction.read_extraction_data(npy_file) data = HurricaneExtraction.normalize_using_physics(data) for d in ...
[ "matplotlib.pyplot.imshow", "extract.HurricaneExtraction.normalize_using_physics", "matplotlib.pyplot.figure", "extract.HurricaneExtraction.read_extraction_data", "matplotlib.pyplot.show" ]
[((201, 251), 'extract.HurricaneExtraction.read_extraction_data', 'HurricaneExtraction.read_extraction_data', (['npy_file'], {}), '(npy_file)\n', (241, 251), False, 'from extract import HurricaneExtraction\n'), ((260, 309), 'extract.HurricaneExtraction.normalize_using_physics', 'HurricaneExtraction.normalize_using_phys...
from urllib.request import urlopen import json import re url = urlopen("https://raw.githubusercontent.com/Templarian/MaterialDesign/master/meta.json") meta = [(i['name'], i['codepoint']) for i in json.loads(url.read()) if re.search('^weather-', i['name'])] print('''--- esphome: # ... includes: - weather_icon_...
[ "urllib.request.urlopen", "re.search" ]
[((64, 161), 'urllib.request.urlopen', 'urlopen', (['"""https://raw.githubusercontent.com/Templarian/MaterialDesign/master/meta.json"""'], {}), "(\n 'https://raw.githubusercontent.com/Templarian/MaterialDesign/master/meta.json'\n )\n", (71, 161), False, 'from urllib.request import urlopen\n'), ((223, 256), 're.se...
# common functions import sys import json # taken from sp_lib def read_json_file(file_path): try: with open(file_path, 'r') as json_file: readstr = json_file.read() json_dict = json.loads(readstr) return json_dict except OSError as e: print('Unable to read url js...
[ "json.loads", "json.dump" ]
[((214, 233), 'json.loads', 'json.loads', (['readstr'], {}), '(readstr)\n', (224, 233), False, 'import json\n'), ((497, 583), 'json.dump', 'json.dump', (['src_dict', 'json_file'], {'ensure_ascii': '(False)', 'indent': '(2)', 'sort_keys': 'sort_keys'}), '(src_dict, json_file, ensure_ascii=False, indent=2, sort_keys=\n ...
# -*- encoding: utf-8 -*- import dsl from shapely.wkt import loads as wkt_loads from . import FixtureTest class SuppressHistoricalClosed(FixtureTest): def test_cartoon_museum(self): # Cartoon Art Museum (closed) self.generate_fixtures(dsl.way(368173967, wkt_loads('POINT (-122.400856246311 37.786...
[ "shapely.wkt.loads" ]
[((278, 334), 'shapely.wkt.loads', 'wkt_loads', (['"""POINT (-122.400856246311 37.78696485494709)"""'], {}), "('POINT (-122.400856246311 37.78696485494709)')\n", (287, 334), True, 'from shapely.wkt import loads as wkt_loads\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Sat Apr 13 11:03:51 2019 @author: ivanpauno """ import matplotlib.pyplot as plt import numpy as np def main(): # A = sqrt(10^(.1*alpha_min-1)/10^(.1*alpha_max-1)) A = np.logspace(np.log10(2), np.log10(100), num=200) ws_array = [1.1, 1.5, 2, 3...
[ "numpy.ceil", "numpy.log10", "numpy.log", "numpy.arccosh", "matplotlib.pyplot.subplots", "matplotlib.pyplot.show" ]
[((526, 543), 'numpy.ceil', 'np.ceil', (['n_butter'], {}), '(n_butter)\n', (533, 543), True, 'import numpy as np\n'), ((558, 574), 'numpy.ceil', 'np.ceil', (['n_cheby'], {}), '(n_cheby)\n', (565, 574), True, 'import numpy as np\n'), ((973, 983), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (981, 983), True, ...
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ _____________________________________________________________________________ Created By : <NAME> - Bacnv6 Created Date: Mon November 03 10:00:00 VNT 2020 Project : AkaOCR core _____________________________________________________________________________ Thi...
[ "signal.signal", "torch.cuda.max_memory_allocated", "torch.cuda.reset_max_memory_allocated", "torch.cuda.synchronize", "gc.collect", "signal.alarm", "time.time", "torch.cuda.empty_cache" ]
[((1536, 1581), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'signal_handler'], {}), '(signal.SIGALRM, signal_handler)\n', (1549, 1581), False, 'import signal\n'), ((1587, 1608), 'signal.alarm', 'signal.alarm', (['seconds'], {}), '(seconds)\n', (1599, 1608), False, 'import signal\n'), ((1766, 1778), 'gc.collec...
# -*- coding: utf-8 -*- ''' @time: 2019/9/8 18:45 @ author: javis ''' import os class Config: # for data_process.py #root = r'D:\ECG' root = r'data' train_dir = os.path.join(root, 'ecg_data/') # test_dir = os.path.join(root, 'ecg_data/testA') # train_label = os.path.join(root, 'hf_round1_labe...
[ "os.path.join" ]
[((180, 211), 'os.path.join', 'os.path.join', (['root', '"""ecg_data/"""'], {}), "(root, 'ecg_data/')\n", (192, 211), False, 'import os\n'), ((469, 499), 'os.path.join', 'os.path.join', (['root', '"""ecg_data"""'], {}), "(root, 'ecg_data')\n", (481, 499), False, 'import os\n'), ((1057, 1083), 'os.path.join', 'os.path.j...
import cv2 import matplotlib.pyplot as plt import easyocr reader = easyocr.Reader(['en'], gpu=False) image = cv2.imread('results/JK_21_05/page_1.jpg') gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) dilated = cv2.dilate(image, None, iterations=1) eroded = cv2.erode(image, None, iterations=1) res = reader.readtext(erod...
[ "cv2.rectangle", "cv2.erode", "cv2.imshow", "easyocr.Reader", "cv2.putText", "cv2.waitKey", "cv2.destroyAllWindows", "cv2.cvtColor", "cv2.dilate", "cv2.imread" ]
[((67, 100), 'easyocr.Reader', 'easyocr.Reader', (["['en']"], {'gpu': '(False)'}), "(['en'], gpu=False)\n", (81, 100), False, 'import easyocr\n'), ((109, 150), 'cv2.imread', 'cv2.imread', (['"""results/JK_21_05/page_1.jpg"""'], {}), "('results/JK_21_05/page_1.jpg')\n", (119, 150), False, 'import cv2\n'), ((159, 198), '...
from PyQt5.QtCore import QAbstractTableModel, QAbstractItemModel from PyQt5.QtCore import Qt, QModelIndex, pyqtSlot class LoadTypesProcess(QAbstractTableModel): def __init__(self): super().__init__() self.csv_values = [] self.header_model = HeaderModel() def index(self, row, column, p...
[ "PyQt5.QtCore.QAbstractTableModel.flags", "PyQt5.QtCore.QModelIndex" ]
[((326, 339), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (337, 339), False, 'from PyQt5.QtCore import Qt, QModelIndex, pyqtSlot\n'), ((418, 431), 'PyQt5.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (429, 431), False, 'from PyQt5.QtCore import Qt, QModelIndex, pyqtSlot\n'), ((504, 517), 'PyQt5...
#!/usr/bin/env python # mix of: # https://www.programcreek.com/python/example/88577/gi.repository.Gst.Pipeline # https://github.com/GStreamer/gst-python/blob/master/examples/helloworld.py # http://lifestyletransfer.com/how-to-launch-gstreamer-pipeline-in-python/ import sys import collections from pprint import pprint...
[ "collections.namedtuple", "gi.repository.Gst.init", "gi.repository.Gst.Pipeline", "gi.repository.Gst.ElementFactory.make", "gi.repository.GLib.MainLoop", "gi.require_version", "sys.stderr.write", "sys.exit", "pprint.pprint", "sys.stdout.write" ]
[((332, 364), 'gi.require_version', 'gi.require_version', (['"""Gst"""', '"""1.0"""'], {}), "('Gst', '1.0')\n", (350, 364), False, 'import gi\n'), ((804, 818), 'gi.repository.Gst.init', 'Gst.init', (['None'], {}), '(None)\n', (812, 818), False, 'from gi.repository import GObject, Gst, GLib\n'), ((912, 969), 'collection...
# -*- coding: utf-8 -*- u"""Test simulationSerial :copyright: Copyright (c) 2016 RadiaSoft LLC. All Rights Reserved. :license: http://www.apache.org/licenses/LICENSE-2.0.html """ from __future__ import absolute_import, division, print_function import pytest pytest.importorskip('srwl_bl') #: Used for a sanity check o...
[ "pykern.pkdebug.pkdpretty", "sirepo.sr_unit.flask_client", "pykern.pkunit.pkok", "pytest.importorskip", "pykern.pkconfig.reset_state_for_testing", "copy.deepcopy", "re.search" ]
[((260, 290), 'pytest.importorskip', 'pytest.importorskip', (['"""srwl_bl"""'], {}), "('srwl_bl')\n", (279, 290), False, 'import pytest\n'), ((536, 558), 'sirepo.sr_unit.flask_client', 'sr_unit.flask_client', ([], {}), '()\n', (556, 558), False, 'from sirepo import sr_unit\n'), ((1102, 1121), 'copy.deepcopy', 'copy.dee...
import unittest from kubragen import KubraGen from kubragen.jsonpatch import FilterJSONPatches_Apply, ObjectFilter, FilterJSONPatch from kubragen.provider import Provider_Generic from kg_nodeexporter import NodeExporterBuilder, NodeExporterOptions class TestBuilder(unittest.TestCase): def setUp(self): s...
[ "kg_nodeexporter.NodeExporterBuilder", "kubragen.jsonpatch.ObjectFilter", "kg_nodeexporter.NodeExporterOptions", "kubragen.provider.Provider_Generic" ]
[((424, 461), 'kg_nodeexporter.NodeExporterBuilder', 'NodeExporterBuilder', ([], {'kubragen': 'self.kg'}), '(kubragen=self.kg)\n', (443, 461), False, 'from kg_nodeexporter import NodeExporterBuilder, NodeExporterOptions\n'), ((347, 365), 'kubragen.provider.Provider_Generic', 'Provider_Generic', ([], {}), '()\n', (363, ...
import os import json import time import torch # Called when the deployed service starts def init(): global model global device # Get the path where the deployed model can be found. model_filename = 'obj_segmentation.pkl' model_path = os.path.join(os.environ['AZUREML_MODEL_DIR'], model_filename) ...
[ "json.loads", "torch.load", "os.path.join", "torch.tensor", "torch.cuda.is_available", "torch.no_grad", "time.time", "torch.device" ]
[((257, 318), 'os.path.join', 'os.path.join', (["os.environ['AZUREML_MODEL_DIR']", 'model_filename'], {}), "(os.environ['AZUREML_MODEL_DIR'], model_filename)\n", (269, 318), False, 'import os\n'), ((420, 463), 'torch.load', 'torch.load', (['model_path'], {'map_location': 'device'}), '(model_path, map_location=device)\n...
from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ This module contains (and isolates) logic used to find entities based on entity type, list selection criteria and search terms. """ __author__ = "<NAME> (<EMAIL>)" __copyright__ = "Copyright 2014, <NAME...
[ "logging.getLogger", "pyparsing.QuotedString", "annalist.models.entitytypeinfo.EntityTypeInfo", "annalist.util.valid_id", "pyparsing.Group", "pyparsing.Word", "utils.py3porting.is_string", "doctest.testmod", "pyparsing.Literal", "pyparsing.StringEnd" ]
[((406, 433), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (423, 433), False, 'import logging\n'), ((20952, 20969), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (20967, 20969), False, 'import doctest\n'), ((2941, 2981), 'annalist.models.entitytypeinfo.EntityTypeInfo', 'Entity...
import argparse import h5py import sys import os from savu.version import __version__ class NXcitation(object): def __init__(self, description, doi, endnote, bibtex): self.description = description.decode('UTF-8') self.doi = doi.decode('UTF-8') self.endnote = endnote.decode('UTF-8') ...
[ "os.path.exists", "argparse.ArgumentParser", "os.path.join", "h5py.File", "os.path.dirname", "sys.exit" ]
[((3175, 3200), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3198, 3200), False, 'import argparse\n'), ((3878, 3901), 'h5py.File', 'h5py.File', (['in_file', '"""r"""'], {}), "(in_file, 'r')\n", (3887, 3901), False, 'import h5py\n'), ((2838, 2849), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\...
import time import pymysql # for pulling UCSC data import pandas as pd from pathlib import Path import logging # app from .progress_bar import * # tqdm, context-friendly LOGGER = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) logging.getLogger('numexpr').setLevel(logging.WARNING) # these login st...
[ "logging.getLogger", "pandas.read_pickle", "logging.basicConfig", "pickle.dump", "pandas.read_csv", "pathlib.Path", "pymysql.connect", "pandas.DataFrame.from_dict", "time.time" ]
[((180, 207), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (197, 207), False, 'import logging\n'), ((208, 247), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (227, 247), False, 'import logging\n'), ((7686, 7726), 'pathlib.Path',...
# encoding: utf-8 """ @author: BrikerMan @contact: <EMAIL> @blog: https://eliyar.biz @version: 1.0 @license: Apache Licence @file: test.py.py @time: 2019-01-25 14:43 """ import unittest from tests import * from kashgari.utils.logger import init_logger init_logger() if __name__ == '__main__': unittest.main()
[ "unittest.main", "kashgari.utils.logger.init_logger" ]
[((255, 268), 'kashgari.utils.logger.init_logger', 'init_logger', ([], {}), '()\n', (266, 268), False, 'from kashgari.utils.logger import init_logger\n'), ((301, 316), 'unittest.main', 'unittest.main', ([], {}), '()\n', (314, 316), False, 'import unittest\n')]
from itertools import count, tee class Bouncy: def __init__(self, porcentage): """ print the number bouncy :type porcentage: int -> this is porcentage of the bouncy """ nums = count(1) rebound = self.sum_number(map(lambda number: float(self.is_rebound(number)), coun...
[ "itertools.count", "itertools.tee" ]
[((222, 230), 'itertools.count', 'count', (['(1)'], {}), '(1)\n', (227, 230), False, 'from itertools import count, tee\n'), ((767, 780), 'itertools.tee', 'tee', (['iterable'], {}), '(iterable)\n', (770, 780), False, 'from itertools import count, tee\n'), ((316, 324), 'itertools.count', 'count', (['(1)'], {}), '(1)\n', ...
import tkinter as tk class View(): def __init__(self): window = tk.Tk() self.frame = tk.Frame(master=window, width=200, height=200) self.frame.pack() def show_grid(self, grid): for i in range(4): for j in range(4): label = tk.Label(master=self.frame,...
[ "tkinter.Tk", "tkinter.Frame", "tkinter.Label" ]
[((77, 84), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (82, 84), True, 'import tkinter as tk\n'), ((106, 152), 'tkinter.Frame', 'tk.Frame', ([], {'master': 'window', 'width': '(200)', 'height': '(200)'}), '(master=window, width=200, height=200)\n', (114, 152), True, 'import tkinter as tk\n'), ((293, 337), 'tkinter.Label'...
import numpy as np def eval_rerr(X, X_hat, X0=None): """ :param X: tensor, X0 or X0+noise :param X_hat: output for apporoximation :param X0: true signal, tensor :return: the relative error = ||X- X_hat||_F/ ||X_0||_F """ if X0 is not None: error = X0 - X_hat return np.linalg....
[ "numpy.size" ]
[((493, 507), 'numpy.size', 'np.size', (['error'], {}), '(error)\n', (500, 507), True, 'import numpy as np\n'), ((561, 571), 'numpy.size', 'np.size', (['X'], {}), '(X)\n', (568, 571), True, 'import numpy as np\n'), ((339, 353), 'numpy.size', 'np.size', (['error'], {}), '(error)\n', (346, 353), True, 'import numpy as np...
"""helpers module """ import json import pcap import yaml def get_adapters_names(): """Finds all adapters on the system :return: A list of the network adapters available on the system """ return pcap.findalldevs() def config_loader_yaml(config_name): """Loads a .yml configuration file :p...
[ "pcap.findalldevs", "yaml.load" ]
[((216, 234), 'pcap.findalldevs', 'pcap.findalldevs', ([], {}), '()\n', (232, 234), False, 'import pcap\n'), ((516, 537), 'yaml.load', 'yaml.load', (['config_yml'], {}), '(config_yml)\n', (525, 537), False, 'import yaml\n')]
import spacy def find_entities(input_phrase, language): models = { 'en': 'en_core_web_sm', 'pl': 'pl_core_news_sm', 'fr': 'fr_core_news_sm', 'de': 'de_core_news_sm', 'it': 'it_core_news_sm', } if language in models: nlp = spacy.load(models[language]) doc = nlp(input_phrase)...
[ "spacy.load" ]
[((264, 292), 'spacy.load', 'spacy.load', (['models[language]'], {}), '(models[language])\n', (274, 292), False, 'import spacy\n')]
# -*- coding:utf-8 -*- # Usage : python ~~.py import sys import os import pickle import collections import pandas as pd import numpy as np from itertools import chain from itertools import combinations from itertools import compress from itertools import product from sklearn.metrics import accuracy_score from multiproc...
[ "pickle.dump", "multiprocessing.freeze_support", "pickle.load", "multiprocessing.Pool" ]
[((3712, 3722), 'multiprocessing.Pool', 'Pool', (['proc'], {}), '(proc)\n', (3716, 3722), False, 'from multiprocessing import Pool\n'), ((4564, 4580), 'multiprocessing.freeze_support', 'freeze_support', ([], {}), '()\n', (4578, 4580), False, 'from multiprocessing import freeze_support\n'), ((2121, 2143), 'pickle.load',...
from directory_forms_api_client.actions import PardotAction from directory_forms_api_client.helpers import Sender from django.conf import settings from django.http import HttpResponseRedirect from django.shortcuts import redirect from django.urls import reverse, reverse_lazy from django.utils.decorators import method_d...
[ "django.urls.reverse", "django.utils.decorators.method_decorator", "django.urls.reverse_lazy", "django.shortcuts.redirect", "directory_forms_api_client.helpers.Sender", "core.datastructures.NotifySettings" ]
[((2376, 2422), 'django.utils.decorators.method_decorator', 'method_decorator', (['never_cache'], {'name': '"""dispatch"""'}), "(never_cache, name='dispatch')\n", (2392, 2422), False, 'from django.utils.decorators import method_decorator\n'), ((1414, 1464), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""domestic:uk-...
import unicodedata import warnings import logging import re import argparse import sbol3 import openpyxl import tyto from .helper_functions import toplevel_named, strip_sbol2_version, is_plasmid, url_to_identity, strip_filetype_suffix from .workarounds import type_to_standard_extension BASIC_PARTS_COLLECTION = 'Basi...
[ "logging.getLogger", "sbol3.SubComponent", "logging.debug", "re.compile", "sbol3.string_to_display_id", "sbol3.CombinatorialDerivation", "logging.info", "tyto.SO.get_uri_by_term", "argparse.ArgumentParser", "sbol3.VariableFeature", "sbol3.Document", "sbol3.Collection", "sbol3.LocalSubCompone...
[((8982, 9006), 're.compile', 're.compile', (['"""RC\\\\(.+\\\\)"""'], {}), "('RC\\\\(.+\\\\)')\n", (8992, 9006), False, 'import re\n'), ((11408, 11450), 're.compile', 're.compile', (['"""Part (\\\\d+) (.+) Part (\\\\d+)"""'], {}), "('Part (\\\\d+) (.+) Part (\\\\d+)')\n", (11418, 11450), False, 'import re\n'), ((3107,...
import copy import json import os from collections import UserDict from signalworks.tracking import Event, Partition, TimeValue, Value, Wave class MultiTrack(UserDict): """ A dictionary containing time-synchronous tracks of equal duration and fs """ def __init__(self, mapping=None): if mappi...
[ "openxdf.OpenXDF", "collections.UserDict", "os.path.splitext", "json.load", "collections.UserDict.__setitem__", "copy.deepcopy", "collections.UserDict.__init__", "json.dump" ]
[((373, 405), 'collections.UserDict.__init__', 'UserDict.__init__', (['self', 'mapping'], {}), '(self, mapping)\n', (390, 405), False, 'from collections import UserDict\n'), ((2522, 2560), 'collections.UserDict.__setitem__', 'UserDict.__setitem__', (['self', 'key', 'value'], {}), '(self, key, value)\n', (2542, 2560), F...
import cv2 import matplotlib.pyplot as plt import glob import os filepath ="afm_dataset4/20211126/" files = [line.rstrip() for line in open((filepath+"sep_trainlist.txt"))] files = glob.glob("orig_img/20211112/*") def variance_of_laplacian(image): # compute the Laplacian of the image and then return the focus # me...
[ "cv2.imwrite", "cv2.Laplacian", "os.makedirs", "matplotlib.pyplot.figure", "cv2.cvtColor", "cv2.imread", "glob.glob" ]
[((184, 216), 'glob.glob', 'glob.glob', (['"""orig_img/20211112/*"""'], {}), "('orig_img/20211112/*')\n", (193, 216), False, 'import glob\n'), ((1434, 1446), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1444, 1446), True, 'import matplotlib.pyplot as plt\n'), ((492, 508), 'cv2.imread', 'cv2.imread', (['...
import os def check_path(path): if not path or not path.strip() or os.path.exists(path): return os.makedirs(path) pass
[ "os.path.exists", "os.makedirs" ]
[((113, 130), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (124, 130), False, 'import os\n'), ((72, 92), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (86, 92), False, 'import os\n')]
# Author: <NAME>, <NAME>, <NAME> # Date: 2020/11/27 """Compare the performance of different classifier and train the best model given cross_validate results . Usage: src/clf_comparison.py <input_file> <input_file1> <output_file> <output_file1> Options: <input_file> Path (including filename and file extension) to ...
[ "sklearn.model_selection.GridSearchCV", "sklearn.svm.SVC", "sklearn.metrics.f1_score", "pandas.read_csv", "pandas.DataFrame", "sklearn.model_selection.cross_validate", "sklearn.neighbors.KNeighborsClassifier", "sklearn.metrics.make_scorer", "sklearn.ensemble.RandomForestClassifier", "sklearn.linea...
[((1218, 1233), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (1224, 1233), False, 'from docopt import docopt\n'), ((1179, 1210), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (1200, 1210), False, 'import warnings\n'), ((1341, 1364), 'pandas.read_csv', 'pd.read...
import glob import os import numpy as np import nibabel as nb import argparse def get_dir_list(train_path): fnames = glob.glob(train_path) list_train = [] for k, f in enumerate(fnames): list_train.append(os.path.split(f)[0]) return list_train def ParseData(list_data): ''' Creates a...
[ "argparse.ArgumentParser", "nibabel.load", "os.path.join", "os.path.split", "numpy.sum", "numpy.concatenate", "numpy.expand_dims", "numpy.save", "glob.glob" ]
[((2640, 2665), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2663, 2665), False, 'import argparse\n'), ((122, 143), 'glob.glob', 'glob.glob', (['train_path'], {}), '(train_path)\n', (131, 143), False, 'import glob\n'), ((1359, 1376), 'nibabel.load', 'nb.load', (['fname[0]'], {}), '(fname[0])...
#moduleForShowingJudges #cmd /K "$(FULL_CURRENT_PATH)" #cd ~/Documents/GitHub/Keyboard-Biometric-Project/Project_Tuples #sudo python -m pip install statistics #python analyzeData.py """ Author: <NAME> and <NAME> Date: 3/09/2018 Program Description: This code can record the Press Time and Flight Time of a tuple as a ...
[ "moduleForDeconstructingTimelines.userSummary", "moduleForDeconstructingTimelines.clearAll", "moduleForAuthenticatingUsers.verify", "moduleForRecordingWithGUI.start_recording", "os.chdir", "platform.system", "moduleForSavingTimelines.saveTimeLine", "moduleForPlotting.plot" ]
[((1745, 1773), 'moduleForRecordingWithGUI.start_recording', 'GUI.start_recording', (['passage'], {}), '(passage)\n', (1764, 1773), True, 'import moduleForRecordingWithGUI as GUI\n'), ((1774, 1796), 'os.chdir', 'os.chdir', (['"""judgeslib/"""'], {}), "('judgeslib/')\n", (1782, 1796), False, 'import os\n'), ((1797, 1862...
import unittest def get_formatted_name(first, last, middle = ""): """生成整洁的姓名""" if middle: full_name = f"{first} {middle} {last}" else: full_name = f"{first} {last}" return full_name.title() class NamesTestCase(unittest.TestCase): #创建一个测试类,继承于unittest.TestCase 这样才能Python自动测试 """测试...
[ "unittest.main" ]
[((903, 918), 'unittest.main', 'unittest.main', ([], {}), '()\n', (916, 918), False, 'import unittest\n')]
from __future__ import unicode_literals from django.db import models from hospital.models import Hospital # Create your models here. class Donor(models.Model): name = models.CharField(max_length = 200) username = models.CharField(max_length = 200) password = models.CharField(max_length = 200) gender =...
[ "django.db.models.DecimalField", "django.db.models.DateField", "django.db.models.CharField", "django.db.models.ForeignKey" ]
[((173, 205), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (189, 205), False, 'from django.db import models\n'), ((223, 255), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (239, 255), False, 'from django.d...
from unittest import TestCase from lf3py.test.helper import data_provider from tests.helper.example.flowapi import perform_api class TestHandler(TestCase): @data_provider([ ( { 'path': '/models', 'httpMethod': 'GET', 'headers': {}, ...
[ "lf3py.test.helper.data_provider", "tests.helper.example.flowapi.perform_api" ]
[((165, 383), 'lf3py.test.helper.data_provider', 'data_provider', (["[({'path': '/models', 'httpMethod': 'GET', 'headers': {},\n 'queryStringParameters': {}}, {'statusCode': 200, 'headers': {\n 'Content-Type': 'application/json'}, 'body': {'models': [{'id': 1234}]}})]"], {}), "([({'path': '/models', 'httpMethod':...
from anndata import AnnData import numpy as np import pandas as pd from scipy.sparse import csr_matrix from joblib import delayed from tqdm import tqdm import sys import igraph from .utils import ProgressParallel from .. import logging as logg from .. import settings def pseudotime(adata: AnnData, n_jobs: int = 1, ...
[ "pandas.Series", "numpy.sqrt", "numpy.apply_along_axis", "numpy.argwhere", "joblib.delayed", "pandas.concat" ]
[((3533, 3565), 'pandas.Series', 'pd.Series', ([], {'index': 'adata.obs_names'}), '(index=adata.obs_names)\n', (3542, 3565), True, 'import pandas as pd\n'), ((7500, 7513), 'pandas.concat', 'pd.concat', (['df'], {}), '(df)\n', (7509, 7513), True, 'import pandas as pd\n'), ((2709, 2751), 'pandas.concat', 'pd.concat', (['...
# coding=utf-8 # Copyright 2021 The OneFlow Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless require...
[ "oneflow.arange", "oneflow.Generator", "oneflow.randperm" ]
[((5675, 5691), 'oneflow.Generator', 'flow.Generator', ([], {}), '()\n', (5689, 5691), True, 'import oneflow as flow\n'), ((2821, 2837), 'oneflow.Generator', 'flow.Generator', ([], {}), '()\n', (2835, 2837), True, 'import oneflow as flow\n'), ((5762, 5809), 'oneflow.randperm', 'flow.randperm', (['bucket_size'], {'gener...
from os import path from setuptools import find_namespace_packages, setup this_directory = path.abspath(path.dirname(__file__)) with open('README.md', encoding='utf-8') as f: long_description = f.read() setup( name='lambda-learner', namespace_packages=['linkedin'], version='0.0.1', long_description...
[ "setuptools.find_namespace_packages", "os.path.dirname" ]
[((104, 126), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (116, 126), False, 'from os import path\n'), ((723, 785), 'setuptools.find_namespace_packages', 'find_namespace_packages', ([], {'where': '"""src"""', 'exclude': "['test*', 'doc']"}), "(where='src', exclude=['test*', 'doc'])\n", (746, ...
# -*- coding: utf-8 -*- """ Independent model based on Geodesic Regression model R_G """ import torch from torch import nn, optim from torch.autograd import Variable from torch.utils.data import DataLoader import torch.nn.functional as F from dataGenerators import ImagesAll, TestImages, my_collate from axisAngle impo...
[ "poseModels.model_3layer", "scipy.io.savemat", "torch.nn.CrossEntropyLoss", "torch.nn.MSELoss", "progressbar.ProgressBar", "tensorboardX.SummaryWriter", "argparse.ArgumentParser", "axisAngle.geodesic_loss", "numpy.stack", "numpy.concatenate", "featureModels.resnet_model", "torch.nn.functional....
[((620, 681), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Pure Regression Models"""'}), "(description='Pure Regression Models')\n", (643, 681), False, 'import argparse\n'), ((1552, 1590), 'os.path.join', 'os.path.join', (['"""results"""', 'args.save_str'], {}), "('results', args.save_...
# from django.shortcuts import render, redirect, get_object_or_404 from .forms import CharacterForm from rick_and_morty_app.models import Character from django.views.generic import ListView, CreateView, UpdateView, DetailView, DeleteView from django.urls import reverse_lazy # new # Create your views here. class HomeP...
[ "django.urls.reverse_lazy" ]
[((556, 586), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""character_list"""'], {}), "('character_list')\n", (568, 586), False, 'from django.urls import reverse_lazy\n'), ((851, 881), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""character_list"""'], {}), "('character_list')\n", (863, 881), False, 'from django...
from apps.flow.settings import config if config.SERVER_ENV != 'dev': from gevent import monkey monkey.patch_all() else: pass from apps.flow.views.deploy import deploy from apps.flow.views.flow import flow from library.api.tFlask import tflask def create_app(): app = tflask(config) register_blue...
[ "library.api.tFlask.tflask", "gevent.monkey.patch_all" ]
[((105, 123), 'gevent.monkey.patch_all', 'monkey.patch_all', ([], {}), '()\n', (121, 123), False, 'from gevent import monkey\n'), ((288, 302), 'library.api.tFlask.tflask', 'tflask', (['config'], {}), '(config)\n', (294, 302), False, 'from library.api.tFlask import tflask\n')]
""" File to house a requester connection """ from logging import getLogger import zmq from service_framework.utils.connection_utils import BaseConnection from service_framework.utils.msgpack_utils import msg_pack, msg_unpack from service_framework.utils.socket_utils import get_requester_socket LOG = getLogger(__name...
[ "logging.getLogger", "service_framework.utils.socket_utils.get_requester_socket", "zmq.Context", "service_framework.utils.msgpack_utils.msg_pack" ]
[((304, 323), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (313, 323), False, 'from logging import getLogger\n'), ((3578, 3591), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (3589, 3591), False, 'import zmq\n'), ((3615, 3678), 'service_framework.utils.socket_utils.get_requester_socket', 'get...
# from sklearn.cluster._kmeans import * import copy from typing import Union import torch import torch.nn as nn from sklearn.cluster._robustq import * from .quantizer import Quantizer __all__ = ['MiniBatchRobustqTorch', 'RobustqTorch'] class ClusterQuantizerBase(Quantizer): def __init__(self, n_feature=1, n_cl...
[ "utee.misc.time_measurement", "module.quantization.quant_functions.linear_quantize", "module.quantization.quant_functions.compute_integral_part", "torch.as_tensor", "numpy.unique", "torch.set_printoptions", "torch.device", "sklearn.show_versions", "torch.from_numpy", "torch.tensor", "torch.cuda....
[((11625, 11684), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'formatter': "{'float': '{: 0.3f}'.format}"}), "(formatter={'float': '{: 0.3f}'.format})\n", (11644, 11684), True, 'import numpy as np\n'), ((11689, 11714), 'torch.set_printoptions', 'torch.set_printoptions', (['(3)'], {}), '(3)\n', (11711, 11714)...
import time import os import spidev as SPI import SSD1306 from PIL import Image, ImageDraw, ImageFont # 调用相关库文件 from datetime import datetime PATH = os.path.dirname(__file__) RST = 19 DC = 16 bus = 0 device = 0 # 树莓派管脚配置 disp = SSD1306.SSD1306(rst=RST, dc=DC, spi=SPI.SpiDev(bus, device)) disp.begin() disp.clear() ...
[ "datetime.datetime", "spidev.SpiDev", "PIL.Image.new", "os.path.join", "PIL.ImageFont.truetype", "time.sleep", "os.path.dirname", "PIL.ImageDraw.Draw", "datetime.datetime.now" ]
[((152, 177), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (167, 177), False, 'import os\n'), ((346, 360), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (358, 360), False, 'from datetime import datetime\n'), ((704, 745), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""com...
# -*- coding: utf-8 -*- import torch import argparse import numpy as np from model import PointCloudNet from code.utils import fp_sampling, knn_patch, helper_function import os parser = argparse.ArgumentParser() parser.add_argument('--num_points', default=1024, type=int, help='Number of points ...
[ "os.path.exists", "argparse.ArgumentParser", "torch.mean", "torch.unsqueeze", "model.PointCloudNet", "code.utils.helper_function.get_best_epoch", "torch.cuda.is_available", "torch.sum", "os.mkdir", "torch.squeeze", "numpy.loadtxt", "torch.cat" ]
[((190, 215), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (213, 215), False, 'import argparse\n'), ((1444, 1495), 'torch.mean', 'torch.mean', (['patches[:, :, 0:3]'], {'dim': '(1)', 'keepdim': '(True)'}), '(patches[:, :, 0:3], dim=1, keepdim=True)\n', (1454, 1495), False, 'import torch\n'), ...
import importlib from uvicorn.workers import UvicornWorker class DynamicUvicornWorker(UvicornWorker): """ This class is called `DynamicUvicornWorker` because it assigns values according to the module available Union['asyncio', 'uvloop'] It also set `lifespan` to `off` :) """ spam_spec = impo...
[ "importlib.util.find_spec" ]
[((316, 350), 'importlib.util.find_spec', 'importlib.util.find_spec', (['"""uvloop"""'], {}), "('uvloop')\n", (340, 350), False, 'import importlib\n')]
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applica...
[ "tensorflow.python.framework.errors.raise_exception_on_not_ok_status", "threading.Lock", "tensorflow.python.util.compat.as_bytes", "os.path.join", "os.path.isdir" ]
[((1727, 1761), 'tensorflow.python.util.compat.as_bytes', 'compat.as_bytes', (['metadata_paths[0]'], {}), '(metadata_paths[0])\n', (1742, 1761), False, 'from tensorflow.python.util import compat\n'), ((1877, 1920), 'tensorflow.python.util.compat.as_bytes', 'compat.as_bytes', (["('%s.source_files' % prefix)"], {}), "('%...
""" bridge-like integrator for amuse the bridge class provides a bridge like coupling between different gravitational integrators. In this way a system composed of multiple components can be evolved taking account of the self gravity of the whole system self consistently, while choosing the most appropiate i...
[ "amuse.datamodel.Channels", "amuse.units.quantities.AdaptingVectorQuantity", "amuse.datamodel.ParticlesSuperset", "threading.Thread", "amuse.datamodel.Particles", "amuse.support.exceptions.AmuseException" ]
[((10042, 10063), 'amuse.datamodel.Particles', 'datamodel.Particles', ([], {}), '()\n', (10061, 10063), False, 'from amuse import datamodel\n'), ((10249, 10284), 'amuse.units.quantities.AdaptingVectorQuantity', 'quantities.AdaptingVectorQuantity', ([], {}), '()\n', (10282, 10284), False, 'from amuse.units import quanti...
import matplotlib.pyplot as plt import matplotlib.image as mpimg import numpy as np from PIL import Image import pandas as pd from sklearn.decomposition import PCA from scipy.spatial.distance import pdist, squareform from scipy.sparse.linalg import eigs from numpy import linalg as LA from mpl_toolkits.mplot3d import Ax...
[ "matplotlib.pyplot.ylabel", "sklearn.decomposition.PCA", "matplotlib.pyplot.xlabel", "matplotlib.image.imread", "numpy.zeros", "matplotlib.pyplot.subplots", "matplotlib.pyplot.show" ]
[((419, 457), 'numpy.zeros', 'np.zeros', ([], {'shape': '(numImages, 490 * 490)'}), '(shape=(numImages, 490 * 490))\n', (427, 457), True, 'import numpy as np\n'), ((727, 758), 'sklearn.decomposition.PCA', 'PCA', ([], {'n_components': 'numComponents'}), '(n_components=numComponents)\n', (730, 758), False, 'from sklearn....
from pydub import AudioSegment from pydub.silence import split_on_silence def segment(filename,foldername): """ filename : str foldername: str folder to put all the chunks """ sound_file = AudioSegment.from_file(filename) sound_file = sound_file.set_channels(1) sound_file = sound_file.set_frame_rate(16000) ...
[ "pydub.AudioSegment.from_file", "pydub.silence.split_on_silence" ]
[((198, 230), 'pydub.AudioSegment.from_file', 'AudioSegment.from_file', (['filename'], {}), '(filename)\n', (220, 230), False, 'from pydub import AudioSegment\n'), ((335, 405), 'pydub.silence.split_on_silence', 'split_on_silence', (['sound_file'], {'min_silence_len': '(1000)', 'silence_thresh': '(-60)'}), '(sound_file,...
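The extraction shows the exact silence parameters, so a runnable end-to-end sketch of the same segmentation is easy to give; the input file name and output names below are hypothetical.

from pydub import AudioSegment
from pydub.silence import split_on_silence

sound = AudioSegment.from_file("speech.wav")             # hypothetical input file
sound = sound.set_channels(1).set_frame_rate(16000)   # mono, 16 kHz, as in the record
chunks = split_on_silence(sound, min_silence_len=1000, silence_thresh=-60)
for i, chunk in enumerate(chunks):
    chunk.export(f"chunk_{i}.wav", format="wav")      # hypothetical output names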
# coding: utf-8 import os size = os.path.getsize("test.txt") with open("test.txt", mode="r") as f: print(f.read(size))
[ "os.path.getsize" ]
[((35, 62), 'os.path.getsize', 'os.path.getsize', (['"""test.txt"""'], {}), "('test.txt')\n", (50, 62), False, 'import os\n')]
# -*- coding: utf-8 -*- # !/usr/bin/python __author__ = 'ma_keling' # Version : 1.0.0 # Start Time : 2018-12-20 # Update Time : # Change Log : ## 1. ## 2. ## 3. import arcpy import CalculateLods def execute(): in_map = arcpy.GetParameter(0) arcpy.AddMessage("Input map : ...
[ "arcpy.GetParameter", "CalculateLods.calculate_lods_for_feature" ]
[((263, 284), 'arcpy.GetParameter', 'arcpy.GetParameter', (['(0)'], {}), '(0)\n', (281, 284), False, 'import arcpy\n'), ((359, 380), 'arcpy.GetParameter', 'arcpy.GetParameter', (['(1)'], {}), '(1)\n', (377, 380), False, 'import arcpy\n'), ((414, 477), 'CalculateLods.calculate_lods_for_feature', 'CalculateLods.calculate...
import io import logging import time from typing import List, Optional from custom_components.xiaomi_cloud_map_extractor.common.map_data import MapData from custom_components.xiaomi_cloud_map_extractor.types import Colors, Drawables, ImageConfig, Sizes, Texts try: from miio import RoborockVacuum, DeviceException ...
[ "logging.getLogger", "io.BytesIO", "custom_components.xiaomi_cloud_map_extractor.common.map_data_parser.MapDataParser.create_empty", "custom_components.xiaomi_cloud_map_extractor.common.xiaomi_cloud_connector.XiaomiCloudConnector", "time.sleep", "miio.Vacuum" ]
[((1310, 1337), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1327, 1337), False, 'import logging\n'), ((2244, 2271), 'miio.Vacuum', 'RoborockVacuum', (['host', 'token'], {}), '(host, token)\n', (2258, 2271), True, 'from miio import Vacuum as RoborockVacuum, DeviceException\n'), ((2298,...
from Locators.checkout_overview_locator import CheckoutOverviewLocator from Objects.product import Product from Pages.base_page_object import BasePage from Utils.utility import Utils class CheckoutOverViewPage(BasePage): def __init__(self, driver): super().__init__(driver) def get_product_overview_info(self...
[ "Locators.checkout_overview_locator.CheckoutOverviewLocator.PRODUCT_PRICE_LABEL", "Locators.checkout_overview_locator.CheckoutOverviewLocator.PRODUCT_NAME_LABEL", "Locators.checkout_overview_locator.CheckoutOverviewLocator.PRODUCT_DESC_LABEL", "Locators.checkout_overview_locator.CheckoutOverviewLocator.PRODUC...
[((645, 676), 'Objects.product.Product', 'Product', (['name', 'desc', 'price', 'qty'], {}), '(name, desc, price, qty)\n', (652, 676), False, 'from Objects.product import Product\n'), ((799, 845), 'Utils.utility.Utils.convert_string_to_float', 'Utils.convert_string_to_float', (['self', 'price_lbl'], {}), '(self, price_l...
# -*- coding: utf-8 -*- # https://github.com/Kodi-vStream/venom-xbmc-addons import xbmcaddon, xbmcgui, xbmc """Import system: from resources.lib.comaddon import addon, dialog, VSlog, xbmcgui, xbmc """ """ from resources.lib.comaddon import addon addons = addon() at the top of the page. Use a function comad...
[ "xbmc.getCondVisibility", "xbmcvfs.exists", "xbmc.getInfoLabel", "xbmcvfs.delete", "xbmc.sleep", "xbmc.executebuiltin", "xbmcgui.getCurrentWindowId", "xbmcvfs.File" ]
[((5509, 5549), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""Container.Refresh"""'], {}), "('Container.Refresh')\n", (5528, 5549), False, 'import xbmcaddon, xbmcgui, xbmc\n'), ((5574, 5623), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""ActivateWindow(busydialog)"""'], {}), "('ActivateWindow(busydialog)')\...
# type: ignore[attr-defined] from solids import example_one_solid # pylint: disable=import-error from dagster import pipeline @pipeline def example_one_pipeline(): example_one_solid()
[ "solids.example_one_solid" ]
[((172, 191), 'solids.example_one_solid', 'example_one_solid', ([], {}), '()\n', (189, 191), False, 'from solids import example_one_solid\n')]
import random import torch from game import Game from agent import RLAgent from moves import Moves game=Game() agent=RLAgent() moves=Moves() num_win=0 # initialize no. of wins by the human num_lose=0 # initialize no. of wins by the AI (losses by the human) num_tie=0 random.seed(1000) def check_board_and_may_update_state_values():...
[ "agent.RLAgent", "moves.Moves", "random.seed", "torch.save", "game.Game" ]
[((106, 112), 'game.Game', 'Game', ([], {}), '()\n', (110, 112), False, 'from game import Game\n'), ((119, 128), 'agent.RLAgent', 'RLAgent', ([], {}), '()\n', (126, 128), False, 'from agent import RLAgent\n'), ((135, 142), 'moves.Moves', 'Moves', ([], {}), '()\n', (140, 142), False, 'from moves import Moves\n'), ((255,...
# receive_msg.py # # SPDX-FileCopyrightText: Copyright 2021 <NAME> # # SPDX-License-Identifier: MIT # # Receive message from IOTA tangle # import iota_client import os import pprint # Config msg_meta = False env_node_address = 'HORNET_NODE_ADDRESS' # Print Message data def show_message(message, meta=False): if...
[ "iota_client.Client", "pprint.pprint", "argparse.ArgumentParser", "os.getenv" ]
[((452, 474), 'pprint.pprint', 'pprint.pprint', (['message'], {}), '(message)\n', (465, 474), False, 'import pprint\n'), ((566, 638), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Receive message from IOTA tangle."""'}), "(description='Receive message from IOTA tangle.')\n", (589, 638),...
# -*- coding: utf-8 -*- """ Turn on and off systemd suspend inhibitor. Configuration parameters: format: display format for this module (default '[\?color=state SUSPEND [\?if=state OFF|ON]]') lock_types: specify state to inhibit, comma separated list https://www.freedesktop.org/wiki/Software/sy...
[ "os.close", "dbus.SystemBus", "py3status.module_test.module_test" ]
[((2578, 2600), 'py3status.module_test.module_test', 'module_test', (['Py3status'], {}), '(Py3status)\n', (2589, 2600), False, 'from py3status.module_test import module_test\n'), ((2403, 2419), 'os.close', 'close', (['self.lock'], {}), '(self.lock)\n', (2408, 2419), False, 'from os import close\n'), ((1326, 1337), 'dbu...
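For context on what such an inhibitor module has to do, here is a hedged stand-alone sketch of taking and releasing a systemd lock over D-Bus by hand; the bus and interface names follow the documented org.freedesktop.login1 API, and this is not claimed to be the module's own code.

import os
import dbus

bus = dbus.SystemBus()
manager = dbus.Interface(
    bus.get_object("org.freedesktop.login1", "/org/freedesktop/login1"),
    "org.freedesktop.login1.Manager",
)
# Inhibit() hands back a file descriptor; suspend stays blocked while it is open.
fd = manager.Inhibit("sleep:idle", "py3status", "user toggle", "block").take()
os.close(fd)  # closing the fd releases the lock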
import peeweedb import astropy.units as u def get_by_basename(db, table, basename): """Get data from SQL database by basename. Returns a list of dict""" if isinstance(table, str): assert table in db.get_tables(), "Sanity Check Failed: Table queried does not exist" table = peeweedb.tabl...
[ "astropy.units.arcmin.to" ]
[((716, 734), 'astropy.units.arcmin.to', 'u.arcmin.to', (['u.deg'], {}), '(u.deg)\n', (727, 734), True, 'import astropy.units as u\n')]
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2021-2022 Hewlett Packard Enterprise, Inc. All rights reserved. # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type DOCUMENTATION = ''...
[ "ansible_collections.community.general.plugins.module_utils.ilo_redfish_utils.iLORedfishUtils" ]
[((4197, 4246), 'ansible_collections.community.general.plugins.module_utils.ilo_redfish_utils.iLORedfishUtils', 'iLORedfishUtils', (['creds', 'root_uri', 'timeout', 'module'], {}), '(creds, root_uri, timeout, module)\n', (4212, 4246), False, 'from ansible_collections.community.general.plugins.module_utils.ilo_redfish_u...
import torch import torch.nn.functional as F from torch import nn from util.misc import (NestedTensor, nested_tensor_from_tensor_list, accuracy, get_world_size, interpolate, is_dist_avail_and_initialized) from .backbone import build_backbone from .matcher import build_mat...
[ "torch.nn.ReLU", "torch.full_like", "numpy.array", "torch.cuda.is_available", "numpy.arange", "torch.nn.Sigmoid", "util.misc.get_world_size", "numpy.meshgrid", "torch.nn.functional.mse_loss", "util.misc.is_dist_avail_and_initialized", "torch.distributed.all_reduce", "torch.nn.Upsample", "tor...
[((3171, 3200), 'numpy.meshgrid', 'np.meshgrid', (['shift_x', 'shift_y'], {}), '(shift_x, shift_y)\n', (3182, 3200), True, 'import numpy as np\n'), ((3541, 3570), 'numpy.meshgrid', 'np.meshgrid', (['shift_x', 'shift_y'], {}), '(shift_x, shift_y)\n', (3552, 3570), True, 'import numpy as np\n'), ((596, 662), 'torch.nn.Co...
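The repeated numpy.meshgrid(shift_x, shift_y) calls in this record are the standard detection idiom for laying out a grid of anchor centres over a feature map; a toy self-contained version follows, with stride and map size chosen for illustration.

import numpy as np

stride, h, w = 16, 4, 4
shift_x = (np.arange(w) + 0.5) * stride
shift_y = (np.arange(h) + 0.5) * stride
sx, sy = np.meshgrid(shift_x, shift_y)
centers = np.stack([sx.ravel(), sy.ravel()], axis=1)  # (16, 2) anchor centres
print(centers.shape)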
import datetime from data_sqlalchemy.modelbase import SqlAlchemyBase import sqlalchemy as sa class Word(SqlAlchemyBase): __tablename__ = "words" # id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) id = sa.Column(sa.String, primary_key=True) created_date = sa.Column(sa.DateTime, default...
[ "sqlalchemy.String", "sqlalchemy.Column" ]
[((232, 270), 'sqlalchemy.Column', 'sa.Column', (['sa.String'], {'primary_key': '(True)'}), '(sa.String, primary_key=True)\n', (241, 270), True, 'import sqlalchemy as sa\n'), ((290, 355), 'sqlalchemy.Column', 'sa.Column', (['sa.DateTime'], {'default': 'datetime.datetime.now', 'index': '(True)'}), '(sa.DateTime, default...
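A self-contained sketch of the same column pattern, with sqlalchemy's stock declarative_base standing in for the project's SqlAlchemyBase; the table name and inserted row are illustrative only.

import datetime
import sqlalchemy as sa
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class WordSketch(Base):
    __tablename__ = "words_sketch"
    id = sa.Column(sa.String, primary_key=True)
    created_date = sa.Column(sa.DateTime, default=datetime.datetime.now, index=True)

engine = sa.create_engine("sqlite://")   # in-memory database
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(WordSketch(id="hello"))
    session.commit()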
import bpy from bpy.props import * from ...preferences import get_pref def update_node(self, context): try: self.node.node_dict[self.name] = self.value # update node tree self.node.update_parms() except Exception as e: print(e) class RenderNodeSocketInterface(bpy.types.NodeSo...
[ "bpy.utils.unregister_class", "bpy.utils.register_class" ]
[((8909, 8938), 'bpy.utils.register_class', 'bpy.utils.register_class', (['cls'], {}), '(cls)\n', (8933, 8938), False, 'import bpy\n'), ((8991, 9022), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', (['cls'], {}), '(cls)\n', (9017, 9022), False, 'import bpy\n')]
from keras.models import load_model import numpy as np import pandas as pd from keras.preprocessing.image import ImageDataGenerator from sklearn.cluster import KMeans from time import time # Takes a pandas dataframe containing the cluster assignment and ground truth for each data point # and returns the purity of the ...
[ "sklearn.cluster.KMeans", "keras.models.load_model", "keras.preprocessing.image.ImageDataGenerator", "numpy.array", "pandas.DataFrame", "numpy.log2", "time.time" ]
[((1556, 1730), 'pandas.DataFrame', 'pd.DataFrame', (["{'cluster': clusters, 'cluster_size': cluster_sizes, 'most_common_class':\n most_common_classes, 'purity': cluster_purities, 'total_purity':\n total_purity}"], {}), "({'cluster': clusters, 'cluster_size': cluster_sizes,\n 'most_common_class': most_common_c...
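The truncated comment describes cluster purity; under the usual definition (the sum of each cluster's dominant ground-truth count, divided by the number of points) a worked toy example looks like this. The column names here are assumptions, not the record's.

import pandas as pd

df = pd.DataFrame({
    "cluster":      [0, 0, 0, 1, 1, 1],
    "ground_truth": ["a", "a", "b", "b", "b", "b"],
})
# Per cluster, value_counts() is sorted descending, so iloc[0] is the dominant count.
dominant = df.groupby("cluster")["ground_truth"].agg(lambda s: s.value_counts().iloc[0])
purity = dominant.sum() / len(df)
print(purity)  # (2 + 3) / 6 = 0.8333...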
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # """Evohome RF - Opentherm processor.""" import logging import struct from typing import Any from .const import __dev_mode__ DEV_MODE = __dev_mode__ _LOGGER = logging.getLogger(__name__) if DEV_MODE: _LOGGER.setLevel(logging.DEBUG) # Data structure shamelessly cop...
[ "logging.getLogger", "struct.unpack" ]
[((211, 238), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (228, 238), False, 'import logging\n'), ((25371, 25395), 'struct.unpack', 'struct.unpack', (['""">H"""', 'buf'], {}), "('>H', buf)\n", (25384, 25395), False, 'import struct\n'), ((25575, 25599), 'struct.unpack', 'struct.unpack',...
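The '>H' format string seen twice in this record is worth one worked line: it decodes two bytes as a big-endian unsigned 16-bit integer, presumably how word-sized fields are carried in these OpenTherm-style frames.

import struct

buf = bytes([0x01, 0x90])
value, = struct.unpack(">H", buf)  # big-endian unsigned short
print(value)                       # 400, i.e. 0x0190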
import bokeh.io import bokeh.plotting import bokeh.layouts import bokeh.palettes import seaborn as sns import numpy as np import os import matplotlib.pyplot as plt import matplotlib def plotting_style(grid=True): """ Sets the style to the publication style """ rc = {'axes.facecolor': '#E3DCD0', ...
[ "seaborn.set_style", "matplotlib.pyplot.rc" ]
[((1032, 1085), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text.latex"""'], {'preamble': '"""\\\\usepackage{sfmath}"""'}), "('text.latex', preamble='\\\\usepackage{sfmath}')\n", (1038, 1085), True, 'import matplotlib.pyplot as plt\n'), ((1090, 1139), 'matplotlib.pyplot.rc', 'plt.rc', (['"""mathtext"""'], {'fontset': '"""s...
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, softw...
[ "mock.patch", "reno.create._pick_note_file_name", "reno.create._edit_file", "fixtures.EnvironmentVariable", "reno.create._make_note_file", "fixtures.TempDir" ]
[((699, 727), 'mock.patch', 'mock.patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (709, 727), False, 'import mock\n'), ((965, 993), 'mock.patch', 'mock.patch', (['"""os.path.exists"""'], {}), "('os.path.exists')\n", (975, 993), False, 'import mock\n'), ((1089, 1140), 'reno.create._pick_note_file_name', ...
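This record leans on mock.patch('os.path.exists') throughout; a minimal self-contained illustration of that patching pattern, independent of reno itself:

import os.path
from unittest import mock

with mock.patch("os.path.exists") as fake_exists:
    fake_exists.return_value = False
    assert os.path.exists("/definitely/missing") is False
    fake_exists.assert_called_once_with("/definitely/missing")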
import math import torch.nn as nn from .modules import QConv2d, QLinear def make_layers(cfg, batch_norm=False, wbit=4, abit=4): layers = list() in_channels = 3 for v in cfg: if v == 'M': layers += [nn.MaxPool2d(kernel_size=2, stride=2)] elif v == 'A': layers += [nn....
[ "torch.nn.BatchNorm2d", "torch.nn.ReLU", "torch.nn.Dropout", "torch.nn.Sequential", "math.sqrt", "torch.nn.MaxPool2d", "torch.nn.Linear", "torch.nn.AvgPool2d" ]
[((682, 704), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (695, 704), True, 'import torch.nn as nn\n'), ((232, 269), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (244, 269), True, 'import torch.nn as nn\n'), ((1375, 138...
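Because the record's make_layers is cut off and depends on the project's quantized QConv2d/QLinear modules, here is a self-contained sketch of the same cfg-driven builder with plain nn.Conv2d standing in; the 'A' average-pool branch is omitted for brevity.

import torch.nn as nn

def make_layers_sketch(cfg, batch_norm=False):
    layers, in_ch = [], 3
    for v in cfg:
        if v == 'M':
            layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
        else:
            layers += [nn.Conv2d(in_ch, v, kernel_size=3, padding=1)]
            if batch_norm:
                layers += [nn.BatchNorm2d(v)]
            layers += [nn.ReLU(inplace=True)]
            in_ch = v
    return nn.Sequential(*layers)

features = make_layers_sketch([64, 'M', 128, 'M'], batch_norm=True)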
#!/usr/bin/python ''' Central Templates Ansible Module ''' # MIT License # # Copyright (c) 2020 Aruba, a Hewlett Packard Enterprise company # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software w...
[ "json.loads", "ansible.module_utils.central_http.CentralApi" ]
[((9181, 9199), 'ansible.module_utils.central_http.CentralApi', 'CentralApi', (['module'], {}), '(module)\n', (9191, 9199), False, 'from ansible.module_utils.central_http import CentralApi\n'), ((12141, 12167), 'json.loads', 'json.loads', (["result['resp']"], {}), "(result['resp'])\n", (12151, 12167), False, 'import js...
from django.db import models from django.utils.translation import ugettext_lazy as _ from habitat.timezone import get_timezone timezone = get_timezone() class MissionDate(models.Model): date = models.CharField( verbose_name=_(timezone.DATE_VERBOSE_NAME), help_text=_(timezone.DATE_HELP_TEXT), ...
[ "django.utils.translation.ugettext_lazy", "habitat.timezone.get_timezone" ]
[((140, 154), 'habitat.timezone.get_timezone', 'get_timezone', ([], {}), '()\n', (152, 154), False, 'from habitat.timezone import get_timezone\n'), ((822, 855), 'django.utils.translation.ugettext_lazy', '_', (['timezone.DATETIME_VERBOSE_NAME'], {}), '(timezone.DATETIME_VERBOSE_NAME)\n', (823, 855), True, 'from django.u...
#!/usr/bin/env python from __future__ import print_function import sys import serial import time from math import sin, cos, pi import argparse import ast from comms import * from boards import * from livegraph import livegraph if __name__ == '__main__': parser = argparse.ArgumentParser(description='Drive motor m...
[ "argparse.ArgumentParser", "ast.literal_eval", "serial.Serial", "livegraph.livegraph", "time.time" ]
[((270, 397), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Drive motor module(s) with a given control mode and plot current measurements."""'}), "(description=\n 'Drive motor module(s) with a given control mode and plot current measurements.'\n )\n", (293, 397), False, 'import ar...