content stringlengths 5 1.05M |
|---|
from math import floor
import ansiscape as a
from ansiscape.types import SequenceType
def make_example() -> SequenceType:
foreground_rgb = a.sequence()
background_rgb = a.sequence()
square = 10
for row in range(0, square):
for column in range(0, (square * square)):
block = floo... |
"""
pyPESTO
=======
Parameter Estimation TOolbox for python.
"""
from .version import __version__ # noqa: F401
from .objective import (ObjectiveOptions,
Objective,
AmiciObjective,
PetabImporter)
from .problem import Problem
from .result import ... |
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable... |
from typing import List, Optional
import aiosqlite
from lotus.types.blockchain_format.coin import Coin
from lotus.types.blockchain_format.sized_bytes import bytes32
from lotus.types.coin_record import CoinRecord
from lotus.types.full_block import FullBlock
from lotus.util.db_wrapper import DBWrapper
from lotus.util.i... |
import time
import os
from flask import Flask, request, Response
from flask_restful import Api, Resource
from waitress import serve
from uuid import uuid4
MY_DIR = os.path.dirname(os.path.realpath(__file__))
RES_DIR = os.path.join(MY_DIR, "res")
items = []
ditems = dict()
class Item(Resource):
def __get_item_if_e... |
import warnings
import matplotlib
import matplotlib.pyplot as plt
import pyDeltaRCM
# filter out the warning raised about no netcdf being found
warnings.filterwarnings("ignore", category=UserWarning)
n = 1
cm = matplotlib.cm.get_cmap('tab10')
# init delta model
with pyDeltaRCM.shared_tools._docs_temp_directory(... |
from django.conf.urls import url, include
from src import settings
from django.conf.urls.static import static
from . import views
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^dashboard/', views.dashboard, name='dashboard'),
url(r'^delete/(?P<file_id>[\d ]+)/$', views.delete_file, name='dele... |
import argparse
import pandas
from fuzzywuzzy import process
def colFix(x):
    """Normalize a column name into a plain identifier.

    Integer names become 'd<n>' (e.g. 3 -> 'd3'); string names have
    dots, spaces, AND hyphens stripped (the original docstring omitted
    hyphens even though the code removed them).

    :param x: column name, either an int or a str
    :return: normalized string name
    """
    # isinstance is the idiomatic type check; note it also accepts bools
    # (int subclass), which the original `type(x) is int` rejected — the
    # original would then crash on True.replace(), so this is strictly safer.
    if isinstance(x, int):
        return 'd{}'.format(x)
    return x.replace('.', '').replace(' ', '').replace('-', '')
def prepareTable(excel_file, feather_file)... |
"""
The :mod:`view` module contains the classes used to represent a MMIF view
as a live Python object.
In MMIF, views are created by apps in a pipeline that are annotating
data that was previously present in the MMIF file.
"""
from datetime import datetime
from typing import Dict, Union, Optional, Generator, List, cas... |
# -*- coding: utf-8 -*-
from datetime import datetime
from app import db
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
class Baby(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(32), unique=True)
birthday = ... |
def foo(a, b):
    """Placeholder that accepts two arguments and does nothing.

    :param a: first argument (unused)
    :param b: second argument (unused)
    :return: None
    """
    pass
|
import turtle
import random
import constants
class Player(turtle.Turtle):
CRASHED = "crashed"
READY = "ready"
DEAD = "dead"
def __init__(self, name, start_x, start_y, color):
super(Player, self).__init__()
self.penup()
self.shape("square")
self.color(color)
se... |
import logging
from collections import namedtuple
import geopandas as gp
import osmium
import pandas as pd
import shapely.wkb as wkblib
from pyproj import Transformer, CRS
from shapely.geometry import MultiPoint
from shapely.ops import transform, nearest_points
from osmox import helpers
OSMTag = namedtuple('OSMtag',... |
# %load penguinsimple.py
from bokeh.models import ColumnDataSource, Range1d
from bokeh.plotting import figure
from bokeh.io import export_png, show, output_notebook
import pandas as pd
import numpy as np
def line(x, b):
    """Return the y-value at x of a line with slope 250/200 and intercept b."""
    slope = 250 / 200
    return slope * x + b
def margin(x, y):
    """Return the signed offset of point (x, y) from the shifted line.

    Same slope as line(); the intercept term is 400/200.
    """
    slope = 250 / 200
    intercept = 400 / 200
    return slope * x - y + intercept
penguins = '../../d... |
# class A:
# def __init__(self, s = "welcome"):
# self.s = s
# def print(self):
# print(self.s)
# a = A()
# a.print()
# def main():
# myCount = Count()
# times = 0
# for i in range(0, 100):
# increment(myCount, times)
# print("myCount.count =", myCount.count, ... |
from django.urls import path
from . import views
# URL namespace for this app; reverse with "frontend:index".
app_name = "frontend"
urlpatterns = [
    # Site root mapped to the homepage view.
    path('', views.homepage, name="index"),
]
|
'''
Created on Jul 1, 2009
This module contains tests for the face recognition algorithms.
@author: bolme
'''
import unittest
import pyvision as pv
import numpy as np
pv.disableCommercialUseWarnings()
from pyvision.analysis.FaceAnalysis.FaceDatabase import ScrapShotsDatabase
from pyvision.analysis.FaceAnalysis... |
import apache_beam as beam
import kubeflow_batch_predict.dataflow.batch_prediction as batch_prediction
import code_search.do_fns.embeddings as embeddings
import code_search.transforms.github_bigquery as github_bigquery
class GithubBatchPredict(beam.PTransform):
"""Batch Prediction for Github dataset"""
def __in... |
from zahlabut.LogTool import *
import os
from configparser import ConfigParser
from datetime import datetime
import unittest
conf_file = 'conf_unittest.ini'
class NginxTestCases(unittest.TestCase):
def setUp(self):
test_start_time = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
parser = ConfigPars... |
'Build the bundled capnp distribution'
import subprocess
import os
import tempfile
def build_libcapnp(bundle_dir, build_dir, verbose=False):
bundle_dir = os.path.abspath(bundle_dir)
capnp_dir = os.path.join(bundle_dir, 'capnproto-c++')
build_dir = os.path.abspath(build_dir)
with tempfile.TemporaryFile() as f... |
from typing import List
if __name__ == '__main__':
class Solution:
def removeDuplicates(self, nums: List[int]) -> int:
j = 0
len_nums = len(nums)
if len_nums == 0 or len_nums ==1:
return len_nums
else:
for i in range(len(nums)-... |
"""add BotwSettings.renomination_cooldown
Revision ID: 33ab173d34a2
Revises: 42180b951b16
Create Date: 2021-02-19 00:00:34.097366
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "33ab173d34a2"
down_revision = "42180b951b16"
branch_labels = None
depends_on = None... |
from vapory import *
import numpy as np, PIL, PIL.ImageFont, PIL.Image
def letter_pixels(letter):
font = PIL.ImageFont.load_default()
(width, height) = font.getsize(letter)
txt_image = np.array(font.getmask(letter)).reshape((height, width))
xx,yy = np.logical_not(np.pad(txt_image, 1, 'constant')).no... |
"""Export remote accesses in file xlsx"""
from cbw_api_toolbox.cbw_file_xlsx import CBWXlsx
API_KEY = ''
SECRET_KEY = ''
API_URL = ''
FILE_XLSX = "" #Optional parameter
XLSX = CBWXlsx(API_URL, API_KEY, SECRET_KEY)
print(XLSX.export_remote_accesses_xlsx())
|
from setuptools import setup, find_packages
setup(
name='scaleout-cli',
version='0.0.1',
description="""Scaleout CLI""",
author='Morgan Ekmefjord',
author_email='morgan@scaleout.se',
url='https://www.scaleoutsystems.com',
include_package_data=True,
py_modules=['scaleout'],
python_re... |
# -*- coding: utf-8 -*-
#
# Undent - Dedent and format multiline strings into human-readable output
#
# Ansgar Grunseid
# grunseid.com
# grunseid@gmail.com
#
# License: MIT
#
from undent.api import undent
from undent.__version__ import (
__title__,
__version__,
__license__,
__author__,
__contact_... |
from django.db import models
# Create your models here.
class Maker(models.Model):
name = models.CharField(max_length=10)
country = models.CharField(max_length=10)
def __str__(self):
return self.name
class PModel(models.Model):
maker = models.ForeignKey(Maker, on_delete=models.CASCADE)
... |
"""
Well Registry ORM object.
"""
from django.conf import settings
from django.core.validators import RegexValidator
from django.db import models
from smart_selects.db_fields import ChainedForeignKey
class AgencyLookup(models.Model):
"""Model definition for the agency table, lookup only"""
agency_cd = mode... |
import asyncio
import contextlib
import discord
from redbot.core import commands
from redbot.core.commands import Context
from redbot.core.i18n import Translator
from redbot.core.utils.menus import start_adding_reactions
from redbot.core.utils.predicates import ReactionPredicate, MessagePredicate
from ...utils.parser... |
import os
import hashlib
import hmac
import time
from adminapi.cmduser import get_auth_token
from adminapi.filters import BaseFilter
try:
from urllib.request import urlopen, Request
from urllib.error import HTTPError, URLError
except ImportError:
from urllib2 import urlopen, Request, HTTPError, URLError
... |
# -*- coding: utf-8 -*-
"""
Created on Mon May 01 23:02:52 2017
@author: dom
"""
from sys import argv
def isvalid(line):
# Checks if a read line can be used
if line[0] == '#':
return False
elem = line.split(';')
if not len(elem) == 3:
return False
if not float(elem[2]) > 0:
... |
# Copyright 2021-xx iiPython
# Prism Internal Engine
# Modules
import os
import random
import secrets
import discord
from typing import Union
from prism.config import config
from discord.ext import commands, tasks
from .utils import Utils
from ..logging import logger
from ..database import Database
# Bot class
class... |
# -*- coding:utf-8 -*-
# Author:lixuecheng
import asyncio
import time
import requests
import concurrent.futures
import os
import random
class A:
async def a(self, a1):
time.sleep(0.1)
# print(a1)
return a1 + 1
async def b(self, b1):
d = await self.a(b1)
return d + 1
... |
from heapq import heappush
class KNearestDecks(object):
""" Finds the decks that are most similar to a set of cards. """
MAX_CARDS_IN_DECK = 30
def __init__(self):
self.decks = {}
self.card_count = {}
def update_deck(self, deck_entry):
""" Adds a card to its corresponding de... |
import os
curr_dir = os.path.dirname(__file__)
data_dir = curr_dir + '/../COVID-19/csse_covid_19_data/csse_covid_19_time_series/'
confirmed_covid_jhu_time_series = data_dir + 'time_series_covid19_recovered_global.csv'
deaths_covid_jhu_time_series = data_dir + 'time_series_covid19_deaths_global.csv'
recovered_covid_jh... |
import re
import pytest
from reddiggit import post
@pytest.mark.usefixtures('client')
class TestNegative:
def test_null_topic(self, client):
"""Test if error message shows when topic is null while posting."""
assert len(post.posts) == 0
response = client.post('/post/submit', data=dict(
... |
#!/usr/bin/env python
import glob
import os
import signal
import six
import sys
import logging
import tempfile
import time
import re
from redis import StrictRedis
from datetime import datetime
if os.name == 'posix' and sys.version_info[0] < 3:
import subprocess32 as subprocess
else:
import subprocess
try:
... |
from pytest import fixture
from linkedin_scraper.parsers import EmploymentParser
@fixture()
def employment_parser():
parser = EmploymentParser()
parser.professions_list = []
return parser
def test_regular_linkedin_format(employment_parser):
assert ('Python Developer', 'Foo Software') == employment_... |
from helper import *
### 1a
def grader1a(A, B, C, T, V, W):
try:
aae(A@B@C, H@T@V@W@H)
except:
return 'T, V, W are not correct'
else:
if np.all(V == W):
return 'Unitary matrices are not unique'
elif np.all(V == T):
return 'Unitary matrices are n... |
'''
do nothing, for debug
'''
class I2c():
    """No-op I2C stand-in for debugging: every operation is silently ignored."""

    def __init__(self, scl, sda):
        """Accept the clock/data pin arguments and discard them."""
        pass

    def address(self, addr):
        """Ignore the target device address."""
        pass

    def writeWordReg(self, reg, data):
        """Ignore the word-register write request."""
        pass
|
import fgrequests
# One-URL smoke test of fgrequests.build().
urls = ['https://www.google.com']
# verify=False disables TLS certificate verification — acceptable only in
# a throwaway test like this, never in production code.
response = fgrequests.build(urls, verify=False)
# build() is expected to return a list of per-URL responses.
assert type(response) == list
# -*- coding: utf-8 -*-
from collections import defaultdict, namedtuple
from datetime import datetime
import logging
import os
import re
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from flask import has_request_context, request
from roger import auth, config, files, models, notif... |
from folium.map import Marker
from folium.utilities import parse_options
from branca.element import MacroElement
from jinja2 import Template
class JavascriptMarker(Marker):
"""Add a Marker in the shape of a boat.
Parameters
----------
location: tuple of length 2, default None
The latitude and ... |
# -*- coding: utf-8 -*-
# @Author: XP
import logging
import os
import jittor
import utils.data_loaders as dataloader_jt
from jittor import nn
from datetime import datetime
from tqdm import tqdm
from time import time
from tensorboardX import SummaryWriter
from core.test_c3d import test_net
from utils.average_meter impo... |
'''The ``methods.account`` module provides simple implementations of
common methods around account creation and management.
'''
import create
import login
import logout
import update
|
"""
Takes a preprocessed file and generate a file which contaitn the
name of the biffile, read name and percent value, and a possible metadata
"""
from scipy.optimize import curve_fit
def get_target_percent(percent_file, g_percent_value,nbin=101,mixture=True,minimum_percent_highsample=0.5):
#print(percent_file)
... |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator
from matplotlib.ticker import MultipleLocator
import importlib
import sys
import os
if not '../aux/' in sys.path: sys.path.append('../aux/')
import paths; importlib.reload(paths)
import spec; importlib.reload(spec)
... |
#!/usr/bin/python
import hashlib
import sys, getopt
def main(argv):
inputfile = ''
logfile = ''
try:
opts, args = getopt.getopt(argv,"hi:l:",["ifile=","lfile="])
except getopt.GetoptError:
print 'FeelingPhishyEmailParser.py -i <inputfile> -l <logfile>'
sys.exit(2)
for opt, arg in opts:... |
'''
This is the primary touchpaper module; the `main` function is called when you
run `zlist` on the command line.
'''
import argparse
import cssutils
import logging
import sys
from ._version import get_version
cssutils.log.setLevel(logging.CRITICAL)
'''
Main package routine
Parse CSS files for elements with a d... |
from . import fast_nn
from . import nn
import tensorflow as tf
from tensorflow.contrib.framework.python.ops import arg_scope
import numpy as np
UPDATE_V_STACK = 'update_v_stack'
def undo_zeroth_row_bias_when_downshifting(row_output, row):
'''The down_shifted_conv2d adds a bias to the row of all zeros. This remo... |
from docassemble.webapp.db_object import db
from docassemble.webapp.core.models import Uploads
def get_new_file_number(user_code, file_name, yaml_file_name=None):
new_upload = Uploads(key=user_code, filename=file_name, yamlfile=yaml_file_name)
db.session.add(new_upload)
db.session.commit()
return new_u... |
import unittest
from parse import *
class ParseTestClass(unittest.TestCase):
def setUp(self):
# Load source code for test library
self.library = LibrarySource('tests/example_library')
self.routines = dict(
(r.name, r) for r in self.library.public_subroutines)
self.test... |
import pickle
def output_file(obj, filename):
    """Pickle *obj* to *filename* using the highest available protocol.

    :param obj: any picklable object
    :param filename: destination path, opened in binary write mode
    """
    # `with` guarantees the handle is closed even if pickle.dump raises;
    # the original leaked the file descriptor on error.
    with open(filename, "wb") as f:
        pickle.dump(obj, f, protocol=pickle.HIGHEST_PROTOCOL)
def input_file(filename):
    """Load and return one pickled object from *filename*.

    :param filename: source path, opened in binary read mode
    :return: the unpickled object
    """
    # The original returned without ever closing the handle — a leak.
    with open(filename, "rb") as f:
        return pickle.load(f)
|
# Import all the models, so that Base has them before being
# imported by Alembic
from app.db.base_class import Base # noqa
from app.db.base_class import FLBase # noqa
from app.models.audit import Audit # noqa
from app.models.item import Item # noqa
from app.models.need import Need # noqa
from app.models.need_note... |
from dataclasses import dataclass
from typing import Generic, Mapping, TypeVar
__all__ = ["Curve"]
T = TypeVar("T")
U = TypeVar("U")
@dataclass
class _Curve(Generic[T, U]):
    """A generic curve: a mapping from points of type T to values of type U."""
    # NOTE(review): __all__ above exports "Curve" while only _Curve is defined
    # in this view; presumably a public alias is created elsewhere — confirm.
    mapping: Mapping[T, U]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-21 12:46
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('loans', '0001_initial'),
]
ope... |
#!/bin/env python3
# pylint: disable=bad-builtin,deprecated-lambda
import string
def is_ascii(s):
    """Return True if every character of *s* has an ordinal below 127.

    Note DEL (127) itself is excluded, matching the original comparison.
    """
    # Generator expression is the idiomatic replacement for map+lambda.
    return all(ord(c) < 127 for c in s)
def is_ascii_punctuation(s):
    """Return True if every character of *s* is in string.punctuation."""
    # Generator expression is the idiomatic replacement for map+lambda.
    return all(c in string.punctuation for c in s)
def is_ascii_printable(s):
    """Return True if every character of *s* is in string.printable."""
    # Generator expression is the idiomatic replacement for map+lambda.
    return all(c in string.printable for c in s)
... |
# Authors: Hyunwoo Lee <hyunwoo9301@naver.com>
# Released under the MIT license.
from api.common import *
# Data file holding synonym rows; loaded via the project's read_data helper.
synonym_data_file = 'synonym.txt'
synonym_data = read_data(synonym_data_file)
# Maps each synonym word to its canonical form (row[0]).
synonym_dictionary = {}
for row in synonym_data:
    # row[1] is assumed to be a comma-separated list of synonyms for row[0]
    # — TODO confirm against the file format read_data produces.
    for word in row[1].split(','):
        synonym_dictionary[word] = row[0]
def tr... |
from typing import Union
import tkinter as tk
import tkinter.ttk as ttk
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import (
FigureCanvasTkAgg, NavigationToolbar2Tk
)
# Implement the default Matplotlib key bindings.
from matplotlib.backend_bases import key_press_handler
def createPlo... |
from .typeo import spoof, typeo
|
from setuptools import setup
setup(
name='property-caching',
version='1.1.0',
description='Property caching',
author='Yola',
author_email='engineers@yola.com',
license='MIT (Expat)',
url='https://github.com/yola/property-caching',
packages=['property_caching'],
test_suite='tests',
... |
#!/usr/bin/python
from pygame import camera
import pygame
import time
from HeadsUpDisplay import HUD, Locations
def main_loop():
camera.init()
cam = camera.Camera(camera.list_cameras()[0])
(width, height) = cam.get_size()
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Webcam Thing... |
import label_wav
import csv
import os
# from input_data import AudioProcessor
# audio_processor = AudioProcessor(data_dir='~/projects/tensorflow_data/test.7z')
def prepare_csv_file(csv_dir, csv_headers):
# Delete csv file if it exists
try:
os.remove(csv_dir)
except OSError:
pass
# Add ... |
"""
pipe_base.py
Unify DriverTask and PipeTask with one abstract base class.
"""
# Using print as a function makes it easier to switch between printing
# during development and using logging.{debug, info, ...} in production.
from __future__ import print_function
from abc import ABCMeta, abstractmethod
import os
imp... |
# encoding: utf-8
"""
Some functions that cannot be in pdf.py file to prevent import loop.
"""
from cStringIO import StringIO
import logging
from tempfile import NamedTemporaryFile
from django.conf import settings
from django.core.files import File
from membership.billing import pdf
logger = logging.getLogger("mem... |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# /*******************************************************
# * Copyright (C) 2013-2014 CloudRunner.io <info@cloudrunner.io>
# *
# * Proprietary and confidential
# * This file is part of CloudRunner Server.
# *
# * CloudRunner S... |
"""
XRay Image Container
"""
import os.path
class Image: #pylint: disable=too-few-public-methods
"""
Image file data
"""
def __init__(self, **kwargs):
self.filename = kwargs.get('filename')
abspath_file = os.path.abspath(self.filename)
self.path, _imagename = os.path.split(abs... |
class Node:
    """A binary-tree node holding a value and links to two children."""

    def __init__(self, data):
        """Store *data*; both child links start out empty."""
        self.data = data
        self.left_child = None
        self.right_child = None
|
from utils import train, Pars, create_image, create_outputfolder, init_textfile, vector_quantize, clamp_with_grad, replace_grad
from dall_e import map_pixels, unmap_pixels, load_model
from stylegan import g_synthesis
from biggan import BigGAN
from tqdm import tqdm
from omegaconf... |
# Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required... |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
__author__ = 'Richard J. Sears'
VERSION = "0.98 (2021-10-15)"
# This script is part of my plot management set of tools. This
# script is used to move plots from one location to another on
# the same physical machine.
#
# If you are using a drive as a temp drive that falls w... |
from flask import Blueprint
# Blueprint grouping all user-related routes under the name "Users".
bp = Blueprint("Users",__name__)
# Imported at the bottom deliberately: routes needs `bp` to exist first,
# so a top-of-file import would create a circular import.
from app.users import routes
import json
import csv
import secrets
import string
from subprocess import check_output, call
fieldnames = ("Azure AD Group","First Name","Last Name","Extension","Voice DID","Fax DID","Caller ID","ID for MS Exchange","Home Phone","Cell Phone",
"Fax Number", "E-mail","Alternate E-mail","User Name","Pas... |
class Window(object):
    def __init__(self, win_id, name, geom=None, d_num=None):
        # win_id: window identifier; name: human-readable window title.
        # geom: optional geometry record; d_num: optional desktop number.
        self.win_id = win_id
        self.name = name
        self.geom = geom
        # Child windows, filled in by callers after construction.
        self.children = []
        self.desktop_number = d_num
        # Bare return kept from the original (redundant in __init__).
        return
def __repr__(self):
"""
An inheritable string representatio... |
import asyncio
import io
from enum import IntEnum
from haproxyspoa.payloads.agent_hello import AgentHelloPayload
from haproxyspoa.spoa_data_types import parse_varint, write_varint
class FrameType(IntEnum):
FRAGMENT = 0
HAPROXY_HELLO = 1
HAPROXY_DISCONNECT = 2
HAPROXY_NOTIFY = 3
AGENT_HELLO = 101
... |
"""Tests for main cli """
from subprocess import check_output
from unittest import TestCase, main
import re
from project import __version__
class TestHelp(TestCase):
def test_help(self):
output = check_output(['project','-h'])
self.assertTrue('Usage:' in str(output) )
output = check_out... |
import csv
import pathlib
import pickle
import os
from collections.abc import Iterable
import numpy as np
import pandas as pd
import scipy.stats as stats
def processed_expression_table(df):
    """Label the index 'genes' and collapse duplicate genes by averaging.

    Note: renames the caller's DataFrame index in place (same side effect
    as before), then returns the per-gene mean expression table.
    """
    df.index.name = 'genes'
    averaged = df.groupby('genes').mean()
    return averaged
def expression_ranks(expression_table_df, ascending, rank_... |
import random
import numpy as np
import os
import sys
import torch
from torch.autograd import Variable
import utils
class DataLoader(object):
"""
Handles all aspects of the data. Stores the dataset_params, vocabulary and tags with their mappings to indices.
"""
def __init__(self, data_dir, params, ... |
from textwrap import dedent
from django.core import management
from mock import mock_open, patch
from six import StringIO
from graphene import ObjectType, Schema, String
@patch("graphene_django.management.commands.graphql_schema.Command.save_json_file")
def test_generate_json_file_on_call_graphql_schema(savefile_mo... |
#!/usr/bin/env python3.4 -i
##MIT License
##
##Copyright (c) 2018 Douglas E. Moore
##
##Permission is hereby granted, free of charge, to any person obtaining a copy
##of this software and associated documentation files (the "Software"), to deal
##in the Software without restriction, including without limitation the ri... |
# imos method (difference array + prefix sum) for maximum interval overlap.
from sys import stdin
from itertools import accumulate
readline = stdin.readline
# n: number of [a, b] intervals to read from stdin.
n = int(readline())
# Difference array over coordinates 0..1_000_001.
t = [0] * (1000000 + 2)
for _ in range(n):
    a, b = map(int, readline().split())
    # Mark inclusive interval [a, b]: +1 at entry, -1 one past the end.
    t[a] += 1
    t[b + 1] -= 1
# Prefix sums of t give the coverage count at each coordinate;
# the answer is the maximum overlap.
print(max(accumulate(t[:-1])))
|
"""Module for implementations of the MetadataExtractor interface."""
from __future__ import absolute_import, unicode_literals
from rdflib.term import Literal
from rdflib.term import URIRef
from gutenberg._domain_model.types import rdf_bind_to_string
from gutenberg._domain_model.vocabulary import DCTERMS
from gutenb... |
import random
import string
import pyperclip
class Credentials:
credentials_list = []
'''
class that defines the credentials blueprint
'''
def __init__(self,appName,appPassword):
self.appName = appName
self.appPassword = appPassword
def save_credential(self):
Credentials.credentials_list.appen... |
import os
import torch
import torchvision
import tqdm
import utils
if __name__ == '__main__':
# Create components builder
builder = utils.builder.Builder()
config = builder.config
model_name = builder.model_name
amp_enabled = config['train']['amp_enabled']
# Device
device = torch.device... |
#!/usr/bin/env python
#-*-*- encoding: utf-8 -*-*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individual... |
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "Lic... |
import cv2
import numpy as np
cap = cv2.VideoCapture(0)
while(1):
_, frame = cap.read()
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_red = np.array([160,150,20])
upper_red = np.array([190,255,50])
lower_blue = np.array([110,150,100])
upper_blue = np.array([120,2250,255])
l... |
"""
tests.test_component_demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests demo component.
"""
import unittest
import homeassistant.core as ha
import homeassistant.components.automation as automation
import homeassistant.components.automation.state as state
from homeassistant.const import CONF_PLATFORM
class TestAutomationSta... |
# -*- coding: utf-8 -*-
import pynvim
@pynvim.plugin
class TestPlugin(object):
def __init__(self, nvim):
self.nvim = nvim
@pynvim.function('TestFunction', sync=True)
def testfunction(self, args):
return 3
@pynvim.command('TestCommand', nargs='*', range='')
def testcommand(self,... |
# First loop: counter takes 1, 11, 21, 31, 41 -> message prints five times.
counter = 1
while counter < 50:
    print("I like Python")
    counter += 10
# Second loop: counter takes 1 through 9 -> message prints nine times.
counter = 1
while counter < 10:
    print("I like Python")
    counter += 1
|
"""
This file aggregates nox commands for various development tasks.
To learn more about nox, visit https://nox.thea.codes/en/stable/index.html
"""
import sys
import nox
sys.path.append("noxfiles")
from ci_nox import *
from dev_nox import *
from docker_nox import *
from docs_nox import *
from utils_nox import *
# S... |
import unittest
from utils.group_by import group_by
class GroupByTestCase(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def test_simple(self):
items = [{"id": 1, "type": "string"}, {"id": 2, "type": "string"}]
expected = {"string": [{"id": 1, "type": "string"}, {"id": 2, "... |
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writin... |
# -*- coding: utf-8 -*-
from tccli.services.tsw.tsw_client import action_caller
|
"""rioxarray version"""
__version__ = "0.4.1.dev0"
|
import json
from aws_xray_sdk.core import patch_all, xray_recorder
from okdata.aws.logging import log_add, logging_wrapper
patch_all()
@logging_wrapper
@xray_recorder.capture("say_hello")
def say_hello(event, context):
log_add(relevant_information="Hello from Python blueprint!")
return {
"statusCod... |
import tensorflow as tf
hparams = tf.contrib.training.HParams(
num_mels=80,
frame_length_ms=50,
frame_shift_ms=12.5,
hop_length=int(16000 * 0.0125), # samples.
win_length=int(16000 * 0.05), # samples.
max_db=100,
ref_db=20,
preemphasis=0.97,
max_abs_value=4.0,
symmetric_mel=Tr... |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-12-29 03:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('feewaiver', '0033_campground'),
]
operations = [
migrations.AddField(
... |
#! -*- coding: utf-8 -*-
import csv
class DataProcessor:
"""Base class for data converters for sequence classification data sets."""
def get_examples(self, data_dir):
"""Gets a collection of :class:`InputExample` for the data set."""
raise NotImplementedError()
def get_labels(self):
... |
from typing import Type
import pgtrigger
from django.db.models import Model
class Notify(pgtrigger.Trigger):
"""A trigger which notifies a channel"""
def get_func(self, model: Type[Model]):
return f'''
{self._build_payload(model)}
{self._pre_notify()}
perform pg_n... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.