| repo_name | path | language | license | size |
|---|---|---|---|---|
from urllib2 import (
    urlopen,
    HTTPError,
    URLError,
)

BASEURL = 'http://169.254.169.254/'
DEFAULT_TIMEOUT = 2
DEFAULT_API_VERSION = 'latest'


class MetadataError(Exception):
    pass


def path(path=None, api_version=DEFAULT_API_VERSION, timeout=DEFAULT_TIMEOUT):
    if not api_version:
        api_version = 'latest'
    md_path = api_version
    if path:
        md_path = md_path + "/" + path
    try:
        u = urlopen(BASEURL + md_path, timeout=timeout)
    except HTTPError as e:
        if e.code == 404:
            raise MetadataError("Path not found: /%s" % path)
        else:
            raise MetadataError(e)
    except URLError as e:
        raise MetadataError(e)
    if not path:
        return "\n".join(map(lambda p: p.strip() + "/", u.readlines()))
    return u.read()


class ShortNames(object):
    '''Provide commonly-used metadata values by name'''
    names = {
        'az': '/meta-data/placement/availability-zone',
        'instance-id': '/meta-data/instance-id',
    }

    def __init__(self, api_version=None, timeout=DEFAULT_TIMEOUT):
        self.api_version = api_version
        self.timeout = timeout

    def list(self):
        return self.names.keys()

    def get(self, name):
        if name not in self.names:
            raise MetadataError('The shortname "{}" is not defined'.format(name))
        return path(self.names[name], self.api_version, self.timeout)
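
# Hypothetical usage sketch (only works from an EC2 instance, where the
# metadata service at 169.254.169.254 is reachable); 'az' and 'instance-id'
# are the shortnames defined above:
#
#     print path('meta-data/instance-id')   # raw path lookup
#     print ShortNames().get('az')          # e.g. 'us-east-1a'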
| slank/awsmeta | awsmeta/metadata.py | Python | mit | 1,422 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import pytest
# pylint: disable=attribute-defined-outside-init
class TestOutput(object):
    @pytest.fixture(autouse=True)
    def init(self, ssh_audit):
        self.Output = ssh_audit.Output
        self.OutputBuffer = ssh_audit.OutputBuffer

    def test_output_buffer_no_lines(self, output_spy):
        output_spy.begin()
        with self.OutputBuffer() as obuf:
            pass
        assert output_spy.flush() == []
        output_spy.begin()
        with self.OutputBuffer() as obuf:
            pass
        obuf.flush()
        assert output_spy.flush() == []

    def test_output_buffer_no_flush(self, output_spy):
        output_spy.begin()
        with self.OutputBuffer():
            print(u'abc')
        assert output_spy.flush() == []

    def test_output_buffer_flush(self, output_spy):
        output_spy.begin()
        with self.OutputBuffer() as obuf:
            print(u'abc')
            print()
            print(u'def')
        obuf.flush()
        assert output_spy.flush() == [u'abc', u'', u'def']

    def test_output_defaults(self):
        out = self.Output()
        # default: on
        assert out.batch is False
        assert out.colors is True
        assert out.minlevel == 'info'

    def test_output_colors(self, output_spy):
        out = self.Output()
        # test without colors
        out.colors = False
        output_spy.begin()
        out.info('info color')
        assert output_spy.flush() == [u'info color']
        output_spy.begin()
        out.head('head color')
        assert output_spy.flush() == [u'head color']
        output_spy.begin()
        out.good('good color')
        assert output_spy.flush() == [u'good color']
        output_spy.begin()
        out.warn('warn color')
        assert output_spy.flush() == [u'warn color']
        output_spy.begin()
        out.fail('fail color')
        assert output_spy.flush() == [u'fail color']
        if not out.colors_supported:
            return
        # test with colors
        out.colors = True
        output_spy.begin()
        out.info('info color')
        assert output_spy.flush() == [u'info color']
        output_spy.begin()
        out.head('head color')
        assert output_spy.flush() == [u'\x1b[0;36mhead color\x1b[0m']
        output_spy.begin()
        out.good('good color')
        assert output_spy.flush() == [u'\x1b[0;32mgood color\x1b[0m']
        output_spy.begin()
        out.warn('warn color')
        assert output_spy.flush() == [u'\x1b[0;33mwarn color\x1b[0m']
        output_spy.begin()
        out.fail('fail color')
        assert output_spy.flush() == [u'\x1b[0;31mfail color\x1b[0m']

    def test_output_sep(self, output_spy):
        out = self.Output()
        output_spy.begin()
        out.sep()
        out.sep()
        out.sep()
        assert output_spy.flush() == [u'', u'', u'']

    def test_output_levels(self):
        out = self.Output()
        assert out.getlevel('info') == 0
        assert out.getlevel('good') == 0
        assert out.getlevel('warn') == 1
        assert out.getlevel('fail') == 2
        assert out.getlevel('unknown') > 2

    def test_output_minlevel_property(self):
        out = self.Output()
        out.minlevel = 'info'
        assert out.minlevel == 'info'
        out.minlevel = 'good'
        assert out.minlevel == 'info'
        out.minlevel = 'warn'
        assert out.minlevel == 'warn'
        out.minlevel = 'fail'
        assert out.minlevel == 'fail'
        out.minlevel = 'invalid level'
        assert out.minlevel == 'unknown'

    def test_output_minlevel(self, output_spy):
        out = self.Output()
        # visible: all
        out.minlevel = 'info'
        output_spy.begin()
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 5
        # visible: head, warn, fail
        out.minlevel = 'warn'
        output_spy.begin()
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 3
        # visible: head, fail
        out.minlevel = 'fail'
        output_spy.begin()
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 2
        # visible: head
        out.minlevel = 'invalid level'
        output_spy.begin()
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 1

    def test_output_batch(self, output_spy):
        out = self.Output()
        # visible: all
        output_spy.begin()
        out.minlevel = 'info'
        out.batch = False
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 5
        # visible: all except head
        output_spy.begin()
        out.minlevel = 'info'
        out.batch = True
        out.info('info color')
        out.head('head color')
        out.good('good color')
        out.warn('warn color')
        out.fail('fail color')
        assert len(output_spy.flush()) == 4
| arthepsy/ssh-audit | test/test_output.py | Python | mit | 4,631 |
"""
This example opens the connection in async mode (does not work properly in Python 2.7).
"""
import os
import time
from msl.equipment import (
    EquipmentRecord,
    ConnectionRecord,
    Backend,
)

record = EquipmentRecord(
    manufacturer='Pico Technology',
    model='5244B',  # update for your PicoScope
    serial='DY135/055',  # update for your PicoScope
    connection=ConnectionRecord(
        backend=Backend.MSL,
        address='SDK::ps5000a.dll',  # update for your PicoScope
        properties={'open_async': True},  # opening in async mode is done in the properties
    )
)
# optional: ensure that the PicoTech DLLs are available on PATH
os.environ['PATH'] += os.pathsep + r'C:\Program Files\Pico Technology\SDK\lib'
t0 = time.time()
scope = record.connect()
while True:
    now = time.time()
    progress = scope.open_unit_progress()
    print('Progress: {}%'.format(progress))
    if progress == 100:
        break
    time.sleep(0.02)
print('Took {:.2f} seconds to establish a connection to the PicoScope'.format(time.time()-t0))
# flash the LED light for 5 seconds
scope.flash_led(-1)
time.sleep(5)
| MSLNZ/msl-equipment | msl/examples/equipment/picotech/picoscope/open_unit_async.py | Python | mit | 1,130 |
from collections import OrderedDict
import csv,re,sys
# returns an ordered dict[int,list[int]]
def load_from_csv_file(path_to_file):
    # ordered dict so as to keep the same order and avoid 'surprises'
    data = OrderedDict()
    with open(path_to_file, 'r') as csvfile:
        reader = csv.reader(csvfile, delimiter=';')
        for row in reader:
            query_id = row[0].strip()
            if _is_python_list(row[1]):
                # doc ids
                value = map(lambda str: int(str.strip("'")),
                            row[1].lstrip('[').rstrip(']').split(','))
            elif _is_python_string(row[1]):
                # just a string
                value = row[1].strip()
            else:
                raise RuntimeError("Csv file at '{0}' does not fit expected structure for parsing".format(path_to_file))
            data[query_id] = value
    return(data)


def write_to_csv_file(model, output_file):
    if isinstance(model, list):
        a_dict = OrderedDict()
        for lst in model:
            a_dict[lst[0]] = lst[1]
        model = a_dict
    with open(output_file, "w") as outfile:
        w = csv.writer(outfile, delimiter=';')
        for key, vals in model.iteritems():
            w.writerow([key, vals])


def _is_python_list(str_representation):
    no_of_open_sq_brackets = str_representation.count('[')
    no_of_close_sq_brackets = str_representation.count(']')
    if no_of_close_sq_brackets == no_of_open_sq_brackets and (no_of_open_sq_brackets != 0):
        return(True)
    else:
        return(False)


def _is_python_string(str_representation):
    if _is_python_list(str_representation):
        return(False)
    else:
        return(True)
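
# Hypothetical round trip, assuming a semicolon-delimited file with lines
# such as "q1;['3', '7', '19']":
#
#     results = load_from_csv_file('ranking.csv')    # {'q1': [3, 7, 19], ...}
#     write_to_csv_file(results, 'ranking_copy.csv')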
| queirozfcom/vector_space_retrieval | vsr/common/helpers/results.py | Python | mit | 1,477 |
"""Microscoper is a wrapper around bioformats using a forked
python-bioformats to extract the raw images from Olympus IX83
CellSense .vsi format, into a more commonly used TIFF format.
Images are bundled together according to their channels.
This code is used internally in SCB Lab, TCIS, TIFR-H.
You're free to modify it and distribute it.
"""
from __future__ import unicode_literals, print_function
import os
import collections
import bioformats as bf
import javabridge as jb
import numpy as np
import tifffile as tf
import tqdm
from .args import arguments
import xml.dom.minidom
def get_files(directory, keyword):
    """ Returns all the files in the given directory
    and subdirectories, filtering with the keyword.

    Usage:
        >>> all_vsi_files = get_files(".", ".vsi")

    This will have all the .vsi files in the current
    directory and all other directories in the current
    directory.
    """
    file_list = []
    for path, subdirs, files in os.walk(directory):
        for name in files:
            filename = os.path.join(path, name)
            if keyword in filename:
                file_list.append(filename)
    return sorted(file_list)


def get_metadata(filename):
    """Read the meta data and return the metadata object.
    """
    meta = bf.get_omexml_metadata(filename)
    metadata = bf.omexml.OMEXML(meta)
    return metadata


def get_channel(metadata, channel):
    """Return the channel name from the metadata object"""
    try:
        channel_name = metadata.image().Pixels.Channel(channel).Name
    except:
        return
    if channel_name is None:
        return
    return channel_name.replace("/", "_")


def read_images(path, save_directory, big, save_separate):
    """Reads images from the .vsi and associated files.
    Returns a dictionary with key as channel, and list
    of images as values."""
    with bf.ImageReader(path) as reader:
        # Shape of the data
        c_total = reader.rdr.getSizeC()
        z_total = reader.rdr.getSizeZ()
        t_total = reader.rdr.getSizeT()

        # Since we don't support hyperstacks yet...
        if 1 not in [z_total, t_total]:
            raise TypeError("Only 4D images are currently supported.")

        metadata = get_metadata(path)

        # This is so we can manually set a description down below.
        pbar_c = tqdm.tqdm(range(c_total))
        for channel in pbar_c:
            images = []
            # Get the channel name, so we can name the file after this.
            channel_name = get_channel(metadata, channel)
            # Update the channel progress bar description with the
            # channel name.
            pbar_c.set_description(channel_name)
            for time in tqdm.tqdm(range(t_total), "T"):
                for z in tqdm.tqdm(range(z_total), "Z"):
                    image = reader.read(c=channel,
                                        z=z,
                                        t=time,
                                        rescale=False)
                    # If there's no metadata on channel name, save channels
                    # with numbers, starting from 0.
                    if channel_name is None:
                        channel_name = str(channel)
                    images.append(image)
            save_images(np.asarray(images), channel_name, save_directory, big,
                        save_separate)
    return metadata


def save_images(images, channel, save_directory, big=False,
                save_separate=False):
    """Saves the images as TIFs with channel name as the filename.
    Channel names are saved as numbers when names are not available."""
    # Make the output directory, if it doesn't already exist.
    if not os.path.exists(save_directory):
        os.makedirs(save_directory)
    # Save a file for every image in a stack.
    if save_separate:
        filename = save_directory + str(channel) + "_{}.tif"
        for num, image in enumerate(images):
            with tf.TiffWriter(filename.format(num + 1), bigtiff=big) as f:
                f.save(image)
    # Save a single .tif file for all the images in a channel.
    else:
        filename = save_directory + str(channel) + ".tif"
        with tf.TiffWriter(filename, bigtiff=big) as f:
            f.save(images)


def save_metadata(metadata, save_directory):
    data = xml.dom.minidom.parseString(metadata.to_xml())
    pretty_xml_as_string = data.toprettyxml()
    with open(save_directory + "metadata.xml", "w") as xmlfile:
        xmlfile.write(pretty_xml_as_string)


def _init_logger():
    """This is so that Javabridge doesn't spill out a lot of DEBUG messages
    during runtime.
    From CellProfiler/python-bioformats.
    """
    rootLoggerName = jb.get_static_field("org/slf4j/Logger",
                                         "ROOT_LOGGER_NAME",
                                         "Ljava/lang/String;")
    rootLogger = jb.static_call("org/slf4j/LoggerFactory",
                                "getLogger",
                                "(Ljava/lang/String;)Lorg/slf4j/Logger;",
                                rootLoggerName)
    logLevel = jb.get_static_field("ch/qos/logback/classic/Level",
                                   "WARN",
                                   "Lch/qos/logback/classic/Level;")
    jb.call(rootLogger,
            "setLevel",
            "(Lch/qos/logback/classic/Level;)V",
            logLevel)


def run():
    # Add file extensions to this to be able to read different file types.
    extensions = [".vsi"]
    arg = arguments()
    files = get_files(arg.f, arg.k)
    if 0 == len(files):
        print("No file matching *{}* keyword.".format(arg.k))
        exit()
    if arg.list:
        for f in files:
            print(f)
        print("======================")
        print("Total files found:", len(files))
        print("======================")
        exit()
    jb.start_vm(class_path=bf.JARS, max_heap_size="2G")
    logger = _init_logger()
    pbar_files = tqdm.tqdm(files)
    for path in pbar_files:
        if not any(_ in path for _ in extensions):
            continue
        file_location = os.path.dirname(os.path.realpath(path))
        filename = os.path.splitext(os.path.basename(path))[0]
        save_directory = file_location + "/_{}_/".format(filename)
        pbar_files.set_description("..." + path[-15:])
        # If the user wants to store meta data for existing data,
        # the user may pass -om or --onlymetadata argument which
        # will bypass read_images() and get metadata on its own.
        if arg.onlymetadata:
            metadata = get_metadata(path)
        # The default behaviour is to store the files with the
        # metadata.
        else:
            metadata = read_images(path, save_directory, big=arg.big,
                                   save_separate=arg.separate)
        save_metadata(metadata, save_directory)
    jb.kill_vm()
| pskeshu/microscoper | microscoper/io.py | Python | mit | 6,962 |
# Generated by Django 3.1 on 2020-08-25 12:15
import django.db.models.deletion
from django.db import migrations, models
import s3upload.fields
class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Cat",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "custom_filename",
                    s3upload.fields.S3UploadField(blank=True, dest="custom_filename"),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Kitten",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("video", s3upload.fields.S3UploadField(blank=True, dest="vids")),
                ("image", s3upload.fields.S3UploadField(blank=True, dest="imgs")),
                ("pdf", s3upload.fields.S3UploadField(blank=True, dest="files")),
                (
                    "mother",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="example.cat"
                    ),
                ),
            ],
        ),
    ]
| yunojuno/django-s3-upload | example/migrations/0001_initial.py | Python | mit | 1,676 |
from outsourcer import Code
from . import utils
from .base import Expression
from .constants import BREAK, POS, RESULT, STATUS
class Sep(Expression):
    num_blocks = 2

    def __init__(
        self,
        expr,
        separator,
        discard_separators=True,
        allow_trailer=False,
        allow_empty=True,
    ):
        self.expr = expr
        self.separator = separator
        self.discard_separators = discard_separators
        self.allow_trailer = allow_trailer
        self.allow_empty = allow_empty

    def __str__(self):
        op = '/?' if self.allow_trailer else '//'
        return utils.infix_str(self.expr, op, self.separator)

    def operand_string(self):
        return f'({self})'

    def always_succeeds(self):
        return self.allow_empty

    def _compile(self, out):
        staging = out.var('staging', [])
        checkpoint = out.var('checkpoint', POS)

        with out.WHILE(True):
            with utils.if_fails(out, self.expr):
                # If we're not discarding separators, and if we're also not
                # allowing a trailing separator, then we need to pop the last
                # separator off of our list.
                if not self.discard_separators and not self.allow_trailer:
                    # But only pop if staging is not empty.
                    with out.IF(staging):
                        out += staging.pop()
                out += BREAK

            out += staging.append(RESULT)
            out += checkpoint << POS

            with utils.if_fails(out, self.separator):
                out += BREAK

            if not self.discard_separators:
                out += staging.append(RESULT)

            if self.allow_trailer:
                out += checkpoint << POS

        success = [
            RESULT << staging,
            POS << checkpoint,
            STATUS << True,
        ]

        if self.allow_empty:
            out.extend(success)
        else:
            with out.IF(staging):
                out.extend(success)
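
# Illustrative sketch (assuming utils.infix_str renders "operand op operand"):
# a Sep expression built with allow_trailer=False prints with the '//'
# operator, e.g. str(Sep(item, comma)) -> "item // comma", while one built
# with allow_trailer=True prints with '/?'.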
| jvs/sourcer | sourcer/expressions/sep.py | Python | mit | 2,062 |
#!/usr/bin/env python3
"""JSON data parser for snippet crawler"""
from BaseLogger import BaseLogger
from DatabaseAccessor import DatabaseAccessor
from bs4 import BeautifulSoup
from config import config_queue_page, config_idle_sleep, config_parse_domain, config_parse_process
from contextlib import closing
from datetime import datetime
from multiprocessing import Process
from platform import node
from time import sleep
class Parser(BaseLogger):
    def __init__(self, log_level=None):
        BaseLogger.__init__(self, self.__class__.__name__, log_level)
        self._db_conn = DatabaseAccessor()
        self._log_info("parser start @%s", node())

    def close(self):
        self._db_conn.close()
        self._log_info("parser exit")
        self._close_logger()

    def process(self):
        count_valid = None
        count_duplicate = None
        job = self._db_conn.queue_page_take_data()
        if job != None:
            url = job['url']
            data_list = job.get('data', [])
            self._log_info("parse json data from %s, items count %d", url, len(data_list))
            count_valid = 0
            count_duplicate = 0
            for data_index, data_item in enumerate(data_list):
                snippet = self._extract_snippet_record(url, data_item)
                if snippet == None:
                    self._log_warning("fail to extract #%d record of '%s' json data in queue_page", data_index, url)
                else:
                    if not self._db_conn.snippet_create(snippet):
                        count_duplicate += 1
                        # self._log_warning("fail to add new snippet %s", snippet["url"])
                    else:
                        count_valid += 1
            self._log_info("extract %d valid & %d duplicate snippets from %s json data", count_valid, count_duplicate, url)
            if not self._db_conn.queue_page_done_data(url):
                self._log_warning("fail to mark %s as 'done' in queue_crawl", url)
        else:
            self._log_warning("grab no json data to parse")
            sleep(config_idle_sleep)
        return (count_valid, count_duplicate)

    def _extract_snippet_record(self, url, data):
        try:
            snippet = {
                "url": config_parse_domain + str(data["group"]["group_id"]),
                "date": datetime.fromtimestamp(data["group"]["create_time"]),
                "content": data["group"]["content"],
                "archive": data,
                "source": url.split("?")[0],
                "source_name": data["group"]["category_name"],
            }
            snippet["count"] = {
                "digg": data["group"]["digg_count"],
                "bury": data["group"]["bury_count"],
                "favorite": data["group"]["favorite_count"],
                "comment": data["group"]["comment_count"],
            }
            if len(data["comments"]) > 0:
                comment_text = []
                comment_digg = []
                for comment in data["comments"]:
                    comment_text.append(comment["text"])
                    comment_digg.append(comment["digg_count"])
                snippet["comments"] = comment_text
                snippet["count"]["commdigg"] = comment_digg
            if len(snippet["content"].strip()) == 0:
                snippet = None
        except Exception as e:
            snippet = None
        return snippet


def main(times=10):
    with closing(Parser()) as parser:
        if times:
            for _ in range(times):
                parser.process()
        else:
            while True:
                parser.process()


if __name__ == '__main__':
    for _ in range(config_parse_process):
        Process(target=main, args=(0,)).start()
| vejuhust/snippet-crawler | Parser.py | Python | mit | 3,773 |
from flask import Flask
import numerals
app = Flask("Numerical converter")
@app.route("/")
def home():
return "Hello from converter"
@app.route("/<arabic>/roman")
def to_roman(arabic):
print("Converting {} to roman".format(arabic))
converted = numerals.convert_arabic_to_roman(int(arabic))
print("Conversion result: ", converted)
return converted
@app.route("/<roman>/arabic")
def to_arabic(roman):
print("Converting {} to arabic".format(roman))
converted = numerals.convert_roman_to_arabic(roman)
print("Conversion result: ", converted)
return str(converted)
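
# Example requests against a local run of this app (hypothetical values,
# assuming numerals.convert_arabic_to_roman / convert_roman_to_arabic behave
# as their names suggest):
#     GET /14/roman   -> "XIV"
#     GET /XIV/arabic -> "14"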
if __name__ == "__main__":
app.run() | takemyoxygen/playground | py/numerals/server.py | Python | mit | 647 |
import os
import datetime
def exit():
    os._exit(0)


def GetTimeString(m=-1):
    if m == 0:
        s1 = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    else:
        s1 = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    return s1
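
# For example, at a hypothetical clock time of 2017-03-01 09:30:00:
#     GetTimeString(0)  -> "20170301093000"
#     GetTimeString()   -> "20170301_093000"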
def MakeDir(directory):
    if not os.path.exists(directory):
        os.makedirs(directory)
| dalek7/umbrella | Python/DDUtil.py | Python | mit | 339 |
from pymongo import MongoClient
from dalmongo import configuration
# get the instance of MongoDB client
client = MongoClient(configuration.MONGODB_HOST, configuration.MONGODB_PORT)
# get the main application database
db = getattr(client, configuration.MONGODB_NAME)
| RobertoPrevato/flask-three-template | dalmongo/__init__.py | Python | mit | 267 |
#!/usr/bin/env python
import sys, json, psycopg2, argparse
parser = argparse.ArgumentParser(description='Imports word data into the taboo database.')
parser.add_argument('--verified', dest='verified', action='store_true', help='include if these words are verified as good quality')
parser.add_argument('--source', dest='source', help='include to set the source of these imported words')
args = parser.parse_args()
CONN_STR = 'dbname=prod user=prod'
data_str = '\n'.join(sys.stdin.readlines())
data = json.loads(data_str)
conn = psycopg2.connect(CONN_STR)
conn.autocommit = True
cur = conn.cursor()
count = 0
for word in data:
    try:
        cur.execute("INSERT INTO words (word, skipped, correct, status, source) VALUES(%s, %s, %s, %s, %s) RETURNING wid",
                    (word, 0, 0, 'approved' if args.verified == True else 'unverified', args.source))
        wordid = cur.fetchone()[0]
        prohibited_count = 0
        for prohibited in data[word]:
            prohibited_count = prohibited_count + 1
            cur.execute("INSERT INTO prohibited_words (wid, word, rank) VALUES(%s, %s, %s)",
                        (wordid, prohibited, prohibited_count))
        count = count + 1
    except Exception as e:
        print e
cur.close()
conn.close()
print 'Inserted ' + str(count) + ' words'
| jbowens/taboo | wordgen/data-importer.py | Python | mit | 1,307 |
'''
Copyleft Feb 11, 2017 Arya Iranmehr, PhD Student, Bafna Lab, UC San Diego, Email: airanmehr@gmail.com
'''
import numpy as np;
np.set_printoptions(linewidth=200, precision=5, suppress=True)
import pandas as pd;
pd.options.display.max_rows = 20;
pd.options.display.expand_frame_repr = False
import pylab as plt;
import os;
home = os.path.expanduser('~') + '/'
import Utils.Estimate as est
import Utils.Plots as pplt
import Scripts.KyrgysHAPH.Utils as kutl
import Scripts.KyrgysHAPH.Plot as kplt
kplt.savefig()
reload(est)
a=pd.read_pickle(kutl.path+'/data/freq.df')
def plotSFSall(chrom=None):
    f = est.Estimate.getSAFS
    a = pd.read_pickle(kutl.path + '/data/freq.df')
    suff = ''  # no chromosome-specific suffix by default
    if chrom is not None:
        suff = '.chr{}'.format(chrom)
        a = a.loc[[chrom]]
    kplt.plotSFSold2(a, fold=False, fname='AFS' + suff)
    kplt.plotSFSold2(a, fold=False, fname='Scaled-AFS' + suff, f=f)
    kplt.plotSFSold2(a, fold=True, fname='AFS' + suff)
    kplt.plotSFSold2(a, fold=True, fname='Scaled-AFS' + suff, f=f)


def plotChromAll():
    a.apply(lambda x: kplt.SFSChromosomwise(x, False, False))
    a.apply(lambda x: kplt.SFSChromosomwise(x, False, True))
    a.apply(lambda x: kplt.SFSChromosomwise(x, True, False))
    a.apply(lambda x: kplt.SFSChromosomwise(x, True, True))


def SFS():
    plotSFSall()
    plotSFSall('X')
    plotSFSall('Y')
    plotChromAll()
| airanmehr/bio | Scripts/KyrgysHAPH/GenomeAFS.py | Python | mit | 1,365 |
#!/usr/bin/env python3
import rainbow
import hashlib
import string
import time
import random
"""SHA-256 hash function
Precondition: Input plaintext as string
Postcondition: Returns hash as string
"""
def sha256(plaintext):
    return hashlib.sha256(bytes(plaintext, 'utf-8')).hexdigest()


"""Returns a reduction function which generates an n-digit lowercase password from a hash
"""
def reduce_lower(n):
    """Reduction function
    Precondition: hash is H(previousPlaintext)
    Postcondition: returns randomly distributed n-digit lowercase plaintext password
    """
    def result(hash, col):
        plaintextKey = (int(hash[:9], 16) ^ col) % (26 ** n)
        plaintext = ""
        for _ in range(n):
            plaintext += string.ascii_lowercase[plaintextKey % 26]
            plaintextKey //= 26
        return plaintext
    return result


"""Returns a function which generates a random n-digit lowercase password
"""
def gen_lower(n):
    def result():
        password = ""
        for _ in range(n):
            password += random.choice(string.ascii_lowercase)
        return password
    return result


"""Precondition: Input a function which generates a random password, or input no arguments to generate a random password
Postcondition: Cracks H(password) and prints elapsed time
"""
def test(table, hash_function, gen_password_function, password=""):
    if password == "":
        password = gen_password_function()
    print("Cracking password: {0}\nH(password): {1}".format(password, hash_function(password)))
    cracked = table.crack(hash_function(password))
    if cracked:
        print("Success! Password: {0}".format(cracked))
        return True
    else:
        print("Unsuccessful :(")
        return False


# Tests random passwords multiple times and prints success rate and average crack time.
def bulk_test(table, hash_function, gen_password_function, numTests):
    start = time.time()
    numSuccess = 0
    for i in range(numTests):
        print("\nTest {0} of {1}".format(i + 1, numTests))
        numSuccess += test(table, hash_function, gen_password_function)
    print("""\n{0} out of {1} random hashes were successful!\n
Average time per hash (including failures): {2} secs.""" \
          .format(numSuccess, numTests, (time.time() - start) / numTests))
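
# Hypothetical run against the table built below, assuming
# rainbow.RainbowTable exposes the crack() method used in test():
#
#     bulk_test(table, sha256, gen_lower(4), 10)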
table = rainbow.RainbowTable(sha256, reduce_lower(4), gen_lower(4))
| clu8/RainbowTable | crack.py | Python | mit | 2,202 |
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM15.CDPSM.Balanced.IEC61970.Core.IdentifiedObject import IdentifiedObject
class EnergyConsumer(IdentifiedObject):
    """Generic user of energy - a point of consumption on the power system model
    """

    def __init__(self, customerCount=0, pfixedPct=0.0, qfixedPct=0.0, qfixed=0.0, pfixed=0.0, LoadResponse=None, *args, **kw_args):
        """Initialises a new 'EnergyConsumer' instance.

        @param customerCount: Number of individual customers represented by this Demand
        @param pfixedPct: Fixed active power as per cent of load group fixed active power. Load sign convention is used, i.e. positive sign means flow out from a node.
        @param qfixedPct: Fixed reactive power as per cent of load group fixed reactive power. Load sign convention is used, i.e. positive sign means flow out from a node.
        @param qfixed: Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
        @param pfixed: Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
        @param LoadResponse: The load response characteristic of this load.
        """
        #: Number of individual customers represented by this Demand
        self.customerCount = customerCount

        #: Fixed active power as per cent of load group fixed active power. Load sign convention is used, i.e. positive sign means flow out from a node.
        self.pfixedPct = pfixedPct

        #: Fixed reactive power as per cent of load group fixed reactive power. Load sign convention is used, i.e. positive sign means flow out from a node.
        self.qfixedPct = qfixedPct

        #: Reactive power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
        self.qfixed = qfixed

        #: Active power of the load that is a fixed quantity. Load sign convention is used, i.e. positive sign means flow out from a node.
        self.pfixed = pfixed

        self._LoadResponse = None
        self.LoadResponse = LoadResponse

        super(EnergyConsumer, self).__init__(*args, **kw_args)

    _attrs = ["customerCount", "pfixedPct", "qfixedPct", "qfixed", "pfixed"]
    _attr_types = {"customerCount": int, "pfixedPct": float, "qfixedPct": float, "qfixed": float, "pfixed": float}
    _defaults = {"customerCount": 0, "pfixedPct": 0.0, "qfixedPct": 0.0, "qfixed": 0.0, "pfixed": 0.0}
    _enums = {}
    _refs = ["LoadResponse"]
    _many_refs = []

    def getLoadResponse(self):
        """The load response characteristic of this load.
        """
        return self._LoadResponse

    def setLoadResponse(self, value):
        if self._LoadResponse is not None:
            filtered = [x for x in self.LoadResponse.EnergyConsumer if x != self]
            self._LoadResponse._EnergyConsumer = filtered

        self._LoadResponse = value
        if self._LoadResponse is not None:
            if self not in self._LoadResponse._EnergyConsumer:
                self._LoadResponse._EnergyConsumer.append(self)

    LoadResponse = property(getLoadResponse, setLoadResponse)
| rwl/PyCIM | CIM15/CDPSM/Balanced/IEC61970/Wires/EnergyConsumer.py | Python | mit | 4,290 |
# -*- coding: utf-8 -*-
"""
Production settings file for project 'project'
"""
from project.settings import *
DEBUG = False
SITE_DOMAIN = 'sveetch.github.io/Sveetoy'
# Directory where all stuff will be builded
PUBLISH_DIR = os.path.join(PROJECT_DIR, '../docs')
# Path where will be moved all the static files, usually this is a directory in
# the ``PUBLISH_DIR``
STATIC_DIR = os.path.join(PROJECT_DIR, PUBLISH_DIR, 'static')
| sveetch/Sveetoy | project/githubpages_settings.py | Python | mit | 428 |
from disco.core import Job
from disco.worker.task_io import task_input_stream
import hustle
import hustle.core
import hustle.core.marble
from hustle.core.marble import Marble, Column, Aggregation
from functools import partial
from hustle.core.pipeworker import HustleStage
import sys
SPLIT = "split"
GROUP_ALL = "group_all"
GROUP_LABEL = "group_label"
GROUP_LABEL_NODE = "group_node_label"
GROUP_NODE = "group_node"
# default number of partitions, users can set this in the settings.yaml
_NPART = 16
def hustle_output_stream(stream, partition, url, params, result_table):
    class HustleOutputStream(object):
        def __init__(self, stream, url, params, **kwargs):
            import tempfile
            from wtrie import Trie
            self.result_table = result_table
            self.result_columns = result_table._field_names
            tmpdir = getattr(params, 'tmpdir', '/tmp')
            self.filename = tempfile.mktemp(prefix="hustle", dir=tmpdir)
            maxsize = getattr(params, 'maxsize', 100 * 1024 * 1024)
            self.env, self.txn, self.dbs, self.meta = self.result_table._open(self.filename, maxsize, write=True, lru_size=10000)
            self.autoinc = 1
            self.url = url
            self.vid_trie = Trie()
            self.vid16_trie = Trie()

        def add(self, k, v):
            from hustle.core.marble import _insert_row
            data = dict(zip(self.result_columns, list(k) + list(v)))
            # print "BOZAK! adding %s %s %s" % (self.result_columns, k, v)
            _insert_row(data,
                        self.txn,
                        self.dbs,
                        self.autoinc,
                        self.vid_trie,
                        self.vid16_trie)
            self.autoinc += 1

        def close(self):
            import os
            import ujson
            self.meta.put(self.txn, '_total_rows', str(self.autoinc))
            vid_nodes, vid_kids, _ = self.vid_trie.serialize()
            vid16_nodes, vid16_kids, _ = self.vid16_trie.serialize()
            vn_ptr, vn_len = vid_nodes.buffer_info()
            vk_ptr, vk_len = vid_kids.buffer_info()
            vn16_ptr, vn16_len = vid16_nodes.buffer_info()
            vk16_ptr, vk16_len = vid16_kids.buffer_info()
            self.meta.put_raw(self.txn, '_vid_nodes', vn_ptr, vn_len)
            self.meta.put_raw(self.txn, '_vid_kids', vk_ptr, vk_len)
            self.meta.put_raw(self.txn, '_vid16_nodes', vn16_ptr, vn16_len)
            self.meta.put_raw(self.txn, '_vid16_kids', vk16_ptr, vk16_len)
            self.meta.put(self.txn, 'name', ujson.dumps(self.result_table._name))
            self.meta.put(self.txn, 'fields', ujson.dumps(self.result_table._fields))
            self.meta.put(self.txn, 'partition', ujson.dumps(self.result_table._partition))
            for index, (subdb, subindexdb, bitmap_dict, column) in self.dbs.iteritems():
                if subindexdb:
                    # process all values for this bitmap index
                    if column.index_indicator == 2:
                        bitmap_dict.evictAll()
                    else:
                        for val, bitmap in bitmap_dict.iteritems():
                            subindexdb.put(self.txn, val, bitmap.dumps())
            self.txn.commit()
            try:
                self.env.copy(self.url)
                print "Dumped result to %s" % self.url
            except Exception as e:
                print "Copy error: %s" % e
                self.txn.abort()
                raise e
            self.env.close()
            os.unlink(self.filename)

    return HustleOutputStream(stream, url, params)


def hustle_input_stream(fd, size, url, params, wheres, gen_where_index, key_names):
    from disco import util
    from hustle.core.marble import Expr, MarbleStream
    from itertools import izip, repeat
    empty = ()
    try:
        scheme, netloc, rest = util.urlsplit(url)
    except Exception as e:
        print "Error handling hustle_input_stream for %s. %s" % (url, e)
        raise e

    fle = util.localize(rest, disco_data=params._task.disco_data, ddfs_data=params._task.ddfs_data)
    # print "FLOGLE: %s %s" % (url, fle)

    otab = None
    try:
        # import sys
        # sys.path.append('/Library/Python/2.7/site-packages/pycharm-debug.egg')
        # import pydevd
        # pydevd.settrace('localhost', port=12999, stdoutToServer=True, stderrToServer=True)
        otab = MarbleStream(fle)
        bitmaps = {}
        for index, where in enumerate(wheres):
            # do not process where clauses that have nothing to do with this marble
            if where._name == otab.marble._name:
                if type(where) is Expr and not where.is_partition:
                    bm = where(otab)
                    bitmaps[index] = (bm, len(bm))
                else:
                    # it is either the table itself, or a partition expression. either way,
                    # return the entire table
                    bitmaps[index] = (otab.iter_all(), otab.number_rows)

        for index, (bitmap, blen) in bitmaps.iteritems():
            prefix_gen = [repeat(index, blen)] if gen_where_index else []
            row_iter = prefix_gen + [otab.mget(col, bitmap) if col is not None else repeat(None, blen)
                                     for col in key_names[index]]
            for row in izip(*row_iter):
                yield row, empty
    finally:
        if otab:
            otab.close()


class SelectPipe(Job):
    # profile = True
    required_modules = [
        ('hustle', hustle.__file__),
        ('hustle.core', hustle.core.__file__),
        ('hustle.core.pipeline', __file__),
        ('hustle.core.marble', hustle.core.marble.__file__)]

    def get_result_schema(self, project):
        import random
        from hustle import Table
        if self.output_table:
            return self.output_table
        fields = []
        for col_spec in project:
            col = col_spec.column
            if col.name not in fields:
                fields.append(col.schema_string())
        name = '-'.join([w._name for w in self.wheres])[:64]
        # append a 3-digit random suffix to avoid name collision
        self.output_table = Table(name="sub-%s-%03d" % (name, random.randint(0, 999)),
                                  fields=fields)
        return self.output_table

    def _get_table(self, obj):
        """If obj is a table return its name otherwise figure out what it is and return the tablename"""
        if isinstance(obj, Marble):
            return obj
        else:
            return obj.table

    def _resolve(self, cols, check, types=(Column, Aggregation)):
        rval = []
        for i, col in enumerate(cols):
            if isinstance(col, types):
                rval.append(col)
            elif isinstance(col, basestring):
                selectcol = next((c for c in check if c.name == col or c.fullname == col), None)
                if selectcol:
                    rval.append(selectcol)
            elif isinstance(col, int):
                if col < len(check):
                    rval.append(check[col])
        return rval

    def _get_key_names(self, project, join):
        result = []
        for where in self.wheres:
            table_name = self._get_table(where)._name
            rval = []
            if join:
                join_column = next(c.name for c in join if c.table._name == table_name)
                rval.append(join_column)
            rval += tuple(c.column.name if c.table and c.table._name == table_name else None for c in project)
            result.append(rval)
        return result

    def __init__(self,
                 master,
                 wheres,
                 project=(),
                 order_by=(),
                 join=(),
                 distinct=False,
                 desc=False,
                 limit=0,
                 partition=0,
                 nest=False,
                 pre_order_stage=()):
        from hustle.core.pipeworker import Worker

        super(SelectPipe, self).__init__(master=master, worker=Worker())
        self.wheres = wheres
        self.order_by = self._resolve(order_by, project)
        partition = partition or _NPART
        binaries = [i for i, c in enumerate(project) if isinstance(c, (Column, Aggregation)) and c.is_binary]
        # if nest is true, use output_schema to store the output table
        self.output_table = None

        # build the pipeline
        select_hash_cols = ()
        sort_range = _get_sort_range(0, project, self.order_by)

        join_stage = []
        if join:
            joinbins = [i + 2 for i in binaries]
            join_stage = [
                (GROUP_LABEL, HustleStage('join',
                                          sort=(1, 0),
                                          binaries=joinbins,
                                          process=partial(process_join,
                                                          label_fn=partial(_tuple_hash,
                                                                           cols=sort_range,
                                                                           p=partition))))]
            select_hash_cols = (1,)

        efs, gees, ehches, dflts = zip(*[(c.f, c.g, c.h, c.default)
                                         if isinstance(c, Aggregation) else (None, None, None, None)
                                         for c in project])
        group_by_stage = []
        if any(efs):
            # If all columns in project are aggregations, use process_skip_group
            # to skip the internal groupby
            if all([isinstance(c, Aggregation) for c in project]):
                process_group_fn = process_skip_group
                group_by_range = []
            else:
                process_group_fn = process_group
                group_by_range = [i for i, c in enumerate(project) if isinstance(c, Column)]

            # build the pipeline
            group_by_stage = [
                (GROUP_LABEL_NODE, HustleStage('group-combine',
                                               sort=group_by_range,
                                               binaries=binaries,
                                               process=partial(process_group_fn,
                                                               ffuncs=efs,
                                                               ghfuncs=ehches,
                                                               deffuncs=dflts,
                                                               label_fn=partial(_tuple_hash,
                                                                                cols=group_by_range,
                                                                                p=partition)))),
                # A Hack here that overrides disco stage's default option 'combine'.
                # Hustle needs all inputs with the same label to be combined.
                (GROUP_LABEL, HustleStage('group-reduce',
                                          input_sorted=True,
                                          combine=True,
                                          sort=group_by_range,
                                          process=partial(process_group_fn,
                                                          ffuncs=efs,
                                                          ghfuncs=gees,
                                                          deffuncs=dflts)))]

        # process the order_by/distinct stage
        order_stage = []
        if self.order_by or distinct or limit:
            order_stage = [
                (GROUP_LABEL_NODE, HustleStage('order-combine',
                                               sort=sort_range,
                                               binaries=binaries,
                                               desc=desc,
                                               process=partial(process_order,
                                                               distinct=distinct,
                                                               limit=limit or sys.maxint))),
                (GROUP_ALL, HustleStage('order-reduce',
                                        sort=sort_range,
                                        desc=desc,
                                        input_sorted=True,
                                        combine_labels=True,
                                        process=partial(process_order,
                                                        distinct=distinct,
                                                        limit=limit or sys.maxint))),
            ]

        if not select_hash_cols:
            select_hash_cols = sort_range

        pipeline = [(SPLIT, HustleStage('restrict-select',
                                        process=partial(process_restrict,
                                                        label_fn=partial(_tuple_hash,
                                                                         cols=select_hash_cols,
                                                                         p=partition)),
                                        input_chain=[task_input_stream,
                                                     partial(hustle_input_stream,
                                                             wheres=wheres,
                                                             gen_where_index=join,
                                                             key_names=self._get_key_names(project, join))]))
                    ] + join_stage + group_by_stage + list(pre_order_stage) + order_stage

        # determine the style of output (ie. if it is a Hustle Table), and modify the last stage accordingly
        if nest:
            pipeline[-1][1].output_chain = [partial(hustle_output_stream, result_table=self.get_result_schema(project))]
        self.pipeline = pipeline


def _tuple_hash(key, cols, p):
    r = 0
    for c in cols:
        r ^= hash(key[c])
    return r % p
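
# For example, _tuple_hash(('a', 'b', 'c'), cols=(0, 2), p=16) XORs hash('a')
# with hash('c') and reduces the result modulo 16, so rows that agree on the
# selected columns always land on the same partition label.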
def process_restrict(interface, state, label, inp, task, label_fn):
    from disco import util

    # inp contains a set of replicas, let's force local #HACK
    input_processed = False
    for i, inp_url in inp.input.replicas:
        scheme, (netloc, port), rest = util.urlsplit(inp_url)
        if netloc == task.host:
            input_processed = True
            inp.input = inp_url
            break

    if not input_processed:
        raise Exception("Input %s not processed, no LOCAL resource found." % str(inp.input))

    for key, value in inp:
        out_label = label_fn(key)
        # print "RESTRICT: %s %s" % (key, value)
        interface.output(out_label).add(key, value)


def process_join(interface, state, label, inp, task, label_fn):
    '''Processor function for the join stage.

    Note that each key in the 'inp' is organized as:
        key = (where_index, join_column, other_columns)
    Firstly, all keys are divided into different groups based on the join_column.
    Then the where_index is used to separate keys from different where clauses.
    Finally, the columns are merged together.
    '''
    from itertools import groupby

    def _merge_record(offset, r1, r2):
        return [i if i is not None else j for i, j in zip(r1[offset:], r2[offset:])]

    # inp is a list of (key, value) tuples, the join_column is the 2nd item of the key.
    for joinkey, rest in groupby(inp, lambda k: k[0][1]):
        # To process this join key, we must have values from both tables
        first_table = []
        for record, value in rest:
            # Grab all records from first table by using where index
            if record[0] == 0:
                first_table.append(record)
            else:
                if not len(first_table):
                    break
                # merge each record from table 2 with all records from table 1
                for first_record in first_table:
                    # dispose of the where_index and join column
                    newrecord = _merge_record(2, first_record, record)
                    newlabel = label_fn(newrecord)
                    # print "JOIN: %s %s %s" % (newrecord, first_record, record)
                    interface.output(newlabel).add(newrecord, value)
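
# For example, with two where clauses joined on one column, the keys
# (0, 'k1', a1, None) and (1, 'k1', None, b1) group together on 'k1' and are
# merged by _merge_record into [a1, b1] before being relabeled and emitted.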
def process_order(interface, state, label, inp, task, distinct, limit):
    from itertools import groupby, islice
    empty = ()
    if distinct:
        for uniqkey, _ in islice(groupby(inp, lambda (k, v): tuple(k)), 0, limit):
            # print "ORDERED %s" % repr(uniqkey)
            interface.output(label).add(uniqkey, empty)
    else:
        for key, value in islice(inp, 0, limit):
            # print "ORDERED %s" % repr(key)
            interface.output(label).add(key, value)


def process_group(interface, state, label, inp, task, ffuncs, ghfuncs, deffuncs, label_fn=None):
    """Process function of aggregation combine stage.
    """
    from itertools import groupby
    import copy

    empty = ()
    # import sys
    # sys.path.append('/Library/Python/2.7/site-packages/pycharm-debug.egg')
    # import pydevd
    # pydevd.settrace('localhost', port=12999, stdoutToServer=True, stderrToServer=True)
    baseaccums = [default() if default else None for default in deffuncs]
    # print "Base: %s" % repr(baseaccums)

    # pull the key apart
    for group, tups in groupby(inp, lambda (k, _): tuple([e if ef is None else None for e, ef in zip(k, ffuncs)])):
        accums = copy.copy(baseaccums)
        for record, _ in tups:
            # print "REC: %s" % repr(record)
            try:
                accums = [f(a, v) if f and a is not None else None
                          for f, a, v in zip(ffuncs, accums, record)]
            except Exception as e:
                print e
                print "YOLO: f=%s a=%s r=%s g=%s" % (ffuncs, accums, record, group)
                raise e
        accum = [h(a) if h else None for h, a in zip(ghfuncs, accums)]
        if label_fn:
            label = label_fn(group)
        key = tuple(g or a for g, a in zip(group, accum))
        # print "KEY: %s" % repr(key)
        interface.output(label).add(key, empty)


def process_skip_group(interface, state, label, inp, task, ffuncs, ghfuncs, deffuncs, label_fn=None):
    """Process function of aggregation combine stage without groupby.
    """
    empty = ()
    accums = [default() if default else None for default in deffuncs]
    for record, _ in inp:
        try:
            accums = [f(a, v) if f and a is not None else None
                      for f, a, v in zip(ffuncs, accums, record)]
        except Exception as e:
            raise e
    accum = [h(a) if h else None for h, a in zip(ghfuncs, accums)]
    interface.output(0).add(tuple(accum), empty)


def _get_sort_range(select_offset, select_columns, order_by_columns):
    # sort by all
    sort_range = [i + select_offset for i, c in enumerate(select_columns) if isinstance(c, Column) and not c.is_binary]
    if order_by_columns:
        scols = ["%s%s" % (c.table._name if c.table else '', c.name) for c in select_columns]
        ocols = ["%s%s" % (c.table._name if c.table else '', c.name) for c in order_by_columns]
        rcols = set(scols) - set(ocols)
        # make sure to include the columns *not* in the order_by expression as well
        # this is to ensure that 'distinct' will work
        sort_range = tuple(select_offset + scols.index(c) for c in ocols) +\
            tuple(select_offset + scols.index(c) for c in rcols)
    return sort_range
| txiner/db-xiner | hustle/core/pipeline.py | Python | mit | 19,643 |
import unittest
import os
import subprocess
from test.stub_config import StubConfig, StubConfigOnStubProject
from test.stub_environment import StubEnvironment
from test.stub_stdout import StubStdout
from googkit.commands.update_deps import UpdateDepsCommand
from googkit.compat.unittest import mock
class TestUpdateDepsCommand(unittest.TestCase):
    def setUp(self):
        self.env = StubEnvironment()
        self.cmd = UpdateDepsCommand(self.env)
        self.cmd.config = StubConfig()

    def test_needs_project_config(self):
        self.assertTrue(UpdateDepsCommand.needs_project_config())

    def test_update_deps_js(self):
        MockPopen = mock.MagicMock()
        MockPopen.return_value.returncode = 0
        with mock.patch('subprocess.Popen', new=MockPopen) as mock_popen:
            self.cmd.update_deps()
            arg_format_dict = {
                'depswriter_path': StubConfig.DEPSWRITER,
                'js_dev_path': StubConfig.JS_DEV_DIR,
                'relpath_from_base_js_to_js_dev': os.path.relpath(StubConfig.JS_DEV_DIR, os.path.dirname(StubConfig.BASE_JS)),
                'deps_js_path': StubConfig.DEPS_JS
            }
            expected = ' '.join([
                'python',
                '{depswriter_path}',
                '--root_with_prefix="{js_dev_path}',
                '{relpath_from_base_js_to_js_dev}"',
                '--output_file="{deps_js_path}"'
            ]).format(**arg_format_dict)
            mock_popen.assert_called_once_with(expected, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)

    def test_update_tests(self):
        self.assertEqual(
            self.cmd.update_tests('DUMMY', ['dummy1', 'dummy2']),
            'var testFiles = [\'dummy1\', \'dummy2\'];')
        self.assertEqual(
            self.cmd.update_tests('DUMMY', []),
            'var testFiles = [];')

    def test_update_testrunner(self):
        # Use stub config for stub project directories.
        self.cmd.config = StubConfigOnStubProject()
        self.cmd.update_tests = mock.MagicMock()
        self.cmd.update_tests.return_value = 'changed'
        # Data will be given by open with for-in statement
        read_data = '''\
DUMMY
 change me/*@test_files@*/
DUMMY'''
        # Expected data for write()
        expected_wrote = '''\
DUMMY
changed/*@test_files@*/
DUMMY'''
        # Use mock_open
        mock_open = mock.mock_open(read_data=read_data)
        # Context Manager is a return value of the mock_open.__enter__
        mock_fp = mock_open.return_value.__enter__.return_value
        # Read lines have "\n" at the end of each line
        mock_fp.__iter__.return_value = iter([(line + '\n') for line in read_data.split('\n')])
        with mock.patch('googkit.commands.update_deps.open', mock_open, create=True), \
                mock.patch('os.path.exists') as mock_exists:
            mock_exists.return_value = True
            self.cmd.update_testrunner()
        # Expected the path is a relative path from all_tests.html to js_dev/example_test.html
        expected_file = os.path.join('js_dev', 'example_test.html')
        self.cmd.update_tests.assert_called_once_with(
            ' change me/*@test_files@*/\n',
            [expected_file])
        # Expected open was called twice (for reading and writing)
        mock_open.assert_any_call(StubConfigOnStubProject.TESTRUNNER)
        mock_open.assert_any_call(StubConfigOnStubProject.TESTRUNNER, 'w')
        self.assertEqual(mock_open.call_count, 2)
        # Expected correct data was written
        self.assertEqual(
            mock_fp.write.call_args_list,
            [mock.call(line + '\n',) for line in expected_wrote.split('\n')])

    def test_run_internal(self):
        dummy_project_root = os.path.normcase('/dir1/dir2')
        self.cmd.update_deps = mock.MagicMock()
        self.cmd.update_testrunner = mock.MagicMock()
        with mock.patch('sys.stdout', new_callable=StubStdout), \
                mock.patch('googkit.lib.path.project_root', return_value=dummy_project_root), \
                mock.patch('googkit.commands.update_deps.working_directory'):
            self.cmd.run_internal()
        self.cmd.update_deps.assert_called_once_with()
        self.cmd.update_testrunner.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()
| googkit/googkit | test/commands/test_update_deps.py | Python | mit | 4,284 |
"""Context processors, these get called and add things to template contexts"""
from django.conf import settings
def analytics_and_ads(request):
    """ Adds the google analytics code to the context """
    out = {}
    if request.user.is_authenticated() and request.user.settings.no_analytics:
        out["analytics_code"] = ""
    else:
        out["analytics_code"] = settings.ANALYTICS_CODE
    if request.user.is_authenticated() and request.user.settings.no_ads:
        out["ad_client"] = ""
    else:
        out["ad_client"] = settings.AD_CLIENT
        out["ad_slot_top"] = settings.AD_SLOT_TOP
        out["ad_slot_bottom"] = settings.AD_SLOT_BOTTOM
    return out
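
# With this processor listed in the project's template context processors
# setting, templates can use e.g. {{ analytics_code }}, {{ ad_client }},
# {{ ad_slot_top }} and {{ ad_slot_bottom }} directly.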
def add_webstore_url(request):
    return {"webstore_url": settings.CHROME_EXTENSION_WEBSTORE}
| RossBrunton/BMAT | bmat/context_processors.py | Python | mit | 786 |
# jsb socket related plugins
#
#
""" this package contains all the socket related plugins. """
import os
(f, tail) = os.path.split(__file__)
__all__ = []
for i in os.listdir(f):
    if i.endswith('.py'):
        __all__.append(i[:-3])
    elif os.path.isdir(f + os.sep + i) and not i.startswith('.'):
        __all__.append(i)

try:
    __all__.remove('__init__')
except:
    pass

__plugs__ = __all__
| melmothx/jsonbot | jsb/plugs/socket/__init__.py | Python | mit | 406 |
import subprocess
def runBash(cmd):
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    out = p.stdout.read().strip()
    return out
| chasemp/sup | suplib/run.py | Python | mit | 152 |
from django.apps import AppConfig
class BattlenetclientConfig(AppConfig):
    name = 'battlenetclient'
| maksimbulva/sc2streamhelper_info | battlenetclient/apps.py | Python | mit | 105 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# PEP 8 check with Pylint
"""Word to pinyin.
"""
from numpy import mat, zeros, where
from pypinyin import pinyin, lazy_pinyin
# from .mytools import time_me
def sum_cosine(matrix, threshold):
    """Calculate the parameters of the semantic Jaccard model based on the
    Cosine similarity matrix of semantic word segmentation.

    Args:
        matrix: Semantic Cosine similarity matrix of the two segmented sentences.
        threshold: Threshold a pair must reach to count as a semantic match.

    Returns:
        total: The semantic intersection of the two sets of sentence fragments.
        num_not_match: The number of fragments that fail the semantic matching
            criterion controlled by the threshold, taking the larger of the
            two sets.
        total_dif: The degree of semantic difference between the two sets.
    """
    total = 0
    count = 0
    row = matrix.shape[0]
    col = matrix.shape[1]
    zero_row = zeros([1, col])
    zero_col = zeros([row, 1])
    max_score = matrix.max()
    while max_score > threshold:
        total += max_score
        count += 1
        pos = where(matrix == max_score)
        i = pos[0][0]
        j = pos[1][0]
        matrix[i, :] = zero_row
        matrix[:, j] = zero_col
        max_score = matrix.max()
    num = (row - count) if row > col else (col - count)
    return dict(total=total, num_not_match=num, total_dif=max_score)


def match_pinyin(pinyin1, pinyin2):
    """Similarity score between two pinyin strings.
    """
    assert pinyin1 != "", "pinyin1 can not be empty"
    assert pinyin2 != "", "pinyin2 can not be empty"
    pv_match = 0
    if len(pinyin1) < len(pinyin2):
        len_short = len(pinyin1)
        len_long = len(pinyin2)
        pv_long = pinyin2
        pv_short = pinyin1
    else:
        len_short = len(pinyin2)
        len_long = len(pinyin1)
        pv_long = pinyin1
        pv_short = pinyin2
    for i in range(0, len_short):
        if pv_short[i] == pv_long[i]:
            pv_match += 1
    score = pv_match / len_long
    return score
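
# For example, match_pinyin("zhan", "zhang") compares position by position:
# 4 of the longer string's 5 characters match, giving a score of 4/5 = 0.8.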
def jaccard_pinyin(pv1, pv2, threshold=0.7):
    """Similarity score between two pinyin vectors with jaccard.

    Computes the similarity according to the semantic Jaccard model.
    The similarity score for each pair of pinyin sentences lies in [0, 1].
    """
    sv_matrix = []
    sv_rows = []
    for pinyin1 in pv1:
        for pinyin2 in pv2:
            score = match_pinyin(pinyin1, pinyin2)
            sv_rows.append(score)
        sv_matrix.append(sv_rows)
        sv_rows = []
    matrix = mat(sv_matrix)
    result = sum_cosine(matrix, threshold)
    total = result["total"]
    total_dif = result["total_dif"]
    num = result["num_not_match"]
    sim = total / (total + num * (1 - total_dif))
    return sim


def pinyin_cut(sentence, pattern=None):
    """Cut the sentence into a pinyin vector.
    """
    return lazy_pinyin(sentence)


# @time_me()
def similarity_pinyin(sentence1, sentence2):
    """Sentence similarity score based on pinyin vectors with jaccard.
    """
    pv1 = pinyin_cut(sentence1)
    pv2 = pinyin_cut(sentence2)
    return jaccard_pinyin(pv1, pv2)


if __name__ == '__main__':
    print(similarity_pinyin("我想办理粤通卡", "办理悦通卡"))
| Decalogue/chat | chat/word2pinyin.py | Python | mit | 4,070 |
######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
# MAKE GAIN TABLE FOR FLUX DENSITY BOOTSTRAPPING
# Make a gain table that includes gain and opacity corrections for final
# amp cal, for flux density bootstrapping
logprint ("Starting EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'start')
QA2_fluxgains='Pass'
#logprint ("Making fresh calibrators.ms", logfileout='logs/fluxgains.log')
#
#syscommand='rm -rf calibrators.ms'
#os.system(syscommand)
#
#default('split')
#vis=ms_active
#outputvis='calibrators.ms'
#datacolumn='corrected'
#field=''
#spw=''
#width=int(max(channels))
#antenna=''
#timebin='0s'
#timerange=''
#scan=calibrator_scan_select_string
#intent=''
#array=''
#uvrange=''
#correlation=''
#observation=''
#keepflags=False
#split()
logprint ("Setting models for standard primary calibrators", logfileout='logs/fluxgains.log')
tb.open('calibrators.ms')
positions = []
for ii in range(0,len(field_positions[0][0])):
    positions.append([field_positions[0][0][ii], field_positions[1][0][ii]])

standard_source_names = [ '3C48', '3C138', '3C147', '3C286' ]
standard_source_fields = find_standards(positions)

ii=0
for fields in standard_source_fields:
    for myfield in fields:
        spws = field_spws[myfield]
        for myspw in spws:
            reference_frequency = center_frequencies[myspw]
            EVLA_band = find_EVLA_band(reference_frequency)
            logprint ("Center freq for spw "+str(myspw)+" = "+str(reference_frequency)+", observing band = "+EVLA_band, logfileout='logs/fluxgains.log')
            model_image = standard_source_names[ii]+'_'+EVLA_band+'.im'
            logprint ("Setting model for field "+str(myfield)+" spw "+str(myspw)+" using "+model_image, logfileout='logs/fluxgains.log')
            try:
                default('setjy')
                vis='calibrators.ms'
                field=str(myfield)
                spw=str(myspw)
                selectdata=False
                scalebychan=True
                standard='Perley-Butler 2013'
                model=model_image
                listmodels=False
                usescratch=scratch
                setjy()
            except:
                logprint('no data found for field ' + str(myfield)+" spw "+str(myspw), logfileout='logs/fluxgains.log')
    ii=ii+1
tb.close()
logprint ("Making gain tables for flux density bootstrapping", logfileout='logs/fluxgains.log')
logprint ("Short solint = "+new_gain_solint1, logfileout='logs/fluxgains.log')
logprint ("Long solint = "+gain_solint2, logfileout='logs/fluxgains.log')
print ""
print "Finding a reference antenna"
print ""
refantspw=''
refantfield=calibrator_field_select_string
findrefant=RefAntHeuristics(vis='calibrators.ms',field=refantfield,geometry=True,flagging=True)
RefAntOutput=findrefant.calculate()
refAnt=str(RefAntOutput[0])+','+str(RefAntOutput[1])+','+str(RefAntOutput[2])+','+str(RefAntOutput[3])
logprint ("The pipeline will use antenna(s) "+refAnt+" as the reference", logfileout='logs/fluxgains.log')
# Derive amp gain table. Note that gaincurves and opacity
# corrections have already been applied during applycal and split in
# semiFinalBPdcals/solint.py.
# Need to add check for 3C84 in here, when heuristics have been sorted out
default('gaincal')
vis='calibrators.ms'
caltable='fluxphaseshortgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=new_gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b'])
gaintable=['']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='fluxgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
gaintype='G'
smodel=[]
calmode='ap'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b','fluxphaseshortgaincal.g'])
gaintable=['fluxphaseshortgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
logprint ("Gain table fluxgaincal.g is ready for flagging", logfileout='logs/fluxgains.log')
# Calculate fractions of flagged solutions for final QA2; note, can
# tolerate higher fraction of flagged solutions for this step than in
# other gain tables
flaggedGainSolns=getCalFlaggedSoln('fluxgaincal.g')
if (flaggedGainSolns['all']['total'] == 0):
QA2_fluxgains='Fail'
elif (flaggedGainSolns['antmedian']['fraction'] > 0.2):
QA2_fluxgains='Partial'
logprint ("QA2 score: "+QA2_fluxgains, logfileout='logs/fluxgains.log')
logprint ("Finished EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'end')
pipeline_save()
| e-koch/VLA_Lband | 16B/pipeline4.7.1_custom/EVLA_pipe_fluxgains.py | Python | mit | 6,065 |
from setuptools import setup
setup(
name="python-cmr",
version="0.4.1",
license="MIT",
url="https://github.com/jddeal/python-cmr",
description="Python wrapper to the NASA Common Metadata Repository (CMR) API.",
long_description=open("README.rst").read(),
author="Justin Deal, Matt Isnor",
author_email="deal.justin@gmail.com, isnor.matt@gmail.com",
packages=["cmr"],
install_requires=[
"requests",
]
)
| jddeal/python-cmr | setup.py | Python | mit | 455 |
import wx # type: ignore
from gooey.gui import formatters
from gooey.gui.components.widgets.bases import TextContainer
from gooey.python_bindings import types as t
class Slider(TextContainer):
"""
    A slider widget for integer input
"""
widget_class = wx.Slider
def getWidget(self, *args, **options):
widget = self.widget_class(self,
minValue=self._options.get('min', 0),
maxValue=self._options.get('max', 100),
style=wx.SL_MIN_MAX_LABELS | wx.SL_VALUE_LABEL)
return widget
def getWidgetValue(self):
return self.widget.GetValue()
def setValue(self, value):
self.widget.SetValue(value)
    def formatOutput(self, metadata, value):
        return formatters.general(metadata, str(value))
def getUiState(self) -> t.FormField:
widget: wx.Slider = self.widget
return t.Slider(
id=self._id,
type=self.widgetInfo['type'],
value=self.getWidgetValue(),
min=widget.GetMin(),
max=widget.GetMax(),
error=self.error.GetLabel() or None,
enabled=self.IsEnabled(),
visible=self.IsShown()
)
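# A hypothetical Gooey argument wired to this widget (the 'min'/'max'
# options are the ones read in getWidget above; the argparse setup itself
# is illustrative):
#
#   parser.add_argument('--level', widget='Slider',
#                       gooey_options={'min': 0, 'max': 255})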
| chriskiehl/Gooey | gooey/gui/components/widgets/slider.py | Python | mit | 1,260 |
import kronos
import random
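# '0 0 * * *' is standard cron syntax (minute hour day month weekday):
# run once a day at midnight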
@kronos.register('0 0 * * *')
def complain():
complaints = [
"I forgot to migrate our applications's cron jobs to our new server! Darn!",
"I'm out of complaints! Damnit!"
]
    print(random.choice(complaints))
| Weatherlyzer/weatherlyzer | base/cron.py | Python | mit | 264 |
#
# IIT Kharagpur - Hall Management System
# System to manage Halls of residences, Warden grant requests, student complaints
# hall worker attendances and salary payments
#
# MIT License
#
"""
@ authors: Madhav Datt, Avikalp Srivastava
"""
from ..database import db_func as db
from ..database import password_validation as pv
import worker
class MessManager(worker.Worker):
"""Contains details of Worker Instance
Attributes:
worker_ID: Integer to uniquely identify worker
name: String
hall_ID: Integer to uniquely identify hall
monthly_salary: Float
"""
def __init__(self, name, hall_ID, password, monthly_salary,
rebuild=False, worker_ID=None):
"""
Init MessManager with details as recruited by HMC or Warden
"""
# The rebuild flag, if true, denotes that the object is being made from
# data already present in the database
# If False, a new data row is added to the specific table
if not rebuild:
self.worker_ID = db.add("worker")
db.update("worker", self.worker_ID, "worker_type", "M")
self.password = password
else:
self.worker_ID = worker_ID
self._password = password
self.monthly_salary = monthly_salary
worker.Worker.__init__(self, self.worker_ID, name, hall_ID)
# password getter and setter functions
@property
def password(self):
return self._password
@password.setter
def password(self, password):
self._password = pv.hash_password(password)
db.update("worker", self.worker_ID, "password", self.password)
# monthly_salary getter and setter functions
@property
def monthly_salary(self):
return self._monthly_salary
@monthly_salary.setter
def monthly_salary(self, monthly_salary):
self._monthly_salary = monthly_salary
db.update("worker", self.worker_ID, "monthly_salary", self.monthly_salary)
def compute_mess_payment(self, student_table):
"""
Compute total money due to hall in form of mess payments
Sum of each student resident's mess charge
Pass parameter student_table = dbr.rebuild("student")
"""
mess_total = 0.
for key in student_table:
if student_table[key].hall_ID == self.hall_ID:
mess_total = mess_total + student_table[key].mess_charge
return mess_total
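# A minimal usage sketch (the `dbr.rebuild("student")` helper is the one
# referenced in compute_mess_payment's docstring; values illustrative):
#
#   manager = MessManager("A. Cook", hall_ID=2, password="s3cret",
#                         monthly_salary=12000.0)
#   total_due = manager.compute_mess_payment(dbr.rebuild("student"))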
| madhav-datt/kgp-hms | src/workers/mess_manager.py | Python | mit | 2,480 |
"""Parent class and utility class for producing a scansion pattern for a line of Latin verse.
Some useful methods
* Perform a conservative i to j transformation
* Performs elisions
* Accents vowels by position
* Breaks the line into a list of syllables by calling a Syllabifier class which may be injected
into this classes constructor.
"""
import logging
import re
from typing import Any, Dict, List
import cltk.prosody.lat.string_utils as string_utils
from cltk.prosody.lat.metrical_validator import MetricalValidator
from cltk.prosody.lat.scansion_constants import ScansionConstants
from cltk.prosody.lat.scansion_formatter import ScansionFormatter
from cltk.prosody.lat.syllabifier import Syllabifier
from cltk.prosody.lat.verse import Verse
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
__author__ = ["Todd Cook <todd.g.cook@gmail.com>"]
__license__ = "MIT License"
class VerseScanner:
"""
The scansion symbols used can be configured by passing a suitable constants class to
the constructor.
"""
def __init__(
self, constants=ScansionConstants(), syllabifier=Syllabifier(), **kwargs
):
self.constants = constants
self.remove_punct_map = string_utils.remove_punctuation_dict()
self.punctuation_substitutions = string_utils.punctuation_for_spaces_dict()
self.metrical_validator = MetricalValidator(constants)
self.formatter = ScansionFormatter(constants)
self.syllabifier = syllabifier
self.inverted_amphibrach_re = re.compile(
r"{}\s*{}\s*{}".format(
self.constants.STRESSED,
self.constants.UNSTRESSED,
self.constants.STRESSED,
)
)
self.syllable_matcher = re.compile(
r"[{}]".format(
self.constants.VOWELS
+ self.constants.ACCENTED_VOWELS
+ self.constants.LIQUIDS
+ self.constants.MUTES
)
)
def transform_i_to_j(self, line: str) -> str:
"""
Transform instances of consonantal i to j
:param line:
:return:
>>> print(VerseScanner().transform_i_to_j("iactātus"))
jactātus
>>> print(VerseScanner().transform_i_to_j("bracchia"))
bracchia
"""
words = line.split(" ")
space_list = string_utils.space_list(line)
corrected_words = []
for word in words:
found = False
for prefix in self.constants.PREFIXES:
if word.startswith(prefix) and word != prefix:
corrected_words.append(
self.syllabifier.convert_consonantal_i(prefix)
)
corrected_words.append(
self.syllabifier.convert_consonantal_i(word[len(prefix) :])
)
found = True
break
if not found:
corrected_words.append(self.syllabifier.convert_consonantal_i(word))
new_line = string_utils.join_syllables_spaces(corrected_words, space_list)
char_list = string_utils.overwrite(
list(new_line),
r"\b[iī][{}]".format(
self.constants.VOWELS + self.constants.ACCENTED_VOWELS
),
"j",
)
char_list = string_utils.overwrite(
char_list, r"\b[I][{}]".format(self.constants.VOWELS_WO_I), "J"
)
char_list = string_utils.overwrite(
char_list,
r"[{}][i][{}]".format(self.constants.VOWELS_WO_I, self.constants.VOWELS),
"j",
1,
)
return "".join(char_list)
def transform_i_to_j_optional(self, line: str) -> str:
"""
Sometimes for the demands of meter a more permissive i to j transformation is warranted.
:param line:
:return:
>>> print(VerseScanner().transform_i_to_j_optional("Italiam"))
Italjam
>>> print(VerseScanner().transform_i_to_j_optional("Lāvīniaque"))
Lāvīnjaque
>>> print(VerseScanner().transform_i_to_j_optional("omnium"))
omnjum
"""
words = line.split(" ")
space_list = string_utils.space_list(line)
corrected_words = []
for word in words:
found = False
for prefix in self.constants.PREFIXES:
if word.startswith(prefix) and word != prefix:
corrected_words.append(
self.syllabifier.convert_consonantal_i(prefix)
)
corrected_words.append(
self.syllabifier.convert_consonantal_i(word[len(prefix) :])
)
found = True
break
if not found:
corrected_words.append(self.syllabifier.convert_consonantal_i(word))
new_line = string_utils.join_syllables_spaces(corrected_words, space_list)
# the following two may be tunable and subject to improvement
char_list = string_utils.overwrite(
list(new_line),
"[bcdfgjkmpqrstvwxzBCDFGHJKMPQRSTVWXZ][i][{}]".format(
self.constants.VOWELS_WO_I
),
"j",
1,
)
char_list = string_utils.overwrite(
char_list,
"[{}][iI][{}]".format(self.constants.LIQUIDS, self.constants.VOWELS_WO_I),
"j",
1,
)
return "".join(char_list)
def accent_by_position(self, verse_line: str) -> str:
"""
Accent vowels according to the rules of scansion.
:param verse_line: a line of unaccented verse
:return: the same line with vowels accented by position
>>> print(VerseScanner().accent_by_position(
... "Arma virumque cano, Troiae qui primus ab oris").lstrip())
Ārma virūmque canō Trojae qui primus ab oris
"""
line = verse_line.translate(self.punctuation_substitutions)
line = self.transform_i_to_j(line)
marks = list(line)
        # locate and save diphthong positions since we don't want them being accented
dipthong_positions = []
for dipth in self.constants.DIPTHONGS:
if dipth in line:
dipthong_positions.append(line.find(dipth))
# Vowels followed by 2 consonants
# The digraphs ch, ph, th, qu and sometimes gu and su count as single consonants.
# see http://people.virginia.edu/~jdk3t/epicintrog/scansion.htm
marks = string_utils.overwrite(
marks,
"[{}][{}][{}]".format(
self.constants.VOWELS,
self.constants.CONSONANTS,
self.constants.CONSONANTS_WO_H,
),
self.constants.STRESSED,
)
# one space (or more for 'dropped' punctuation may intervene)
marks = string_utils.overwrite(
marks,
r"[{}][{}]\s*[{}]".format(
self.constants.VOWELS,
self.constants.CONSONANTS,
self.constants.CONSONANTS_WO_H,
),
self.constants.STRESSED,
)
# ... if both consonants are in the next word, the vowel may be long
# .... but it could be short if the vowel is not on the thesis/emphatic part of the foot
# ... see Gildersleeve and Lodge p.446
marks = string_utils.overwrite(
marks,
r"[{}]\s*[{}][{}]".format(
self.constants.VOWELS,
self.constants.CONSONANTS,
self.constants.CONSONANTS_WO_H,
),
self.constants.STRESSED,
)
# x is considered as two letters
marks = string_utils.overwrite(
marks, "[{}][xX]".format(self.constants.VOWELS), self.constants.STRESSED
)
# z is considered as two letters
marks = string_utils.overwrite(
marks, r"[{}][zZ]".format(self.constants.VOWELS), self.constants.STRESSED
)
original_verse = list(line)
for idx, word in enumerate(original_verse):
if marks[idx] == self.constants.STRESSED:
original_verse[idx] = self.constants.VOWELS_TO_ACCENTS[
original_verse[idx]
]
        # make sure diphthongs aren't accented
for idx in dipthong_positions:
if original_verse[idx + 1] in self.constants.ACCENTS_TO_VOWELS:
original_verse[idx + 1] = self.constants.ACCENTS_TO_VOWELS[
original_verse[idx + 1]
]
return "".join(original_verse)
def elide_all(self, line: str) -> str:
"""
Given a string of space separated syllables, erase with spaces the syllable portions
that would disappear according to the rules of elision.
:param line:
:return:
"""
marks = list(line.translate(self.remove_punct_map))
all_vowels = self.constants.VOWELS + self.constants.ACCENTED_VOWELS
tmp = "".join(marks)
        # Elision rules are compound but not cumulative: we place all elision edits into a list
# of candidates, and then merge, taking the least of each section of the line.
candidates = [
tmp,
self.elide(
tmp,
r"[{}][{}]\s+[{}]".format(
self.constants.CONSONANTS, all_vowels, all_vowels
),
1,
1,
),
self.elide(
tmp,
r"[{}][{}]\s+[hH]".format(self.constants.CONSONANTS, all_vowels),
1,
1,
),
self.elide(tmp, r"[aāuū]m\s+[{}]".format(all_vowels), 2),
self.elide(tmp, r"ae\s+[{}]".format(all_vowels), 2),
self.elide(tmp, r"[{}]\s+[{}]".format(all_vowels, all_vowels), 1),
self.elide(tmp, r"[uū]m\s+h", 2),
]
results = string_utils.merge_elisions(candidates)
return results
def calc_offset(self, syllables_spaces: List[str]) -> Dict[int, int]:
"""
Calculate a dictionary of accent positions from a list of syllables with spaces.
:param syllables_spaces:
:return:
"""
line = string_utils.flatten(syllables_spaces)
mydict = {} # type: Dict[int, int]
# #defaultdict(int) #type: Dict[int, int]
for idx, syl in enumerate(syllables_spaces):
target_syllable = syllables_spaces[idx]
skip_qu = string_utils.starts_with_qu(target_syllable)
matches = list(self.syllable_matcher.finditer(target_syllable))
for position, possible in enumerate(matches):
if skip_qu:
skip_qu = False
continue
(start, end) = possible.span()
if (
target_syllable[start:end]
in self.constants.VOWELS + self.constants.ACCENTED_VOWELS
):
part = line[: len("".join(syllables_spaces[:idx]))]
offset = len(part) + start
if (
line[offset]
not in self.constants.VOWELS + self.constants.ACCENTED_VOWELS
):
LOG.error("Problem at line {} offset {}".format(line, offset))
mydict[idx] = offset
return mydict
def produce_scansion(
self, stresses: list, syllables_wspaces: List[str], offset_map: Dict[int, int]
) -> str:
"""
Create a scansion string that has stressed and unstressed syllable positions in locations
that correspond with the original texts syllable vowels.
        :param stresses: list of syllable positions
        :param syllables_wspaces: list of syllables with spaces escaped for punctuation or elision
        :param offset_map: dictionary mapping syllable positions to an offset amount, which is
        the number of spaces to skip in the original line before inserting the accent.
"""
scansion = list(" " * len(string_utils.flatten(syllables_wspaces)))
unstresses = string_utils.get_unstresses(stresses, len(syllables_wspaces))
try:
for idx in unstresses:
location = offset_map.get(idx)
if location is not None:
scansion[location] = self.constants.UNSTRESSED
for idx in stresses:
location = offset_map.get(idx)
if location is not None:
scansion[location] = self.constants.STRESSED
except Exception as e:
LOG.error(
"problem with syllables; check syllabification {}, {}".format(
syllables_wspaces, e
)
)
return "".join(scansion)
def flag_dipthongs(self, syllables: List[str]) -> List[int]:
"""
        Return the indices of syllables that contain a diphthong
:param syllables:
:return:
"""
long_positions = []
for idx, syl in enumerate(syllables):
for dipthong in self.constants.DIPTHONGS:
if dipthong in syllables[idx]:
if not string_utils.starts_with_qu(syllables[idx]):
long_positions.append(idx)
return long_positions
def elide(self, line: str, regexp: str, quantity: int = 1, offset: int = 0) -> str:
"""
Erase a section of a line, matching on a regex, pushing in a quantity of blank spaces,
and jumping forward with an offset if necessary.
        If the elided vowel was strong, the vowel it merges with takes on the stress.
:param line:
:param regexp:
:param quantity:
:param offset:
:return:
>>> print(VerseScanner().elide("uvae avaritia", r"[e]\s*[a]"))
uv āvaritia
>>> print(VerseScanner().elide("mare avaritia", r"[e]\s*[a]"))
mar avaritia
"""
matcher = re.compile(regexp)
positions = matcher.finditer(line)
new_line = line
for match in positions:
(start, end) = match.span() # pylint: disable=unused-variable
if (start > 0) and new_line[
start - 1 : start + 1
] in self.constants.DIPTHONGS:
vowel_to_coerce = new_line[end - 1]
new_line = (
new_line[: (start - 1) + offset]
+ (" " * (quantity + 2))
+ self.constants.stress_accent_dict[vowel_to_coerce]
+ new_line[end:]
)
else:
new_line = (
new_line[: start + offset]
+ (" " * quantity)
+ new_line[start + quantity + offset :]
)
return new_line
def correct_invalid_start(self, scansion: str) -> str:
"""
        If a hexameter, hendecasyllable, or pentameter scansion starts with a spondee,
an unstressed syllable in the third position must actually be stressed,
so we will convert it: - - | U -> - - | -
:param scansion:
:return:
>>> print(VerseScanner().correct_invalid_start(
... " - - U U - - U U U U U U - -").strip())
- - - - - - U U U U U U - -
"""
mark_list = string_utils.mark_list(scansion)
raw_scansion = scansion.replace(" ", "")
if raw_scansion.startswith(self.constants.SPONDEE + self.constants.UNSTRESSED):
new_scansion = list(
self.constants.SPONDEE + self.constants.SPONDEE + raw_scansion[4:]
)
corrected = "".join(new_scansion)
new_sequence = list(" " * len(scansion))
for idx, car in enumerate(corrected):
new_sequence[mark_list[idx]] = car
return "".join(new_sequence)
return scansion
def correct_first_two_dactyls(self, scansion: str) -> str:
"""
If a hexameter or pentameter starts with spondee,
an unstressed syllable in the third position must actually be stressed,
so we will convert it: - - | U -> - - | -
And/or if the starting pattern is spondee + trochee + stressed, then the unstressed
        trochee can be corrected: - - | - u | - -> - - | - - | -
:param scansion:
:return:
>>> print(VerseScanner().correct_first_two_dactyls(
... " - - U U - - U U U U U U - -")) # doctest: +NORMALIZE_WHITESPACE
- - - - - - U U U U U U - -
"""
mark_list = string_utils.mark_list(scansion)
new_line = self.correct_invalid_start(scansion)
raw_scansion = new_line.replace(" ", "")
if raw_scansion.startswith(
self.constants.SPONDEE + self.constants.TROCHEE + self.constants.STRESSED
):
new_scansion = list(
self.constants.SPONDEE
+ self.constants.SPONDEE
+ self.constants.STRESSED
+ raw_scansion[5:]
)
corrected = "".join(new_scansion)
new_sequence = list(" " * len(scansion))
for idx, car in enumerate(corrected):
new_sequence[mark_list[idx]] = car
return "".join(new_sequence)
return new_line
def assign_candidate(self, verse: Verse, candidate: str) -> Verse:
"""
Helper method; make sure that the verse object is properly packaged.
:param verse:
:param candidate:
:return:
"""
verse.scansion = candidate
verse.valid = True
verse.accented = self.formatter.merge_line_scansion(
verse.original, verse.scansion
)
return verse
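# A rough end-to-end sketch combining the helpers above (individual results
# mirror this module's doctests; the composition itself is illustrative):
#
#   scanner = VerseScanner()
#   scanner.transform_i_to_j("iactātus")      # -> 'jactātus'
#   scanner.accent_by_position("Arma virumque cano, Troiae qui primus ab oris")
#   # a concrete meter scanner (e.g. for hexameter) would then syllabify the
#   # line and call produce_scansion() / assign_candidate() to build a Verse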
| D-K-E/cltk | src/cltk/prosody/lat/verse_scanner.py | Python | mit | 18,030 |
from concurrency.get_websites import get_number_of_links
import time
# Run get_number_of_links and compare it to a serial version
# stub out load_url with a sleep function so the time is always the same
# Show that the concurrent version takes less time than the serial
import unittest
from unittest.mock import patch, MagicMock
from bs4 import BeautifulSoup
from concurrency.get_websites import get_number_of_links, get_number_of_links_serial
class TestConcurrency(unittest.TestCase):
def setUp(self):
self.loadtime = 1
self.fake_urls = ['url1','url2', 'url3']
@patch('concurrency.get_websites.BeautifulSoup')
@patch('concurrency.get_websites.load_url')
    def test_concurrent_faster_than_serial(self, mock_load_url, bs_mock):
""" Time the collection of data from websites """
bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>")
bs_mock.return_value = bs_data
mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime)
concurrent_start = time.time()
list(get_number_of_links(self.fake_urls))
concurrent_total = time.time() - concurrent_start
serial_start = time.time()
get_number_of_links_serial(self.fake_urls)
serial_total = time.time() - serial_start
print("Concurrent collection: {}".format(concurrent_total))
print("Serial collection: {}".format(serial_total))
self.assertLess(concurrent_total, serial_total)
if __name__ == "__main__":
unittest.main() | b-ritter/python-notes | concurrency/tests_integration/test_collection_times.py | Python | mit | 1,528 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fiveLessons.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| rednerrus/fiveLessons | manage.py | Python | mit | 254 |
#!/usr/bin/env python
import argparse
import json
import time
import logging
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient
import RPi.GPIO as GPIO
parser = argparse.ArgumentParser(description='Lightbulb control unit.')
parser.add_argument('-e', '--endpoint', required=True, help='The AWS Iot endpoint.')
parser.add_argument('-r', '--rootCA', required=True, help='Root CA file path.')
parser.add_argument('-c', '--cert', required=True, help='Certificate file path.')
parser.add_argument('-k', '--key', required=True, help='Private key file path.')
args = parser.parse_args()
def lightbulbShadowCallback_Update(payload, responseStatus, token):
if responseStatus == "timeout":
print("Update request " + token + " time out!")
if responseStatus == "accepted":
payloadDict = json.loads(payload)
print("~~~~~~~~~~~~~~~~~~~~~~~")
print("Update request with token: " + token + " accepted!")
print("property: " + str(payloadDict["state"]["desired"]["color"]))
print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
if responseStatus == "rejected":
print("Update request " + token + " rejected!")
def lightBulbShadowCallback_Delete(payload, responseStatus, token):
if responseStatus == "timeout":
print("Delete request " + token + " time out!")
if responseStatus == "accepted":
print("~~~~~~~~~~~~~~~~~~~~~~~")
print("Delete request with token: " + token + " accepted!")
print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
if responseStatus == "rejected":
print("Delete request " + token + " rejected!")
# Configure logging
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# Init AWSIoTMQTTShadowClient
lightBulbShadowClient = AWSIoTMQTTShadowClient("controlUnitClient")
lightBulbShadowClient.configureEndpoint(args.endpoint, 8883)
lightBulbShadowClient.configureCredentials(args.rootCA, args.key, args.cert)
# AWSIoTMQTTShadowClient configuration
lightBulbShadowClient.configureAutoReconnectBackoffTime(1, 32, 20)
lightBulbShadowClient.configureConnectDisconnectTimeout(10) # 10 sec
lightBulbShadowClient.configureMQTTOperationTimeout(5) # 5 sec
# Connect to AWS IoT
lightBulbShadowClient.connect()
# Create a deviceShadow with persistent subscription
ControlUnit = lightBulbShadowClient.createShadowHandlerWithName("rpi-sense-hat", True)
# Delete shadow JSON doc
ControlUnit.shadowDelete(lightBulbShadowCallback_Delete, 5)
# Update shadow
def updateShadow(color):
JSONPayload = '{"state":{"desired":{"color":"' + color + '"}}}'
ControlUnit.shadowUpdate(JSONPayload, lightbulbShadowCallback_Update, 5)
RED = 9
GREEN = 10
BLUE = 11
GPIO.setmode(GPIO.BCM)
GPIO.setup(RED, GPIO.IN)
GPIO.setup(GREEN, GPIO.IN)
GPIO.setup(BLUE, GPIO.IN)
lastButton = None
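# remember the last button seen so a held-down button fires only one
# shadow update per press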
while True:
    if lastButton != RED and not GPIO.input(RED):
        lastButton = RED
        updateShadow("red")
    if lastButton != GREEN and not GPIO.input(GREEN):
        lastButton = GREEN
        updateShadow("green")
    if lastButton != BLUE and not GPIO.input(BLUE):
        lastButton = BLUE
        updateShadow("blue")
    time.sleep(0.05)
| stephenjelfs/aws-iot-gddev2016 | controlUnit.py | Python | mit | 3,371 |
# coding: utf-8
import threading
import time
from datetime import datetime
from flask import Flask
from influxdb import InfluxDBClient
from oslo_service import periodic_task
from oslo_config import cfg
from oslo_log import log
from oslo_service import service
from keystoneauth1.identity import v3
from keystoneauth1 import session
from keystoneclient.v3 import client as keystone_client
from neutronclient.v2_0 import client as neutron_client
from novaclient import client as nova_client
import glanceclient as glance_client
import config
wsgi_app = Flask(__name__)
CONF = cfg.CONF
LOG = log.getLogger(__name__)
metrics_map = {}
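# Shared in-memory store: written by the periodic check() tasks below and
# read by both the influxdb reporter and the prometheus exporter. A key may
# carry a ':<qualifier>' suffix, which is stripped before reporting. Each
# entry has the shape (values illustrative):
#
#   metrics_map['openstack_nova_flavor_list_latency'] = {
#       'tags': {'svc': 'nova'},
#       'value': 0.034,
#       'time': '2017-01-01T00:00:00Z',
#   }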
class ServiceManager(service.Service):
def __init__(self):
super(ServiceManager, self).__init__()
def start(self):
LOG.info('start')
if CONF.influxdb.enable:
self.influxdb_periodic_tasks = InfluxdbPeriodicTasks()
self.tg.add_dynamic_timer(self._get_influxdb_periodic_tasks,
initial_delay=0,
periodic_interval_max=120)
if not CONF.rabbitmq_manager.enable_prometheus_exporter:
self.prometheus_exporter_thread = self._spawn_prometheus_exporter()
else:
self.prometheus_exporter_thread = None
self.periodic_tasks = ServicePeriodicTasks()
self.tg.add_dynamic_timer(self._get_periodic_tasks,
initial_delay=0,
periodic_interval_max=120)
def wait(self):
LOG.info('wait')
def stop(self):
LOG.info('stop')
if self.prometheus_exporter_thread is not None:
self.prometheus_exporter_thread.join()
super(ServiceManager, self).stop()
def _get_periodic_tasks(self, raise_on_error=False):
ctxt = {}
return self.periodic_tasks.periodic_tasks(ctxt, raise_on_error=raise_on_error)
def _get_influxdb_periodic_tasks(self, raise_on_error=False):
ctxt = {}
return self.influxdb_periodic_tasks.periodic_tasks(ctxt, raise_on_error=raise_on_error)
def _spawn_prometheus_exporter(self):
t = threading.Thread(target=wsgi_app.run, kwargs={
'host': CONF.openstack_deploy_manager.bind_host,
'port': CONF.openstack_deploy_manager.bind_port
})
t.daemon = True
t.start()
return t
#
# influxdb reporter
#
class InfluxdbPeriodicTasks(periodic_task.PeriodicTasks):
def __init__(self):
super(InfluxdbPeriodicTasks, self).__init__(CONF)
self.influxdb = InfluxDBClient(
CONF.influxdb.host,
CONF.influxdb.port,
CONF.influxdb.user,
CONF.influxdb.password,
CONF.influxdb.database,
)
def periodic_tasks(self, context, raise_on_error=False):
return self.run_periodic_tasks(context, raise_on_error=raise_on_error)
@periodic_task.periodic_task(spacing=60)
def report(self, context):
LOG.info('Report metrics to influxdb')
json_body = []
for measurement, metrics in metrics_map.items():
json_body.append({
"measurement": measurement.split(':')[0],
"tags": metrics["tags"],
"fields": {
"value": metrics["value"],
}
})
if len(json_body) > 0:
self.influxdb.write_points(json_body)
#
# prometheus exporter
#
@wsgi_app.route("/")
def status():
return "OK"
@wsgi_app.route("/metrics")
def metrics():
pmetrics = ''
for measurement, metrics in metrics_map.items():
labels = ''
for k, v in metrics['tags'].items():
labels += '{0}="{1}",'.format(k, v)
labels = labels[:-1]
pmetrics += '{0}{{{1}}} {2}\n'.format(measurement.split(':')[0], labels, metrics['value'])
return pmetrics
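# Example exposition line produced by metrics() above (values illustrative):
#
#   openstack_keystone_service_list_latency{svc="keystone"} 0.12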
#
# service tasks
#
class ServicePeriodicTasks(periodic_task.PeriodicTasks):
def __init__(self):
super(ServicePeriodicTasks, self).__init__(CONF)
auth = v3.Password(auth_url=CONF.openstack_auth.auth_url,
username=CONF.openstack_auth.username,
password=CONF.openstack_auth.password,
project_name=CONF.openstack_auth.project_name,
user_domain_id=CONF.openstack_auth.user_domain_id,
project_domain_id=CONF.openstack_auth.project_domain_id,
)
sess = session.Session(auth=auth, verify=False)
self.keystone = keystone_client.Client(session=sess)
self.neutron = neutron_client.Client(session=sess)
self.nova = nova_client.Client('2.1', session=sess)
self.glance = glance_client.Client('2', session=sess)
def periodic_tasks(self, context, raise_on_error=False):
return self.run_periodic_tasks(context, raise_on_error=raise_on_error)
@periodic_task.periodic_task(spacing=30)
def check(self, context):
LOG.info('Start check openstack')
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
start_time = time.time()
self.keystone.services.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_keystone_service_list_latency'] = {
'tags': {"svc": "keystone"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.neutron.list_networks()
elapsed_time = time.time() - start_time
metrics_map['openstack_neutron_network_list_latency'] = {
'tags': {"svc": "neutron"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.nova.flavors.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_nova_flavor_list_latency'] = {
'tags': {"svc": "nova"},
'value': elapsed_time,
'time': timestamp,
}
start_time = time.time()
self.glance.images.list()
elapsed_time = time.time() - start_time
metrics_map['openstack_glance_image_list_latency'] = {
'tags': {"svc": "glance"},
'value': elapsed_time,
'time': timestamp,
}
LOG.info(metrics_map)
@periodic_task.periodic_task(spacing=30)
def check_k8s(self, context):
LOG.info('Start check k8s')
# TODO
def main():
config.init()
launcher = service.launch(CONF, ServiceManager())
launcher.wait()
if __name__ == '__main__':
main()
| syunkitada/openstack-helm | openstack/lib/k8s_openstack_monitor_manager.py | Python | mit | 6,662 |
# Imports
from django.conf.urls import url
from .models import OurFoto
from .views import HomeFoto, ShowFoto, DeleteFoto, AddFoto, \
EditFoto, SearchFoto
# Urls for app
urlpatterns = [
url(r'^$', HomeFoto.as_view(model = OurFoto), name = 'index'),
url(r'^foto/(?P<pk>\d+)/$', ShowFoto.as_view(model = OurFoto), name = 'foto'),
url(r'^add_foto/$', AddFoto.as_view(), name = 'add_foto'),
url(r'^edit_foto/(?P<pk>\d+)/$', EditFoto.as_view(model = OurFoto), name = 'edit_foto'),
url(r'^search_foto/$', SearchFoto.as_view(), name = 'search_foto'),
url(r'^delete_foto/(?P<pk>\d+)/$', DeleteFoto.as_view(model = OurFoto), name = 'delete_foto')
]
| Sergey19940808/OurFoto | repository_our_fotos/urls.py | Python | mit | 671 |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from .models import *
import requests
import json
user_id = '139169754@N02'
api_key = '41dd3aff041c00c52febdef9786a9ca0'
api_secret = '0f5a3b5047f760f7'
def index(request):
context = {}
context['photos'] = []
method = 'flickr.people.getPublicPhotos'
query = 'https://api.flickr.com/services/rest/?&method=%s&api_key=%s&user_id=%s&format=json&nojsoncallback=1'%(method, api_key, user_id)
query += '&extras=url_z'
response = requests.get(query)
if response.ok:
response = json.loads(response.text)
for link in response['photos']['photo']:
context['photos'].append(str(link['url_z']))
return render(request, 'photos/index.html', context)
| xurichard/mysite | photos/views.py | Python | mit | 753 |
"""
Tests for main.py
"""
import pathlib
import main
def test_get_id():
path = pathlib.Path("./nbs/chapters/00-Introduction-to-the-course.ipynb")
assert main.get_id(path) == "00"
def test_get_id_with_no_id():
path = pathlib.Path("./nbs/other/Assessment.ipynb")
assert main.get_id(path) == "assessment"
def test_get_name():
path = pathlib.Path("./nbs/chapters/00-Introduction-to-the-course.ipynb")
assert main.get_name(path) == "Introduction to the course"
def test_get_name_with_no_id():
path = pathlib.Path("./nbs/other/Assessment.ipynb")
assert main.get_name(path) == "Assessment"
def test_convert_html():
path = pathlib.Path("./nbs/other/Assessment.ipynb")
html_output = main.convert_html(path)
assert len(html_output) == 2
assert type(html_output) is tuple
assert type(html_output[0]) is str
def test_render_template():
path = pathlib.Path("./nbs/other/Assessment.ipynb")
path_id = main.get_id(path)
nb, _ = main.convert_html(path)
nb = nb.replace("{{root}}", main.ROOT)
html = main.render_template("content.html", {"nb": nb,
"root": main.ROOT,
"id": path_id,})
assert type(html) is str
assert main.ROOT in html
assert path_id in html
assert nb in html
| drvinceknight/gt | test_main.py | Python | mit | 1,347 |
class ClassC(object):
FAMILY_INHERIT = {'a'}
def c(self):
return 'family_b: C'
@classmethod
def super_family(cls):
return cls.module.super_family
| IvIePhisto/Ancestration | tests/adopt_into_b.py | Python | mit | 181 |
import logging
import pprint
from flask import jsonify, make_response, request
from flask_restful import Resource, reqparse, fields, marshal
from app.models import BucketList
from app.common.db import save_record, delete_record
from app.common.auth.authorize import login_required
logger = logging.getLogger(__name__)
bucketlist_item_fields = {"id": fields.Integer,
"name": fields.String,
"done": fields.Boolean,
"bucketlist_id": fields.Integer,
"created_at": fields.DateTime,
"updated_at": fields.DateTime
}
# Field marshal for bucketlist item
bucketlist_fields = {"id": fields.Integer,
"name": fields.String,
"description": fields.String,
"created_at": fields.DateTime,
"updated_at": fields.DateTime,
"items": fields.List(fields.Nested(bucketlist_item_fields))
}
# Field marshal for bucketlist
class BucketListsResource(Resource):
""" This class handles creation and getting of bucketlists. """
method_decorators = [login_required] # applies to all inherited resources
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument("name",
type=str,
required=True,
help="bucketlist name is required",
location="json")
self.parser.add_argument("description",
type=str,
required=True,
help="bucketlist description is required",
location="json")
def get(self, user_id=None, response=None):
""" This function handles get requests. """
if user_id is not None:
self.reqparse = reqparse.RequestParser()
self.reqparse.add_argument(
'page', type=int, location='args',
default=1
)
self.reqparse.add_argument(
'limit',
type=int,
default=20,
location='args'
)
self.reqparse.add_argument(
'q', type=str,
location='args'
)
args = self.reqparse.parse_args()
q = args['q']
page = args['page']
limit = args['limit']
# Pagination logic
if q:
                bucketlist = BucketList.query.filter(
                    BucketList.name.ilike('%' + q + '%'),
                    BucketList.user_id == user_id
                ).paginate(page, limit, False)
else:
bucketlist = BucketList.query.filter_by(user_id=user_id)\
.paginate(page, limit, False)
if bucketlist.has_next:
url = request.url.split("?limit")[0]
next_page = url + '?limit=' + \
str(limit) + '&page=' + str(page + 1)
else:
next_page = 'Null'
if bucketlist.has_prev:
url = request.url.split("?limit")[0]
prev_page = url + '?limit=' + \
str(limit) + '&page=' + str(page - 1)
else:
prev_page = 'Null'
return {'meta': {'next_page': next_page,
'prev_page': prev_page,
'total_pages': bucketlist.pages
},
'bucketlists': marshal(bucketlist.items, bucketlist_fields)
}, 200
return make_response(jsonify({
"status": response[0],
"message": response[1]
}), response[2])
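    # Example request/response shape for the paginated listing above
    # (endpoint path and values illustrative):
    #
    #   GET /bucketlists/?limit=20&page=2&q=travel
    #   -> {"meta": {"next_page": ".../?limit=20&page=3",
    #                "prev_page": ".../?limit=20&page=1",
    #                "total_pages": 5},
    #       "bucketlists": [...]}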
def post(self, user_id=None, response=None):
""" This function handles post requests. """
args = self.parser.parse_args()
name = args["name"]
description = args["description"]
if user_id is not None:
if BucketList.query.filter_by(user_id=user_id, name=name).first():
response = ("failed",
"Bucketlist with a similar name exists", 409)
else:
bucketlist = BucketList(name, description, user_id)
save_record(bucketlist)
response = ("success", "Bucketlist created successfully", 201)
return make_response(jsonify({
"status": response[0],
"message": response[1]
}), response[2])
class BucketListResource(Resource):
""" This class gets a single bucketlist. """
method_decorators = [login_required] # applies to all inherited resources
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument("name",
type=str,
required=True,
help="bucketlist name is required",
location="json")
self.parser.add_argument("description",
type=str,
required=True,
help="bucketlist description is required",
location="json")
def get(self, id=None, user_id=None, response=None):
""" This function handles get requests. """
if user_id and id is not None:
bucketlist = BucketList.query.filter_by(id=id,
user_id=user_id).first()
if bucketlist:
return marshal(bucketlist, bucketlist_fields), 200
else:
response = ("failed","Bucketlist not found", 404)
else:
response = ("failed",
"Please login to access your bucketlists", 401)
return make_response(jsonify({
"status": response[0],
"message": response[1]
}), response[2])
def put(self, id=None, user_id=None, response=None):
""" This function handles put requests. """
args = self.parser.parse_args()
name = args["name"]
description = args["description"]
if user_id and id is not None:
bucketlist = BucketList.query.filter_by(id=id,
user_id=user_id).first()
if bucketlist:
if BucketList.query.filter_by(user_id=user_id,
name=name).first():
response = ("failed",
"Bucketlist with a similar name exists", 409)
else:
bucketlist.name = name
bucketlist.description = description
# save the newly updated record
save_record(bucketlist)
response = ("success",
"bucketlist updated successfully", 200)
else:
response = ("failed", "Bucketlist not found", 404)
else:
response = ("failed",
"Please login to access your bucketlists", 401)
return make_response(jsonify({
"status": response[0],
"message": response[1]
}), response[2])
def delete(self, id=None, user_id=None, response=None):
""" This function handles delete requests. """
if user_id and id is not None:
bucketlist = BucketList.query.filter_by(id=id,
user_id=user_id).first()
if bucketlist:
delete_record(bucketlist)
response = ("success", "Bucketlist deleted successfully", 200)
else:
response = ("failed", "Bucketlist not found", 404)
else:
response = ("failed",
"Please login to access your bucketlists", 401)
return make_response(jsonify({
"status": response[0],
"message": response[1]
}), response[2])
| brayoh/bucket-list-api | app/resources/bucketlist.py | Python | mit | 8,468 |
__author__ = 'tahsmith'
from operator import add
import os
from cmake.context import Context
from functools import reduce
class VariableReference(object):
def __init__(self, tokens):
self.name = tokens[0]
def evaluate(self, ctx):
"""
Perform any nested interpolations and give the value of the variable, or None.
:type ctx: Context
"""
return ctx.variable_lookup(self.name.evaluate(ctx))
class EnvironmentVariableReference(object):
def __init__(self, tokens):
self.name = tokens
def evaluate(self, ctx):
"""
Perform any nested interpolations and give the value of the variable, or None.
:type ctx: Context
"""
name = self.name.evaluate(ctx)
if name in os.environ:
return os.environ[name]
else:
return
class StringFragment(object):
def __init__(self, tokens):
self.token = tokens[0]
def evaluate(self, ctx):
return self.token
class InterpolatedString(object):
def __init__(self, tokens):
self.tokens = tokens
def evaluate(self, ctx):
"""
Perform any substitutions in each token and join into one string.
:type ctx: Context
"""
return reduce(add, (token.evaluate(ctx) for token in self.tokens))
class ArgumentList(object):
def __init__(self, tokens):
self.tokens = tokens
def evaluate(self, ctx):
"""
Process the argument tokens, performing interpolations and splitting semi-colon delimited lists.
:param ctx: map of variables for performing substitutions.
:return: list of strings
"""
        # Interpolate tokens.
        items = (token.evaluate(ctx) for token in self.tokens)
        # Split semicolon-delimited lists into individual arguments.
        items = [item for token in items for item in token.split(';')]
        return items
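# Illustration of ArgumentList semantics (tokens and context hypothetical):
# two tokens evaluating to 'a;b' and 'c' flatten to ['a', 'b', 'c'],
# mirroring CMake's semicolon-delimited list convention.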
| tahsmith/pycmake | cmake/arguments/argument.py | Python | mit | 1,893 |
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
TRIG = 35
ECHO = 38
GPIO.setup(TRIG,GPIO.OUT)
GPIO.output(TRIG,0)
GPIO.setup(ECHO,GPIO.IN)
time.sleep(0.1)
print ("Starting gesture recognition")
try:
# here you put your main loop or block of code
while True:
value_list = []
        for x in range(0, 5):
GPIO.output(TRIG,1)
time.sleep(0.0001)
GPIO.output(TRIG,0)
start = time.time()
while GPIO.input(ECHO) == 0 and time.time()-start < 0.4:
pass
start = time.time()
while GPIO.input(ECHO) == 1:
pass
stop = time.time()
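            # echo duration * speed of sound (~34000 cm/s), halved for the
            # round trip, gives the distance in cm (hence the 17000 factor)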
distance = (stop - start) * 17000
value_list.append(distance)
time.sleep(0.025)
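        # take the median of the five samples to reject outlier readings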
        value_list.sort()
        print(value_list[2])
except KeyboardInterrupt:
# here you put any code you want to run before the program
# exits when you press CTRL+C
print ("exiting")
except:
# this catches ALL other exceptions including errors.
# You won't get any error messages for debugging
# so only use it once your code is working
print ("Other error or exception occurred!")
finally:
GPIO.cleanup() # this ensures a clean exit
| s6joui/MirrorOS | system/core/gesture-recognizer/sensors_raw_left.py | Python | mit | 1,155 |
# coding: utf-8
"""Test the kernel specs webservice API."""
import errno
import io
import json
import os
import shutil
pjoin = os.path.join
import requests
from IPython.kernel.kernelspec import NATIVE_KERNEL_NAME
from IPython.html.utils import url_path_join
from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error
# Copied from IPython.kernel.tests.test_kernelspec so updating that doesn't
# break these tests
sample_kernel_json = {'argv': ['cat', '{connection_file}'],
'display_name': 'Test kernel',
}
some_resource = u"The very model of a modern major general"
class KernelSpecAPI(object):
"""Wrapper for notebook API calls."""
def __init__(self, base_url):
self.base_url = base_url
def _req(self, verb, path, body=None):
response = requests.request(verb,
url_path_join(self.base_url, path),
data=body,
)
response.raise_for_status()
return response
def list(self):
return self._req('GET', 'api/kernelspecs')
def kernel_spec_info(self, name):
return self._req('GET', url_path_join('api/kernelspecs', name))
def kernel_resource(self, name, path):
return self._req('GET', url_path_join('kernelspecs', name, path))
class APITest(NotebookTestBase):
"""Test the kernelspec web service API"""
def setUp(self):
ipydir = self.ipython_dir.name
sample_kernel_dir = pjoin(ipydir, 'kernels', 'sample')
try:
os.makedirs(sample_kernel_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(pjoin(sample_kernel_dir, 'kernel.json'), 'w') as f:
json.dump(sample_kernel_json, f)
with io.open(pjoin(sample_kernel_dir, 'resource.txt'), 'w',
encoding='utf-8') as f:
f.write(some_resource)
self.ks_api = KernelSpecAPI(self.base_url())
def test_list_kernelspecs_bad(self):
"""Can list kernelspecs when one is invalid"""
bad_kernel_dir = pjoin(self.ipython_dir.name, 'kernels', 'bad')
try:
os.makedirs(bad_kernel_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(pjoin(bad_kernel_dir, 'kernel.json'), 'w') as f:
f.write("garbage")
model = self.ks_api.list().json()
assert isinstance(model, dict)
self.assertEqual(model['default'], NATIVE_KERNEL_NAME)
specs = model['kernelspecs']
assert isinstance(specs, dict)
# 2: the sample kernelspec created in setUp, and the native Python
# kernel
self.assertGreaterEqual(len(specs), 2)
shutil.rmtree(bad_kernel_dir)
def test_list_kernelspecs(self):
model = self.ks_api.list().json()
assert isinstance(model, dict)
self.assertEqual(model['default'], NATIVE_KERNEL_NAME)
specs = model['kernelspecs']
assert isinstance(specs, dict)
# 2: the sample kernelspec created in setUp, and the native Python
# kernel
self.assertGreaterEqual(len(specs), 2)
def is_sample_kernelspec(s):
return s['name'] == 'sample' and s['display_name'] == 'Test kernel'
def is_default_kernelspec(s):
return s['name'] == NATIVE_KERNEL_NAME and s['display_name'].startswith("IPython")
assert any(is_sample_kernelspec(s) for s in specs.values()), specs
assert any(is_default_kernelspec(s) for s in specs.values()), specs
def test_get_kernelspec(self):
spec = self.ks_api.kernel_spec_info(
'Sample').json() # Case insensitive
self.assertEqual(spec['display_name'], 'Test kernel')
def test_get_nonexistant_kernelspec(self):
with assert_http_error(404):
self.ks_api.kernel_spec_info('nonexistant')
def test_get_kernel_resource_file(self):
res = self.ks_api.kernel_resource('sAmple', 'resource.txt')
self.assertEqual(res.text, some_resource)
def test_get_nonexistant_resource(self):
with assert_http_error(404):
self.ks_api.kernel_resource('nonexistant', 'resource.txt')
with assert_http_error(404):
self.ks_api.kernel_resource('sample', 'nonexistant.txt')
| mattvonrocketstein/smash | smashlib/ipy3x/html/services/kernelspecs/tests/test_kernelspecs_api.py | Python | mit | 4,428 |
# vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import os
from functools import partial
from collections import namedtuple
from time import sleep
from platform import python_implementation
from powerline.segments import shell, tmux, pdb, i3wm
from powerline.lib.vcs import get_fallback_create_watcher
from powerline.lib.unicode import out_u
import tests.vim as vim_module
from tests.lib import Args, urllib_read, replace_attr, new_module, replace_module_module, replace_env, Pl
from tests import TestCase, SkipTest
def get_dummy_guess(**kwargs):
if 'directory' in kwargs:
def guess(path, create_watcher):
return Args(branch=lambda: out_u(os.path.basename(path)), **kwargs)
else:
def guess(path, create_watcher):
return Args(branch=lambda: out_u(os.path.basename(path)), directory=path, **kwargs)
return guess
class TestShell(TestCase):
def test_last_status(self):
pl = Pl()
segment_info = {'args': Args(last_exit_code=10)}
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
{'contents': '10', 'highlight_groups': ['exit_fail']}
])
segment_info['args'].last_exit_code = 0
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_exit_code = None
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_exit_code = 'sigsegv'
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
{'contents': 'sigsegv', 'highlight_groups': ['exit_fail']}
])
segment_info['args'].last_exit_code = 'sigsegv+core'
self.assertEqual(shell.last_status(pl=pl, segment_info=segment_info), [
{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail']}
])
def test_last_pipe_status(self):
pl = Pl()
segment_info = {'args': Args(last_pipe_status=[])}
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_pipe_status = [0, 0, 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), None)
segment_info['args'].last_pipe_status = [0, 2, 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': '2', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv', 'sigsegv+core']
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv', 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': 'sigsegv', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
segment_info['args'].last_pipe_status = [0, 'sigsegv+core', 0]
self.assertEqual(shell.last_pipe_status(pl=pl, segment_info=segment_info), [
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True},
{'contents': 'sigsegv+core', 'highlight_groups': ['exit_fail'], 'draw_inner_divider': True},
{'contents': '0', 'highlight_groups': ['exit_success'], 'draw_inner_divider': True}
])
def test_jobnum(self):
pl = Pl()
segment_info = {'args': Args(jobnum=0)}
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), None)
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), None)
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '0')
segment_info = {'args': Args(jobnum=1)}
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info), '1')
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=False), '1')
self.assertEqual(shell.jobnum(pl=pl, segment_info=segment_info, show_zero=True), '1')
def test_continuation(self):
pl = Pl()
self.assertEqual(shell.continuation(pl=pl, segment_info={}), [{
'contents': '',
'width': 'auto',
'highlight_groups': ['continuation:current', 'continuation'],
}])
segment_info = {'parser_state': 'if cmdsubst'}
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
{
'contents': 'if',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'l',
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, right_align=True), [
{
'contents': 'if',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'r',
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False), [
{
'contents': 'if',
'draw_inner_divider': True,
'highlight_groups': ['continuation'],
},
{
'contents': 'cmdsubst',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'l',
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=False, right_align=True), [
{
'contents': 'if',
'draw_inner_divider': True,
'highlight_groups': ['continuation'],
'width': 'auto',
'align': 'r',
},
{
'contents': 'cmdsubst',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True), [
{
'contents': 'if',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'r',
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': 'IF'}), [
{
'contents': 'IF',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'r',
},
])
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info, omit_cmdsubst=True, right_align=True, renames={'if': None}), [
{
'contents': '',
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'r',
},
])
segment_info = {'parser_state': 'then then then cmdsubst'}
self.assertEqual(shell.continuation(pl=pl, segment_info=segment_info), [
{
'contents': 'then',
'draw_inner_divider': True,
'highlight_groups': ['continuation'],
},
{
'contents': 'then',
'draw_inner_divider': True,
'highlight_groups': ['continuation'],
},
{
'contents': 'then',
'draw_inner_divider': True,
'highlight_groups': ['continuation:current', 'continuation'],
'width': 'auto',
'align': 'l',
},
])
def test_cwd(self):
new_os = new_module('os', path=os.path, sep='/')
pl = Pl()
cwd = [None]
def getcwd():
wd = cwd[0]
if isinstance(wd, Exception):
raise wd
else:
return wd
segment_info = {'getcwd': getcwd, 'home': None}
with replace_attr(shell, 'os', new_os):
cwd[0] = '/abc/def/ghi/foo/bar'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info['home'] = '/abc/def/ghi'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info.update(shortened_path='~foo/ghi')
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info), [
{'contents': '~foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_shortened_path=False), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info.pop('shortened_path')
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
{'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
{'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
{'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
{'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
cwd[0] = '/etc'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
cwd[0] = '/'
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
ose = OSError()
ose.errno = 2
cwd[0] = ose
self.assertEqual(shell.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
])
cwd[0] = OSError()
self.assertRaises(OSError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
cwd[0] = ValueError()
self.assertRaises(ValueError, shell.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
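# tmux tests replace get_tmux_output with a canned-response stub: 'list-clients'
# reports two clients, so attached_clients() yields '2', and None once the
# requested minimum exceeds the actual count.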
class TestTmux(TestCase):
def test_attached_clients(self):
def get_tmux_output(pl, cmd, *args):
if cmd == 'list-panes':
return 'session_name\n'
elif cmd == 'list-clients':
return '/dev/pts/2: 0 [191x51 xterm-256color] (utf8)\n/dev/pts/3: 0 [191x51 xterm-256color] (utf8)'
pl = Pl()
with replace_attr(tmux, 'get_tmux_output', get_tmux_output):
self.assertEqual(tmux.attached_clients(pl=pl), '2')
self.assertEqual(tmux.attached_clients(pl=pl, minimum=3), None)
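# Shared base class for the segment tests below: each subclass sets only
# module_name, and setUpClass imports the matching
# powerline.segments.common.<module_name> module into cls.module.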
class TestCommon(TestCase):
@classmethod
def setUpClass(cls):
module = __import__(str('powerline.segments.common.{0}'.format(cls.module_name)))
cls.module = getattr(module.segments.common, str(cls.module_name))
class TestNet(TestCommon):
module_name = 'net'
def test_hostname(self):
pl = Pl()
with replace_env('SSH_CLIENT', '192.168.0.12 40921 22') as segment_info:
with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc')
with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), 'abc.mydomain')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), 'abc')
segment_info['environ'].pop('SSH_CLIENT')
with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc'):
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True), None)
with replace_module_module(self.module, 'socket', gethostname=lambda: 'abc.mydomain'):
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info), 'abc.mydomain')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, exclude_domain=True), 'abc')
self.assertEqual(self.module.hostname(pl=pl, segment_info=segment_info, only_if_ssh=True, exclude_domain=True), None)
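# urllib_read here is a stub (defined elsewhere in this test suite) that
# returns a canned response, so no real network access takes place.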
def test_external_ip(self):
pl = Pl()
with replace_attr(self.module, 'urllib_read', urllib_read):
self.assertEqual(self.module.external_ip(pl=pl), [{'contents': '127.0.0.1', 'divider_highlight_group': 'background:divider'}])
def test_internal_ip(self):
try:
import netifaces
except ImportError:
raise SkipTest('netifaces module is not available')
pl = Pl()
addr = {
'enp2s0': {
netifaces.AF_INET: [{'addr': '192.168.100.200'}],
netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777%enp2s0'}]
},
'lo': {
netifaces.AF_INET: [{'addr': '127.0.0.1'}],
netifaces.AF_INET6: [{'addr': '::1'}]
},
'teredo': {
netifaces.AF_INET6: [{'addr': 'feff::5446:5eff:fe5a:7777'}]
},
}
interfaces = ['lo', 'enp2s0', 'teredo']
with replace_module_module(
self.module, 'netifaces',
interfaces=(lambda: interfaces),
ifaddresses=(lambda interface: addr[interface]),
AF_INET=netifaces.AF_INET,
AF_INET6=netifaces.AF_INET6,
):
self.assertEqual(self.module.internal_ip(pl=pl), '192.168.100.200')
self.assertEqual(self.module.internal_ip(pl=pl, interface='auto'), '192.168.100.200')
self.assertEqual(self.module.internal_ip(pl=pl, interface='lo'), '127.0.0.1')
self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo'), None)
self.assertEqual(self.module.internal_ip(pl=pl, ipv=4), '192.168.100.200')
self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=4), '192.168.100.200')
self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=4), '127.0.0.1')
self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=4), None)
self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
self.assertEqual(self.module.internal_ip(pl=pl, interface='auto', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
self.assertEqual(self.module.internal_ip(pl=pl, interface='lo', ipv=6), '::1')
self.assertEqual(self.module.internal_ip(pl=pl, interface='teredo', ipv=6), 'feff::5446:5eff:fe5a:7777')
interfaces[1:2] = ()
self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), 'feff::5446:5eff:fe5a:7777')
interfaces[1:2] = ()
self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), '::1')
interfaces[:] = ()
self.assertEqual(self.module.internal_ip(pl=pl, ipv=6), None)
gateways = {
'default': {
netifaces.AF_INET: ('192.168.100.1', 'enp2s0'),
netifaces.AF_INET6: ('feff::5446:5eff:fe5a:0001', 'enp2s0')
}
}
with replace_module_module(
self.module, 'netifaces',
interfaces=(lambda: interfaces),
ifaddresses=(lambda interface: addr[interface]),
gateways=(lambda: gateways),
AF_INET=netifaces.AF_INET,
AF_INET6=netifaces.AF_INET6,
):
# default gateway has specified address family
self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), '192.168.100.200')
self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), 'feff::5446:5eff:fe5a:7777%enp2s0')
# default gateway doesn't have specified address family
gateways['default'] = {}
self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=4), None)
self.assertEqual(self.module.internal_ip(pl=pl, interface='default_gateway', ipv=6), None)
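# network_load apparently samples interface counters on a background thread
# started by startup(); the busy-wait loops below give that thread time to
# record a previous sample before the assertions run.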
def test_network_load(self):
def gb(interface):
return None
f = [gb]
def _get_bytes(interface):
return f[0](interface)
pl = Pl()
with replace_attr(self.module, '_get_bytes', _get_bytes):
self.module.network_load.startup(pl=pl)
try:
self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
sleep(self.module.network_load.interval)
self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
while 'prev' not in self.module.network_load.interfaces.get('eth0', {}):
sleep(0.1)
self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), None)
l = [0, 0]
def gb2(interface):
l[0] += 1200
l[1] += 2400
return tuple(l)
f[0] = gb2
while not self.module.network_load.interfaces.get('eth0', {}).get('prev', (None, None))[1]:
sleep(0.1)
self.assertEqual(self.module.network_load(pl=pl, interface='eth0'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'DL 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
{'divider_highlight_group': 'network_load:divider', 'contents': 'UL 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
])
self.assertEqual(self.module.network_load(pl=pl, interface='eth0', recv_format='r {value}', sent_format='s {value}'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
])
self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', suffix='bps', interface='eth0'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 Kibps', 'highlight_groups': ['network_load_recv', 'network_load']},
{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 Kibps', 'highlight_groups': ['network_load_sent', 'network_load']},
])
self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', si_prefix=True, interface='eth0'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 kB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 kB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
])
self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', recv_max=0, interface='eth0'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv_gradient', 'network_load_gradient', 'network_load_recv', 'network_load'], 'gradient_level': 100},
{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent', 'network_load']},
])
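# Compares equal to any value within 1 of 50.0, so the computed
# gradient_level below can be checked approximately.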
class ApproxEqual(object):
def __eq__(self, i):
return abs(i - 50.0) < 1
self.assertEqual(self.module.network_load(pl=pl, recv_format='r {value}', sent_format='s {value}', sent_max=4800, interface='eth0'), [
{'divider_highlight_group': 'network_load:divider', 'contents': 'r 1 KiB/s', 'highlight_groups': ['network_load_recv', 'network_load']},
{'divider_highlight_group': 'network_load:divider', 'contents': 's 2 KiB/s', 'highlight_groups': ['network_load_sent_gradient', 'network_load_gradient', 'network_load_sent', 'network_load'], 'gradient_level': ApproxEqual()},
])
finally:
self.module.network_load.shutdown()
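# Environment-related segments: user, cwd, virtualenv and environment.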
class TestEnv(TestCommon):
module_name = 'env'
def test_user(self):
new_os = new_module('os', getpid=lambda: 1)
class Process(object):
def __init__(self, pid):
pass
def username(self):
return 'def@DOMAIN.COM'
if hasattr(self.module, 'psutil') and not callable(self.module.psutil.Process.username):
username = property(username)
struct_passwd = namedtuple('struct_passwd', ('pw_name',))
new_psutil = new_module('psutil', Process=Process)
new_pwd = new_module('pwd', getpwuid=lambda uid: struct_passwd(pw_name='def@DOMAIN.COM'))
new_getpass = new_module('getpass', getuser=lambda: 'def@DOMAIN.COM')
pl = Pl()
with replace_attr(self.module, 'pwd', new_pwd):
with replace_attr(self.module, 'getpass', new_getpass):
with replace_attr(self.module, 'os', new_os):
with replace_attr(self.module, 'psutil', new_psutil):
with replace_attr(self.module, '_geteuid', lambda: 5):
self.assertEqual(self.module.user(pl=pl), [
{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
])
self.assertEqual(self.module.user(pl=pl, hide_user='abc'), [
{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
])
self.assertEqual(self.module.user(pl=pl, hide_domain=False), [
{'contents': 'def@DOMAIN.COM', 'highlight_groups': ['user']}
])
self.assertEqual(self.module.user(pl=pl, hide_user='def@DOMAIN.COM'), None)
self.assertEqual(self.module.user(pl=pl, hide_domain=True), [
{'contents': 'def', 'highlight_groups': ['user']}
])
with replace_attr(self.module, '_geteuid', lambda: 0):
self.assertEqual(self.module.user(pl=pl), [
{'contents': 'def', 'highlight_groups': ['superuser', 'user']}
])
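# cwd is a one-element list so that getcwd() can be retargeted between
# assertions; storing an exception in it makes getcwd() raise instead.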
def test_cwd(self):
new_os = new_module('os', path=os.path, sep='/')
pl = Pl()
cwd = [None]
def getcwd():
wd = cwd[0]
if isinstance(wd, Exception):
raise wd
else:
return wd
segment_info = {'getcwd': getcwd, 'home': None}
with replace_attr(self.module, 'os', new_os):
cwd[0] = '/abc/def/ghi/foo/bar'
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'abc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'def', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
segment_info['home'] = '/abc/def/ghi'
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=3, shorten_home=False), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'ghi', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'foo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1), [
{'contents': '...', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis='---'), [
{'contents': '---', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True), [
{'contents': '.../', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis='---'), [
{'contents': '---/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=1, use_path_separator=True, ellipsis=None), [
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '~', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'fo', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2, use_path_separator=True), [
{'contents': '~/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'fo/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'bar', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']}
])
cwd[0] = '/etc'
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False},
{'contents': 'etc', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
cwd[0] = '/'
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=False), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': True, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, use_path_separator=True), [
{'contents': '/', 'divider_highlight_group': 'cwd:divider', 'draw_inner_divider': False, 'highlight_groups': ['cwd:current_folder', 'cwd']},
])
ose = OSError()
ose.errno = 2
cwd[0] = ose
self.assertEqual(self.module.cwd(pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2), [
{'contents': '[not found]', 'divider_highlight_group': 'cwd:divider', 'highlight_groups': ['cwd:current_folder', 'cwd'], 'draw_inner_divider': True}
])
cwd[0] = OSError()
self.assertRaises(OSError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
cwd[0] = ValueError()
self.assertRaises(ValueError, self.module.cwd, pl=pl, segment_info=segment_info, dir_limit_depth=2, dir_shorten_len=2)
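# As the assertions below show, virtualenv() prefers $VIRTUAL_ENV over
# $CONDA_DEFAULT_ENV, and the ignore_venv/ignore_conda flags suppress one
# source or the other.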
def test_virtualenv(self):
pl = Pl()
with replace_env('VIRTUAL_ENV', '/abc/def/ghi') as segment_info:
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
segment_info['environ'].pop('VIRTUAL_ENV')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
with replace_env('CONDA_DEFAULT_ENV', 'foo') as segment_info:
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'foo')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), 'foo')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
segment_info['environ'].pop('CONDA_DEFAULT_ENV')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
with replace_env('CONDA_DEFAULT_ENV', 'foo', environ={'VIRTUAL_ENV': '/abc/def/ghi'}) as segment_info:
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), 'foo')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
segment_info['environ'].pop('CONDA_DEFAULT_ENV')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_conda=True), 'ghi')
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True), None)
self.assertEqual(self.module.virtualenv(pl=pl, segment_info=segment_info, ignore_venv=True, ignore_conda=True), None)
def test_environment(self):
pl = Pl()
variable = 'FOO'
value = 'bar'
with replace_env(variable, value) as segment_info:
self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), value)
segment_info['environ'].pop(variable)
self.assertEqual(self.module.environment(pl=pl, segment_info=segment_info, variable=variable), None)
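# get_dummy_guess and the tree_status stub stand in for a real VCS checkout
# (here a repository named 'tests'), driving branch() through the
# clean/dirty/ignored-status combinations.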
class TestVcs(TestCommon):
module_name = 'vcs'
def test_branch(self):
pl = Pl()
create_watcher = get_fallback_create_watcher()
segment_info = {'getcwd': os.getcwd}
branch = partial(self.module.branch, pl=pl, create_watcher=create_watcher)
with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: None, directory='/tmp/tests')):
with replace_attr(self.module, 'tree_status', lambda repo, pl: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
'highlight_groups': ['branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
'contents': 'tests',
'highlight_groups': ['branch_clean', 'branch'],
'divider_highlight_group': None
}])
with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'D ', directory='/tmp/tests')):
with replace_attr(self.module, 'tree_status', lambda repo, pl: 'D '):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
'highlight_groups': ['branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
'contents': 'tests',
'highlight_groups': ['branch_dirty', 'branch'],
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [{
'highlight_groups': ['branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
with replace_attr(self.module, 'guess', lambda path, create_watcher: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), None)
with replace_attr(self.module, 'guess', get_dummy_guess(status=lambda: 'U')):
with replace_attr(self.module, 'tree_status', lambda repo, pl: 'U'):
self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [{
'highlight_groups': ['branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [{
'highlight_groups': ['branch_dirty', 'branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [{
'highlight_groups': ['branch_dirty', 'branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [{
'highlight_groups': ['branch_clean', 'branch'],
'contents': 'tests',
'divider_highlight_group': None
}])
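# datetime is replaced with a double whose strftime() returns the format
# string itself, so date() can be verified without freezing the clock.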
class TestTime(TestCommon):
module_name = 'time'
def test_date(self):
pl = Pl()
with replace_attr(self.module, 'datetime', Args(now=lambda: Args(strftime=lambda fmt: fmt))):
self.assertEqual(self.module.date(pl=pl), [{'contents': '%Y-%m-%d', 'highlight_groups': ['date'], 'divider_highlight_group': None}])
self.assertEqual(self.module.date(pl=pl, format='%H:%M', istime=True), [{'contents': '%H:%M', 'highlight_groups': ['time', 'date'], 'divider_highlight_group': 'time:divider'}])
unicode_date = self.module.date(pl=pl, format='\u231a', istime=True)
expected_unicode_date = [{'contents': '\u231a', 'highlight_groups': ['time', 'date'], 'divider_highlight_group': 'time:divider'}]
if python_implementation() == 'PyPy' and sys.version_info >= (3,):
if unicode_date != expected_unicode_date:
raise SkipTest('Dates do not match, see https://bitbucket.org/pypy/pypy/issues/2161/pypy3-strftime-does-not-accept-unicode')
self.assertEqual(unicode_date, expected_unicode_date)
def test_fuzzy_time(self):
time = Args(hour=0, minute=45)
pl = Pl()
with replace_attr(self.module, 'datetime', Args(now=lambda: time)):
self.assertEqual(self.module.fuzzy_time(pl=pl), 'quarter to one')
time.hour = 23
time.minute = 59
self.assertEqual(self.module.fuzzy_time(pl=pl), 'round about midnight')
time.minute = 33
self.assertEqual(self.module.fuzzy_time(pl=pl), 'twenty-five to twelve')
time.minute = 60
self.assertEqual(self.module.fuzzy_time(pl=pl), 'twelve o\'clock')
time.minute = 33
self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=False), 'twenty-five to twelve')
time.minute = 60
self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=False), 'twelve o\'clock')
time.minute = 33
self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=True), 'twenty‐five to twelve')
time.minute = 60
self.assertEqual(self.module.fuzzy_time(pl=pl, unicode_text=True), 'twelve o’clock')
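# System information segments: uptime, system_load and cpu_load_percent.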
class TestSys(TestCommon):
module_name = 'sys'
def test_uptime(self):
pl = Pl()
with replace_attr(self.module, '_get_uptime', lambda: 259200):
self.assertEqual(self.module.uptime(pl=pl), [{'contents': '3d', 'divider_highlight_group': 'background:divider'}])
with replace_attr(self.module, '_get_uptime', lambda: 93784):
self.assertEqual(self.module.uptime(pl=pl), [{'contents': '1d 2h 3m', 'divider_highlight_group': 'background:divider'}])
self.assertEqual(self.module.uptime(pl=pl, shorten_len=4), [{'contents': '1d 2h 3m 4s', 'divider_highlight_group': 'background:divider'}])
with replace_attr(self.module, '_get_uptime', lambda: 65536):
self.assertEqual(self.module.uptime(pl=pl), [{'contents': '18h 12m 16s', 'divider_highlight_group': 'background:divider'}])
self.assertEqual(self.module.uptime(pl=pl, shorten_len=2), [{'contents': '18h 12m', 'divider_highlight_group': 'background:divider'}])
self.assertEqual(self.module.uptime(pl=pl, shorten_len=1), [{'contents': '18h', 'divider_highlight_group': 'background:divider'}])
def _get_uptime():
raise NotImplementedError
with replace_attr(self.module, '_get_uptime', _get_uptime):
self.assertEqual(self.module.uptime(pl=pl), None)
def test_system_load(self):
pl = Pl()
with replace_module_module(self.module, 'os', getloadavg=lambda: (7.5, 3.5, 1.5)):
with replace_attr(self.module, '_cpu_count', lambda: 2):
self.assertEqual(self.module.system_load(pl=pl), [
{'contents': '7.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '3.5 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0},
{'contents': '1.5', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 0}
])
self.assertEqual(self.module.system_load(pl=pl, format='{avg:.0f}', threshold_good=0, threshold_bad=1), [
{'contents': '8 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '4 ', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 100},
{'contents': '2', 'highlight_groups': ['system_load_gradient', 'system_load'], 'divider_highlight_group': 'background:divider', 'gradient_level': 75.0}
])
def test_cpu_load_percent(self):
try:
__import__('psutil')
except ImportError as e:
raise SkipTest('Failed to import psutil: {0}'.format(e))
pl = Pl()
with replace_module_module(self.module, 'psutil', cpu_percent=lambda **kwargs: 52.3):
self.assertEqual(self.module.cpu_load_percent(pl=pl), [{
'contents': '52%',
'gradient_level': 52.3,
'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
}])
self.assertEqual(self.module.cpu_load_percent(pl=pl, format='{0:.1f}%'), [{
'contents': '52.3%',
'gradient_level': 52.3,
'highlight_groups': ['cpu_load_percent_gradient', 'cpu_load_percent'],
}])
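# The urllib_read stub serves a canned weather report (cloudy, -9°C, judging
# by the expected values), so these tests exercise formatting, units and
# gradient levels only.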
class TestWthr(TestCommon):
module_name = 'wthr'
def test_weather(self):
pl = Pl()
with replace_attr(self.module, 'urllib_read', urllib_read):
self.assertEqual(self.module.weather(pl=pl), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, temp_coldest=0, temp_hottest=100), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 0}
])
self.assertEqual(self.module.weather(pl=pl, temp_coldest=-100, temp_hottest=-50), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 100}
])
self.assertEqual(self.module.weather(pl=pl, icons={'cloudy': 'o'}), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'o '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, icons={'partly_cloudy_day': 'x'}), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'x '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, unit='F'), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '16°F', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, unit='K'), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '264K', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, temp_format='{temp:.1e}C'), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9.0e+00C', 'gradient_level': 30.0}
])
with replace_attr(self.module, 'urllib_read', urllib_read):
self.module.weather.startup(pl=pl, location_query='Meppen,06,DE')
self.assertEqual(self.module.weather(pl=pl), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '-9°C', 'gradient_level': 30.0}
])
self.assertEqual(self.module.weather(pl=pl, location_query='Moscow,RU'), [
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_condition_partly_cloudy_day', 'weather_condition_cloudy', 'weather_conditions', 'weather'], 'contents': 'CLOUDS '},
{'divider_highlight_group': 'background:divider', 'highlight_groups': ['weather_temp_gradient', 'weather_temp', 'weather'], 'contents': '19°C', 'gradient_level': 70.0}
])
self.module.weather.shutdown()
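# The i3 connection is replaced with a stub listing four workspaces across
# three outputs, covering the only_show, strip and output filters of
# workspaces().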
class TestI3WM(TestCase):
def test_workspaces(self):
pl = Pl()
with replace_attr(i3wm, 'conn', Args(get_workspaces=lambda: iter([
{'name': '1: w1', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': False},
{'name': '2: w2', 'output': 'LVDS1', 'focused': False, 'urgent': False, 'visible': True},
{'name': '3: w3', 'output': 'HDMI1', 'focused': False, 'urgent': True, 'visible': True},
{'name': '4: w4', 'output': 'DVI01', 'focused': True, 'urgent': True, 'visible': True},
]))):
segment_info = {}
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info), [
{'contents': '1: w1', 'highlight_groups': ['workspace']},
{'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
{'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=None), [
{'contents': '1: w1', 'highlight_groups': ['workspace']},
{'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
{'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['focused', 'urgent']), [
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
{'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible']), [
{'contents': '2: w2', 'highlight_groups': ['w_visible', 'workspace']},
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
{'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3), [
{'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
{'contents': 'w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
{'contents': 'w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['focused', 'urgent'], output='DVI01'), [
{'contents': '4: w4', 'highlight_groups': ['w_focused', 'w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], output='HDMI1'), [
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3, output='LVDS1'), [
{'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
])
segment_info['output'] = 'LVDS1'
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], output='HDMI1'), [
{'contents': '3: w3', 'highlight_groups': ['w_urgent', 'w_visible', 'workspace']},
])
self.assertEqual(i3wm.workspaces(pl=pl, segment_info=segment_info, only_show=['visible'], strip=3), [
{'contents': 'w2', 'highlight_groups': ['w_visible', 'workspace']},
])
def test_mode(self):
pl = Pl()
self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}), None)
self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}), 'test')
self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'default'}, names={'default': 'test'}), 'test')
self.assertEqual(i3wm.mode(pl=pl, segment_info={'mode': 'test'}, names={'default': 'test', 'test': 't'}), 't')
class TestMail(TestCommon):
module_name = 'mail'
def test_email_imap_alert(self):
# TODO
pass
class TestPlayers(TestCommon):
module_name = 'players'
def test_now_playing(self):
# TODO
pass
class TestBat(TestCommon):
module_name = 'bat'
def test_battery(self):
pl = Pl()
def _get_battery_status(pl):
return 86, False
with replace_attr(self.module, '_get_battery_status', _get_battery_status):
self.assertEqual(self.module.battery(pl=pl), [{
'contents': ' 86%',
'highlight_groups': ['battery_gradient', 'battery'],
'gradient_level': 14,
}])
self.assertEqual(self.module.battery(pl=pl, format='{capacity:.2f}'), [{
'contents': '0.86',
'highlight_groups': ['battery_gradient', 'battery'],
'gradient_level': 14,
}])
self.assertEqual(self.module.battery(pl=pl, steps=7), [{
'contents': ' 86%',
'highlight_groups': ['battery_gradient', 'battery'],
'gradient_level': 14,
}])
self.assertEqual(self.module.battery(pl=pl, gamify=True), [
{
'contents': ' ',
'draw_inner_divider': False,
'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
'gradient_level': 0
},
{
'contents': 'OOOO',
'draw_inner_divider': False,
'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
'gradient_level': 0
},
{
'contents': 'O',
'draw_inner_divider': False,
'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
'gradient_level': 100
}
])
self.assertEqual(self.module.battery(pl=pl, gamify=True, full_heart='+', empty_heart='-', steps='10'), [
{
'contents': ' ',
'draw_inner_divider': False,
'highlight_groups': ['battery_offline', 'battery_ac_state', 'battery_gradient', 'battery'],
'gradient_level': 0
},
{
'contents': '++++++++',
'draw_inner_divider': False,
'highlight_groups': ['battery_full', 'battery_gradient', 'battery'],
'gradient_level': 0
},
{
'contents': '--',
'draw_inner_divider': False,
'highlight_groups': ['battery_empty', 'battery_gradient', 'battery'],
'gradient_level': 100
}
])
def test_battery_with_ac_online(self):
pl = Pl()
def _get_battery_status(pl):
return 86, True
with replace_attr(self.module, '_get_battery_status', _get_battery_status):
self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
{
'contents': 'C 86%',
'highlight_groups': ['battery_gradient', 'battery'],
'gradient_level': 14,
}])
def test_battery_with_ac_offline(self):
pl = Pl()
def _get_battery_status(pl):
return 86, False
with replace_attr(self.module, '_get_battery_status', _get_battery_status):
self.assertEqual(self.module.battery(pl=pl, online='C', offline=' '), [
{
'contents': ' 86%',
'highlight_groups': ['battery_gradient', 'battery'],
'gradient_level': 14,
}])
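# These tests run against vim_module, a stand-in for Vim's Python API. Note
# the setUpClass/tearDownClass pair at the end of this class: it puts a test
# 'path' directory on sys.path (presumably providing the mock vim module)
# before importing powerline.segments.vim.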
class TestVim(TestCase):
def test_mode(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'NORMAL')
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'i': 'INS'}), 'NORMAL')
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'n': 'NORM'}), 'NORM')
with vim_module._with('mode', 'i') as segment_info:
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'INSERT')
with vim_module._with('mode', chr(ord('V') - 0x40)) as segment_info:
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info), 'V-BLCK')
self.assertEqual(self.vim.mode(pl=pl, segment_info=segment_info, override={'^V': 'VBLK'}), 'VBLK')
def test_visual_range(self):
pl = Pl()
vr = partial(self.vim.visual_range, pl=pl)
vim_module.current.window.cursor = [0, 0]
try:
with vim_module._with('mode', 'i') as segment_info:
self.assertEqual(vr(segment_info=segment_info), '')
with vim_module._with('mode', '^V') as segment_info:
self.assertEqual(vr(segment_info=segment_info), '1 x 1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), '5 x 5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), '5 x 4')
with vim_module._with('mode', '^S') as segment_info:
self.assertEqual(vr(segment_info=segment_info), '1 x 1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), '5 x 5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), '5 x 4')
with vim_module._with('mode', 'V') as segment_info:
self.assertEqual(vr(segment_info=segment_info), 'L:1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('mode', 'S') as segment_info:
self.assertEqual(vr(segment_info=segment_info), 'L:1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('mode', 'v') as segment_info:
self.assertEqual(vr(segment_info=segment_info), 'C:1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('mode', 's') as segment_info:
self.assertEqual(vr(segment_info=segment_info), 'C:1')
with vim_module._with('vpos', line=5, col=5, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
with vim_module._with('vpos', line=5, col=4, off=0):
self.assertEqual(vr(segment_info=segment_info), 'L:5')
finally:
vim_module._close(1)
def test_modified_indicator(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
segment_info['buffer'][0] = 'abc'
try:
self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), '+')
self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info, text='-'), '-')
finally:
vim_module._bw(segment_info['bufnr'])
def test_paste_indicator(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), None)
with vim_module._with('options', paste=1):
self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info), 'PASTE')
self.assertEqual(self.vim.paste_indicator(pl=pl, segment_info=segment_info, text='P'), 'P')
def test_readonly_indicator(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), None)
with vim_module._with('bufoptions', readonly=1):
self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info), 'RO')
self.assertEqual(self.vim.readonly_indicator(pl=pl, segment_info=segment_info, text='L'), 'L')
def test_file_scheme(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), None)
with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
self.assertEqual(self.vim.file_scheme(pl=pl, segment_info=segment_info), 'zipfile')
def test_file_directory(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), None)
with replace_env('HOME', '/home/foo', os.environ):
with vim_module._with('buffer', '/tmp/’’/abc') as segment_info:
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/’’/')
with vim_module._with('buffer', b'/tmp/\xFF\xFF/abc') as segment_info:
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/<ff><ff>/')
with vim_module._with('buffer', '/tmp/abc') as segment_info:
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/')
os.environ['HOME'] = '/tmp'
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '~/')
with vim_module._with('buffer', 'zipfile:/tmp/abc.zip::abc/abc.vim') as segment_info:
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')
os.environ['HOME'] = '/tmp'
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=False), 'zipfile:/tmp/abc.zip::abc/')
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info, remove_scheme=True), '/tmp/abc.zip::abc/')
self.assertEqual(self.vim.file_directory(pl=pl, segment_info=segment_info), '/tmp/abc.zip::abc/')
def test_file_name(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), None)
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True), [
{'contents': '[No file]', 'highlight_groups': ['file_name_no_file', 'file_name']}
])
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info, display_no_file=True, no_file_text='X'), [
{'contents': 'X', 'highlight_groups': ['file_name_no_file', 'file_name']}
])
with vim_module._with('buffer', '/tmp/abc') as segment_info:
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), 'abc')
with vim_module._with('buffer', '/tmp/’’') as segment_info:
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '’’')
with vim_module._with('buffer', b'/tmp/\xFF\xFF') as segment_info:
self.assertEqual(self.vim.file_name(pl=pl, segment_info=segment_info), '<ff><ff>')
def test_file_size(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')
with vim_module._with('buffer', os.path.join(os.path.dirname(__file__), 'empty')) as segment_info:
self.assertEqual(self.vim.file_size(pl=pl, segment_info=segment_info), '0 B')
def test_file_opts(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.file_format(pl=pl, segment_info=segment_info), [
{'divider_highlight_group': 'background:divider', 'contents': 'unix'}
])
self.assertEqual(self.vim.file_encoding(pl=pl, segment_info=segment_info), [
{'divider_highlight_group': 'background:divider', 'contents': 'utf-8'}
])
self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), None)
with vim_module._with('bufoptions', filetype='python'):
self.assertEqual(self.vim.file_type(pl=pl, segment_info=segment_info), [
{'divider_highlight_group': 'background:divider', 'contents': 'python'}
])
def test_window_title(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), None)
with vim_module._with('wvars', quickfix_title='Abc'):
self.assertEqual(self.vim.window_title(pl=pl, segment_info=segment_info), 'Abc')
def test_line_percent(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
segment_info['buffer'][0:-1] = [str(i) for i in range(100)]
try:
self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '1')
vim_module._set_cursor(50, 0)
self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info), '50')
self.assertEqual(self.vim.line_percent(pl=pl, segment_info=segment_info, gradient=True), [
{'contents': '50', 'highlight_groups': ['line_percent_gradient', 'line_percent'], 'gradient_level': 50 * 100.0 / 101}
])
finally:
vim_module._bw(segment_info['bufnr'])
def test_line_count(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
try:
self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
vim_module._set_cursor(50, 0)
self.assertEqual(self.vim.line_count(pl=pl, segment_info=segment_info), '100')
finally:
vim_module._bw(segment_info['bufnr'])
def test_position(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
try:
segment_info['buffer'][0:-1] = [str(i) for i in range(99)]
vim_module._set_cursor(49, 0)
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), '50%')
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
{'contents': '50%', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 50.0}
])
vim_module._set_cursor(0, 0)
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info), 'Top')
vim_module._set_cursor(97, 0)
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Final')
segment_info['buffer'][0:-1] = [str(i) for i in range(2)]
vim_module._set_cursor(0, 0)
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, position_strings={'top': 'Comienzo', 'bottom': 'Final', 'all': 'Todo'}), 'Todo')
self.assertEqual(self.vim.position(pl=pl, segment_info=segment_info, gradient=True), [
{'contents': 'All', 'highlight_groups': ['position_gradient', 'position'], 'gradient_level': 0.0}
])
finally:
vim_module._bw(segment_info['bufnr'])
def test_cursor_current(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.line_current(pl=pl, segment_info=segment_info), '1')
self.assertEqual(self.vim.col_current(pl=pl, segment_info=segment_info), '1')
self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info), [{
'highlight_groups': ['virtcol_current_gradient', 'virtcol_current', 'col_current'], 'contents': '1', 'gradient_level': 100.0 / 80,
}])
self.assertEqual(self.vim.virtcol_current(pl=pl, segment_info=segment_info, gradient=False), [{
'highlight_groups': ['virtcol_current', 'col_current'], 'contents': '1',
}])
def test_modified_buffers(self):
pl = Pl()
self.assertEqual(self.vim.modified_buffers(pl=pl), None)
def test_branch(self):
pl = Pl()
create_watcher = get_fallback_create_watcher()
branch = partial(self.vim.branch, pl=pl, create_watcher=create_watcher)
with vim_module._with('buffer', '/foo') as segment_info:
with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: None)):
with replace_attr(self.vcs, 'tree_status', lambda repo, pl: None):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
])
with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'DU')):
with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'DU'):
self.assertEqual(branch(segment_info=segment_info, status_colors=False), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
])
with replace_attr(self.vcs, 'guess', get_dummy_guess(status=lambda: 'U')):
with replace_attr(self.vcs, 'tree_status', lambda repo, pl: 'U'):
self.assertEqual(branch(segment_info=segment_info, status_colors=False, ignore_statuses=['U']), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch'], 'contents': 'foo'}
])
self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['DU']), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
])
self.assertEqual(branch(segment_info=segment_info, status_colors=True), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_dirty', 'branch'], 'contents': 'foo'}
])
self.assertEqual(branch(segment_info=segment_info, status_colors=True, ignore_statuses=['U']), [
{'divider_highlight_group': 'branch:divider', 'highlight_groups': ['branch_clean', 'branch'], 'contents': 'foo'}
])
def test_file_vcs_status(self):
pl = Pl()
create_watcher = get_fallback_create_watcher()
file_vcs_status = partial(self.vim.file_vcs_status, pl=pl, create_watcher=create_watcher)
with vim_module._with('buffer', '/foo') as segment_info:
with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
self.assertEqual(file_vcs_status(segment_info=segment_info), [
{'highlight_groups': ['file_vcs_status_M', 'file_vcs_status'], 'contents': 'M'}
])
with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: None)):
self.assertEqual(file_vcs_status(segment_info=segment_info), None)
with vim_module._with('buffer', '/bar') as segment_info:
with vim_module._with('bufoptions', buftype='nofile'):
with replace_attr(self.vim, 'guess', get_dummy_guess(status=lambda file: 'M')):
self.assertEqual(file_vcs_status(segment_info=segment_info), None)
def test_trailing_whitespace(self):
pl = Pl()
with vim_module._with('buffer', 'tws') as segment_info:
trailing_whitespace = partial(self.vim.trailing_whitespace, pl=pl, segment_info=segment_info)
self.assertEqual(trailing_whitespace(), None)
self.assertEqual(trailing_whitespace(), None)
vim_module.current.buffer[0] = ' '
self.assertEqual(trailing_whitespace(), [{
'highlight_groups': ['trailing_whitespace', 'warning'],
'contents': '1',
}])
self.assertEqual(trailing_whitespace(), [{
'highlight_groups': ['trailing_whitespace', 'warning'],
'contents': '1',
}])
vim_module.current.buffer[0] = ''
self.assertEqual(trailing_whitespace(), None)
self.assertEqual(trailing_whitespace(), None)
def test_tabnr(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=True), '1')
self.assertEqual(self.vim.tabnr(pl=pl, segment_info=segment_info, show_current=False), None)
def test_tab(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info), [{
'contents': None,
'literal_contents': (0, '%1T'),
}])
self.assertEqual(self.vim.tab(pl=pl, segment_info=segment_info, end=True), [{
'contents': None,
'literal_contents': (0, '%T'),
}])
def test_bufnr(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['bufnr']))
self.assertEqual(self.vim.bufnr(pl=pl, segment_info=segment_info, show_current=False), None)
def test_winnr(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=True), str(segment_info['winnr']))
self.assertEqual(self.vim.winnr(pl=pl, segment_info=segment_info, show_current=False), None)
def test_segment_info(self):
pl = Pl()
with vim_module._with('tabpage'):
with vim_module._with('buffer', '1') as segment_info:
self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
vim_module.current.buffer[0] = ' '
self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
'contents': '+',
'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
}])
vim_module._undo()
self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
old_buffer = vim_module.current.buffer
vim_module._new('2')
segment_info = vim_module._get_segment_info()
self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), None)
old_buffer[0] = ' '
self.assertEqual(self.vim.modified_indicator(pl=pl, segment_info=segment_info), None)
self.assertEqual(self.vim.tab_modified_indicator(pl=pl, segment_info=segment_info), [{
'contents': '+',
'highlight_groups': ['tab_modified_indicator', 'modified_indicator'],
}])
def test_csv_col_current(self):
pl = Pl()
segment_info = vim_module._get_segment_info()
def csv_col_current(**kwargs):
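			# clear the csv cache (if any) so every call re-evaluates the buffer from scratch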
self.vim.csv_cache and self.vim.csv_cache.clear()
return self.vim.csv_col_current(pl=pl, segment_info=segment_info, **kwargs)
buffer = segment_info['buffer']
try:
self.assertEqual(csv_col_current(), None)
buffer.options['filetype'] = 'csv'
self.assertEqual(csv_col_current(), None)
buffer[:] = ['1;2;3', '4;5;6']
vim_module._set_cursor(1, 1)
self.assertEqual(csv_col_current(), [{
'contents': '1', 'highlight_groups': ['csv:column_number', 'csv']
}])
vim_module._set_cursor(2, 3)
self.assertEqual(csv_col_current(), [{
'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
}])
vim_module._set_cursor(2, 3)
self.assertEqual(csv_col_current(display_name=True), [{
'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (2)', 'highlight_groups': ['csv:column_name', 'csv']
}])
buffer[:0] = ['Foo;Bar;Baz']
vim_module._set_cursor(2, 3)
self.assertEqual(csv_col_current(), [{
'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
}])
if sys.version_info < (2, 7):
raise SkipTest('csv module in Python-2.6 does not handle multiline csv files well')
buffer[len(buffer):] = ['1;"bc', 'def', 'ghi', 'jkl";3']
vim_module._set_cursor(5, 1)
self.assertEqual(csv_col_current(), [{
'contents': '2', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (Bar)', 'highlight_groups': ['csv:column_name', 'csv']
}])
vim_module._set_cursor(7, 6)
self.assertEqual(csv_col_current(), [{
'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (Baz)', 'highlight_groups': ['csv:column_name', 'csv']
}])
self.assertEqual(csv_col_current(name_format=' ({column_name:.1})'), [{
'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
}])
self.assertEqual(csv_col_current(display_name=True, name_format=' ({column_name:.1})'), [{
'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
}, {
'contents': ' (B)', 'highlight_groups': ['csv:column_name', 'csv']
}])
self.assertEqual(csv_col_current(display_name=False, name_format=' ({column_name:.1})'), [{
'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
}])
self.assertEqual(csv_col_current(display_name=False), [{
'contents': '3', 'highlight_groups': ['csv:column_number', 'csv']
}])
finally:
vim_module._bw(segment_info['bufnr'])
@classmethod
def setUpClass(cls):
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'path')))
from powerline.segments import vim
cls.vim = vim
from powerline.segments.common import vcs
cls.vcs = vcs
@classmethod
def tearDownClass(cls):
sys.path.pop(0)
class TestPDB(TestCase):
def test_current_line(self):
pl = Pl()
self.assertEqual(pdb.current_line(pl=pl, segment_info={'curframe': Args(f_lineno=10)}), '10')
def test_current_file(self):
pl = Pl()
cf = lambda **kwargs: pdb.current_file(
pl=pl,
segment_info={'curframe': Args(f_code=Args(co_filename='/tmp/abc.py'))},
**kwargs
)
self.assertEqual(cf(), 'abc.py')
self.assertEqual(cf(basename=True), 'abc.py')
self.assertEqual(cf(basename=False), '/tmp/abc.py')
def test_current_code_name(self):
pl = Pl()
ccn = lambda **kwargs: pdb.current_code_name(
pl=pl,
segment_info={'curframe': Args(f_code=Args(co_name='<module>'))},
**kwargs
)
self.assertEqual(ccn(), '<module>')
def test_current_context(self):
pl = Pl()
cc = lambda **kwargs: pdb.current_context(
pl=pl,
segment_info={'curframe': Args(f_code=Args(co_name='<module>', co_filename='/tmp/abc.py'))},
**kwargs
)
self.assertEqual(cc(), 'abc.py')
def test_stack_depth(self):
pl = Pl()
sd = lambda **kwargs: pdb.stack_depth(
pl=pl,
segment_info={'pdb': Args(stack=[1, 2, 3]), 'initial_stack_length': 1},
**kwargs
)
self.assertEqual(sd(), '2')
self.assertEqual(sd(full_stack=False), '2')
self.assertEqual(sd(full_stack=True), '3')
old_cwd = None
def setUpModule():
global old_cwd
global __file__
old_cwd = os.getcwd()
__file__ = os.path.abspath(__file__)
os.chdir(os.path.dirname(__file__))
def tearDownModule():
global old_cwd
os.chdir(old_cwd)
if __name__ == '__main__':
from tests import main
main()
| bezhermoso/powerline | tests/test_segments.py | Python | mit | 79,449 |
from corrdb.common.core import setup_app
from flask.ext.testing import LiveServerTestCase
import twill
class DatabaseTest(LiveServerTestCase):
def create_app(self):
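        # Probe port 5200 first: if something already answers there, run this
        # live server on 5210 instead; if the probe raises, 5200 is free to use.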
try:
browser = twill.get_browser()
browser.go("http://localhost:5200/")
app = setup_app(__name__, 'corrdb.tests.integrate')
app.config['LIVESERVER_PORT'] = 5210
app.config['TESTING'] = True
app.config['MONGODB_SETTINGS'] = {'db': 'corr-integrate','host': 'localhost','port': 27017}
except:
app = setup_app(__name__, 'corrdb.tests.integrate')
app.config['LIVESERVER_PORT'] = 5200
app.config['TESTING'] = True
app.config['MONGODB_SETTINGS'] = {'db': 'corr-integrate','host': 'localhost','port': 27017}
return app
def setUp(self):
# Put some dummy things in the db.
print "Supposed to setup the testcase."
print "Which probably means to push some testing records in the database."
def test_Database(self):
print "This is a test to check that the api endpoints are working properly."
browser = twill.get_browser()
browser.go("http://localhost:27017/")
self.assertTrue(browser.get_code() in (200, 201))
def tearDown(self):
del self.app
print "Supposed to tear down the testcase."
print "Which most likely means to clear the database of all records."
| wd15/corr | corr-db/corrdb/tests/test_backend.py | Python | mit | 1,456 |
#coding:utf-8
import os
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(filename)s:%(lineno)s - %(funcName)20s() - %(name)s - %(levelname)s - %(message)s')
logging.warning('load module:%s', __name__)
user_path = os.path.expanduser("~/")
dir_path = os.path.join(user_path, '.mywunder/')
try:
os.mkdir(dir_path)
except OSError:
pass
config_txt = os.path.join(dir_path, 'config.txt')
db_path = os.path.join(dir_path, "mywunder.db")
CLIENT_ID = 'ce310d4e732dc98c6a07'
| snowleung/mywunder | mywunder/myconfig.py | Python | mit | 505 |
from .topology import Master, OVS, Netns, Link
from collections import OrderedDict
def direct_veth(disable_offloading=False, **settings):
m = Master()
ns1 = Netns('x-ns1').add_to(m)
ns2 = Netns('x-ns2').add_to(m)
Link.declare((ns1, '10.112.1.1'), (ns2, '10.112.1.2'), disable_offloading=disable_offloading)
return (m, ns1, ns2)
direct_veth.arguments = {}
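# A minimal usage sketch (hypothetical driver code; get_script() is the same
# Master method used further below to render the topology):
#   m, ns1, ns2 = direct_veth(disable_offloading=True)
#   print(m.get_script())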
def ovs_chain(disable_offloading=False, **settings):
n_ovs = settings['chain_len']
ovs_ovs_links = settings['ovs_ovs_links']
ovs_ns_links = settings['ovs_ns_links']
m = Master()
ns1 = Netns('x-ns1').add_to(m)
ns2 = Netns('x-ns2').add_to(m)
if n_ovs < 1:
raise ValueError("a chain needs at least one OvS")
ovss = []
for i_ovs in range(n_ovs):
ovss.append(OVS().add_to(m))
# and link them
for ovs1, ovs2 in zip(ovss, ovss[1:]):
Link.declare(ovs1, ovs2, link_type=ovs_ovs_links, disable_offloading=disable_offloading)
Link.declare((ns1, '10.113.1.1'), ovss[0], link_type=ovs_ns_links, disable_offloading=disable_offloading)
Link.declare((ns2, '10.113.1.2'), ovss[-1], link_type=ovs_ns_links, disable_offloading=disable_offloading)
return (m, ns1, ns2)
ovs_chain.arguments = OrderedDict([
('chain_len', {'type': int, 'default': 1, 'help': 'number of switches to chain'}),
    ('ovs_ovs_links', {'default': 'patch', 'choices': ('patch', 'veth'), 'help': 'chooses the link type between OvS switches'}),
    ('ovs_ns_links', {'default': 'port', 'choices': ('port', 'veth'), 'help': 'chooses the link type between OvS and namespaces'}),
])
def unrouted_ns_chain(disable_offloading=False, **settings):
n_ns = settings['chain_len']
use_ovs = settings['use_ovs']
ovs_ns_links = settings['ovs_ns_links']
base_net = '10.114'
m = Master()
if n_ns < 2:
raise ValueError("two namespaces at least")
if n_ns > 255:
raise ValueError("too many namespaces will break the ip addresses")
nss = []
for i_ns in range(n_ns):
ns = Netns().add_to(m)
ns.left = None
ns.right = None
nss.append(ns)
# and link them
subnet_number = 0
for ns1, ns2 in zip(nss, nss[1:]):
ip_address_base = '{}.{}.'.format(base_net, subnet_number)
if not use_ovs:
l = Link.declare((ns1, ip_address_base + '1'), (ns2, ip_address_base + '2'), disable_offloading=disable_offloading)
ns1.right = l.e2
ns2.left = l.e1
else:
ovs = OVS().add_to(m)
l1 = Link.declare((ns1, ip_address_base + '1'), ovs, link_type=ovs_ns_links, disable_offloading=disable_offloading)
ns2.left = l1.e2
l2 = Link.declare((ns2, ip_address_base + '2'), ovs, link_type=ovs_ns_links, disable_offloading=disable_offloading)
ns1.right = l2.e2
subnet_number += 1
return m, nss, base_net
unrouted_ns_chain.arguments = OrderedDict([
('chain_len', {'type': int, 'default': 2, 'help': 'number of namespaces to chain'}),
('use_ovs', {'default': False, 'action': 'store_true', 'help': 'use OvS switches to link the namespaces'}),
    ('ovs_ns_links', {'default': 'port', 'choices': ('port', 'veth'), 'help': 'chooses the link type between OvS and namespaces, if OvS is enabled'}),
])
def ns_chain_add_routing(m, nss, base_net):
# do the routing for intermediate namespaces
for i, ns in enumerate(nss):
for subnet_number in range(len(nss) - 1):
if subnet_number in range(i - 1, i + 1):
continue # directly linked
elif subnet_number < i:
endpoint = ns.left
elif subnet_number > i:
endpoint = ns.right
ns.add_route('{}.{}.0/24'.format(base_net, subnet_number), endpoint)
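# Worked example of the routing above: with 4 namespaces there are subnets
# 0..2; namespace index 1 is directly linked to subnets 0 and 1, so it only
# adds a route for subnet 2 via its right endpoint.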
def ns_chain(disable_offloading=False, **settings):
m, nss, base_net = unrouted_ns_chain(disable_offloading, **settings)
ns_chain_add_routing(m, nss, base_net)
return (m, nss[0], nss[-1])
ns_chain.arguments = unrouted_ns_chain.arguments
def ns_chain_iptables(disable_offloading=False, **settings):
m, nss, base_net = unrouted_ns_chain(disable_offloading, **settings)
ns_chain_add_routing(m, nss, base_net)
if settings['iptables_type'] == 'stateful':
        max_int = 9223372036854775807  # i.e. 2**63 - 1 (see https://en.wikipedia.org/wiki/9223372036854775807); --connbytes takes 64-bit integers
def generate_rule(x):
return "iptables -w -A fakerules -m connbytes --connbytes {}:{} --connbytes-dir reply --connbytes-mode bytes -j ACCEPT".format(max_int - x - 1, max_int - x)
else:
def generate_rule(x):
second_octet, remainder = divmod(x + 1, 255 * 255)
third_octet, fourth_octet = divmod(remainder, 255)
rule_ipaddr = '11.{}.{}.{}'.format(second_octet, third_octet, fourth_octet)
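            # e.g. x == 0 gives 11.0.0.1 -- each rule gets a unique dummy source address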
return "iptables -w -A fakerules --source {} -j DROP".format(rule_ipaddr)
for ns in nss:
ns.add_configure_command("echo ' adding {iptables_rules_len} {iptables_type} iptables rules'".format(**settings))
ns.add_configure_command("iptables -w -N fakerules")
ns.add_configure_command("iptables -w -A INPUT -j fakerules")
ns.add_configure_command("iptables -w -A FORWARD -j fakerules")
ns.add_configure_command('last_ts="$(date +%s)"', False)
for x in range(settings['iptables_rules_len']):
if not ((x + 1) % 100) and x > 0:
ns.add_configure_command('cur_ts="$(date +%s)"', False)
ns.add_configure_command('echo " inserted {} rules ($((cur_ts - last_ts))s from the last report)"'.format(x + 1), False)
ns.add_configure_command('last_ts=$cur_ts', False)
ns.add_configure_command(generate_rule(x))
return (m, nss[0], nss[-1])
ns_chain_iptables.arguments = ns_chain.arguments.copy()
ns_chain_iptables.arguments['iptables_type'] = {'default': 'stateless', 'choices': ('stateless', 'stateful'), 'help': 'iptables rules type'}
ns_chain_iptables.arguments['iptables_rules_len'] = {'type': int, 'default': 0, 'help': 'number of useless iptables rules to inject'}
def ns_chain_qdisc(qdisc, tera, disable_offloading=False, **settings):
m, nss, base_net = unrouted_ns_chain(disable_offloading, **settings)
ns_chain_add_routing(m, nss, base_net)
# 4294967295 is the maximum unsigned 32bit int (should fit on tc, according to docs)
limit = 4294967295 if not tera else 10**12 // 8
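    # (tera: 1 Tbit/s expressed as bytes per second, hence 10**12 // 8)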
m.get_script() # look, I'm hacking my code! (this will force autogeneration of endpoint names)
for ns in nss:
for endpoint in ns.endpoints:
if qdisc == 'netem':
packet_size = settings['packet_size']
if packet_size == 'default':
packet_size = 2**16 # jumbo packets
limit_burst = int(round(limit / (packet_size + 29), 0)) # to be fair with HTB, this should be the same (netem takes packets instead of bytes)
limit_burst = limit_burst if limit_burst < 2147483647 else 2147483647
                # 2147483647 is the max signed 32-bit int; what netem does with it exactly is unclear, but the value is accepted without complaint.
                # netem does not like a uint64 for the rate; the effect is something like "value % uint32.max".
ns.add_configure_command('tc qdisc replace dev {} root netem rate {}bps limit {} 2>&1'.format(endpoint.name, limit, limit_burst))
elif qdisc == 'htb':
ns.add_configure_command('tc qdisc replace dev {} root handle 1: htb default 1 2>&1'.format(endpoint.name))
ns.add_configure_command('tc class replace dev {0} parent 1: classid 1:1 htb rate {1}bps burst {1}b 2>&1'.format(endpoint.name, limit))
return (m, nss[0], nss[-1])
def ns_chain_qdisc_netem(disable_offloading=False, **settings):
return ns_chain_qdisc('netem', False, disable_offloading, **settings)
ns_chain_qdisc_netem.arguments = ns_chain.arguments.copy()
def ns_chain_qdisc_htb(disable_offloading=False, **settings):
return ns_chain_qdisc('htb', False, disable_offloading, **settings)
ns_chain_qdisc_htb.arguments = ns_chain.arguments.copy()
def ns_chain_qdisc_netem_tera(disable_offloading=False, **settings):
return ns_chain_qdisc('netem', True, disable_offloading, **settings)
ns_chain_qdisc_netem.arguments = ns_chain.arguments.copy()
def ns_chain_qdisc_htb_tera(disable_offloading=False, **settings):
return ns_chain_qdisc('htb', True, disable_offloading, **settings)
ns_chain_qdisc_htb.arguments = ns_chain.arguments.copy()
| serl/topoblocktest | lib/topologies.py | Python | mit | 8,613 |
# This module should be imported from REPL, not run from command line.
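# A usage sketch from the MicroPython REPL (password handling as implemented
# below; 'mypass' is just a placeholder):
#   import webrepl
#   webrepl.start()                   # normal mode, password from webrepl_cfg.PASS
#   webrepl.start(password='mypass')  # manual override mode
#   webrepl.stop()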
import socket
import uos
import network
import uwebsocket
import websocket_helper
import _webrepl
listen_s = None
client_s = None
def setup_conn(port, accept_handler):
global listen_s
listen_s = socket.socket()
listen_s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ai = socket.getaddrinfo("0.0.0.0", port)
addr = ai[0][4]
listen_s.bind(addr)
listen_s.listen(1)
if accept_handler:
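        # option 20 on SOL_SOCKET is MicroPython-specific: it registers a
        # callback invoked on socket events (here: incoming connections;
        # cf. the dupterm_notify use below)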
listen_s.setsockopt(socket.SOL_SOCKET, 20, accept_handler)
for i in (network.AP_IF, network.STA_IF):
iface = network.WLAN(i)
if iface.active():
print("WebREPL daemon started on ws://%s:%d" % (iface.ifconfig()[0], port))
return listen_s
def accept_conn(listen_sock):
global client_s
cl, remote_addr = listen_sock.accept()
prev = uos.dupterm(None)
uos.dupterm(prev)
if prev:
print("\nConcurrent WebREPL connection from", remote_addr, "rejected")
cl.close()
return
print("\nWebREPL connection from:", remote_addr)
client_s = cl
websocket_helper.server_handshake(cl)
ws = uwebsocket.websocket(cl, True)
ws = _webrepl._webrepl(ws)
cl.setblocking(False)
# notify REPL on socket incoming data (ESP32/ESP8266-only)
if hasattr(uos, 'dupterm_notify'):
cl.setsockopt(socket.SOL_SOCKET, 20, uos.dupterm_notify)
uos.dupterm(ws)
def stop():
global listen_s, client_s
uos.dupterm(None)
if client_s:
client_s.close()
if listen_s:
listen_s.close()
def start(port=8266, password=None):
stop()
if password is None:
try:
import webrepl_cfg
_webrepl.password(webrepl_cfg.PASS)
setup_conn(port, accept_conn)
print("Started webrepl in normal mode")
except:
print("WebREPL is not configured, run 'import webrepl_setup'")
else:
_webrepl.password(password)
setup_conn(port, accept_conn)
print("Started webrepl in manual override mode")
def start_foreground(port=8266):
stop()
s = setup_conn(port, None)
accept_conn(s)
| trezor/micropython | extmod/webrepl/webrepl.py | Python | mit | 2,184 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SelfHostedIntegrationRuntimeNode(Model):
"""Properties of Self-hosted integration runtime node.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar node_name: Name of the integration runtime node.
:vartype node_name: str
:ivar machine_name: Machine name of the integration runtime node.
:vartype machine_name: str
:ivar host_service_uri: URI for the host machine of the integration
runtime.
:vartype host_service_uri: str
:ivar status: Status of the integration runtime node. Possible values
include: 'NeedRegistration', 'Online', 'Limited', 'Offline', 'Upgrading',
'Initializing', 'InitializeFailed'
:vartype status: str or
~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus
    :ivar capabilities: The integration runtime capabilities dictionary.
:vartype capabilities: dict[str, str]
:ivar version_status: Status of the integration runtime node version.
:vartype version_status: str
:ivar version: Version of the integration runtime node.
:vartype version: str
:ivar register_time: The time at which the integration runtime node was
registered in ISO8601 format.
:vartype register_time: datetime
:ivar last_connect_time: The most recent time at which the integration
runtime was connected in ISO8601 format.
:vartype last_connect_time: datetime
:ivar expiry_time: The time at which the integration runtime will expire
in ISO8601 format.
:vartype expiry_time: datetime
:ivar last_start_time: The time the node last started up.
:vartype last_start_time: datetime
:ivar last_stop_time: The integration runtime node last stop time.
:vartype last_stop_time: datetime
:ivar last_update_result: The result of the last integration runtime node
update. Possible values include: 'Succeed', 'Fail'
:vartype last_update_result: str or
~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult
:ivar last_start_update_time: The last time for the integration runtime
node update start.
:vartype last_start_update_time: datetime
:ivar last_end_update_time: The last time for the integration runtime node
update end.
:vartype last_end_update_time: datetime
:ivar is_active_dispatcher: Indicates whether this node is the active
dispatcher for integration runtime requests.
:vartype is_active_dispatcher: bool
:ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration
runtime node.
:vartype concurrent_jobs_limit: int
:ivar max_concurrent_jobs: The maximum concurrent jobs in this integration
runtime.
:vartype max_concurrent_jobs: int
"""
_validation = {
'node_name': {'readonly': True},
'machine_name': {'readonly': True},
'host_service_uri': {'readonly': True},
'status': {'readonly': True},
'capabilities': {'readonly': True},
'version_status': {'readonly': True},
'version': {'readonly': True},
'register_time': {'readonly': True},
'last_connect_time': {'readonly': True},
'expiry_time': {'readonly': True},
'last_start_time': {'readonly': True},
'last_stop_time': {'readonly': True},
'last_update_result': {'readonly': True},
'last_start_update_time': {'readonly': True},
'last_end_update_time': {'readonly': True},
'is_active_dispatcher': {'readonly': True},
'concurrent_jobs_limit': {'readonly': True},
'max_concurrent_jobs': {'readonly': True},
}
_attribute_map = {
'node_name': {'key': 'nodeName', 'type': 'str'},
'machine_name': {'key': 'machineName', 'type': 'str'},
'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'capabilities': {'key': 'capabilities', 'type': '{str}'},
'version_status': {'key': 'versionStatus', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'register_time': {'key': 'registerTime', 'type': 'iso-8601'},
'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'},
'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'},
'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'},
'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'},
'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'},
'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'},
'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'},
'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
}
def __init__(self):
self.node_name = None
self.machine_name = None
self.host_service_uri = None
self.status = None
self.capabilities = None
self.version_status = None
self.version = None
self.register_time = None
self.last_connect_time = None
self.expiry_time = None
self.last_start_time = None
self.last_stop_time = None
self.last_update_result = None
self.last_start_update_time = None
self.last_end_update_time = None
self.is_active_dispatcher = None
self.concurrent_jobs_limit = None
self.max_concurrent_jobs = None
| AutorestCI/azure-sdk-for-python | azure-mgmt-datafactory/azure/mgmt/datafactory/models/self_hosted_integration_runtime_node.py | Python | mit | 6,083 |
import sublime
import sublime_plugin
from isort.isort import SortImports
class PysortCommand(sublime_plugin.TextCommand):
def run(self, edit):
old_content = self.view.substr(sublime.Region(0, self.view.size()))
new_content = SortImports(file_contents=old_content).output
self.view.replace(edit, sublime.Region(0, self.view.size()), new_content)
sublime.status_message("Python sort import complete.")
sublime.run_command('sub_notify', {'title': 'ISort', 'msg': 'Python sort import complete.', 'sound': False})
| turbidsoul/isort | sort.py | Python | mit | 556 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ConnectAppTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/ConnectApps/CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"authorize_redirect_url": "http://example.com/redirect",
"company_name": "Twilio",
"deauthorize_callback_method": "GET",
"deauthorize_callback_url": "http://example.com/deauth",
"description": null,
"friendly_name": "Connect app for deletion",
"homepage_url": "http://example.com/home",
"permissions": [],
"sid": "CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps/CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.holodeck.assert_has_request(Request(
'post',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/ConnectApps/CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"authorize_redirect_url": "http://example.com/redirect",
"company_name": "Twilio",
"deauthorize_callback_method": "GET",
"deauthorize_callback_url": "http://example.com/deauth",
"description": null,
"friendly_name": "Connect app for deletion",
"homepage_url": "http://example.com/home",
"permissions": [],
"sid": "CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps/CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps.list()
self.holodeck.assert_has_request(Request(
'get',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/ConnectApps.json',
))
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"connect_apps": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"authorize_redirect_url": "http://example.com/redirect",
"company_name": "Twilio",
"deauthorize_callback_method": "GET",
"deauthorize_callback_url": "http://example.com/deauth",
"description": null,
"friendly_name": "Connect app for deletion",
"homepage_url": "http://example.com/home",
"permissions": [],
"sid": "CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps/CNaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.json"
}
],
"end": 0,
"first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps.json?Page=0&PageSize=50",
"next_page_uri": null,
"page": 0,
"page_size": 50,
"previous_page_uri": null,
"start": 0,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps.list()
self.assertIsNotNone(actual)
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"connect_apps": [],
"end": 0,
"first_page_uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps.json?Page=0&PageSize=50",
"next_page_uri": null,
"page": 0,
"page_size": 50,
"previous_page_uri": null,
"start": 0,
"uri": "/2010-04-01/Accounts/ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/ConnectApps.json"
}
'''
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps.list()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://api.twilio.com/2010-04-01/Accounts/ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/ConnectApps/CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.json',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.api.v2010.accounts(sid="ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.connect_apps(sid="CNXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
| tysonholub/twilio-python | tests/integration/api/v2010/account/test_connect_app.py | Python | mit | 7,437 |
"""
Import prescribing data from CSV files into SQLite
"""
from collections import namedtuple
import csv
from itertools import groupby
import logging
import os
import sqlite3
import gzip
import heapq
from matrixstore.matrix_ops import sparse_matrix, finalise_matrix
from matrixstore.serializer import serialize_compressed
from .common import get_prescribing_filename
logger = logging.getLogger(__name__)
MatrixRow = namedtuple("MatrixRow", "bnf_code items quantity actual_cost net_cost")
class MissingHeaderError(Exception):
pass
def import_prescribing(filename):
if not os.path.exists(filename):
raise RuntimeError("No SQLite file at: {}".format(filename))
connection = sqlite3.connect(filename)
# Trade crash-safety for insert speed
connection.execute("PRAGMA synchronous=OFF")
dates = [date for (date,) in connection.execute("SELECT date FROM date")]
prescriptions = get_prescriptions_for_dates(dates)
write_prescribing(connection, prescriptions)
connection.commit()
connection.close()
def write_prescribing(connection, prescriptions):
cursor = connection.cursor()
# Map practice codes and date strings to their corresponding row/column
# offset in the matrix
practices = dict(cursor.execute("SELECT code, offset FROM practice"))
dates = dict(cursor.execute("SELECT date, offset FROM date"))
matrices = build_matrices(prescriptions, practices, dates)
rows = format_as_sql_rows(matrices, connection)
cursor.executemany(
"""
UPDATE presentation SET items=?, quantity=?, actual_cost=?, net_cost=?
WHERE bnf_code=?
""",
rows,
)
def get_prescriptions_for_dates(dates):
"""
Yield all prescribing data for the given dates as tuples of the form:
bnf_code, practice_code, date, items, quantity, actual_cost, net_cost
sorted by bnf_code, practice and date.
"""
dates = sorted(dates)
filenames = [get_prescribing_filename(date) for date in dates]
missing_files = [f for f in filenames if not os.path.exists(f)]
if missing_files:
raise RuntimeError(
"Some required CSV files were missing:\n {}".format(
"\n ".join(missing_files)
)
)
prescribing_streams = [read_gzipped_prescribing_csv(f) for f in filenames]
# We assume that the input files are already sorted by (bnf_code, practice,
# month) so to ensure that the combined stream is sorted we just need to
# merge them correctly, which heapq.merge handles nicely for us
return heapq.merge(*prescribing_streams)
def read_gzipped_prescribing_csv(filename):
with gzip.open(filename, "rt") as f:
for row in parse_prescribing_csv(f):
yield row
def parse_prescribing_csv(input_stream):
"""
Accepts a stream of CSV and yields prescribing data as tuples of the form:
bnf_code, practice_code, date, items, quantity, actual_cost, net_cost
"""
reader = csv.reader(input_stream)
headers = next(reader)
try:
bnf_code_col = headers.index("bnf_code")
practice_col = headers.index("practice")
date_col = headers.index("month")
items_col = headers.index("items")
quantity_col = headers.index("quantity")
actual_cost_col = headers.index("actual_cost")
net_cost_col = headers.index("net_cost")
except ValueError as e:
raise MissingHeaderError(str(e))
for row in reader:
yield (
# These sometimes have trailing spaces in the CSV
row[bnf_code_col].strip(),
row[practice_col].strip(),
# We only need the YYYY-MM-DD part of the date
row[date_col][:10],
int(row[items_col]),
float(row[quantity_col]),
pounds_to_pence(row[actual_cost_col]),
pounds_to_pence(row[net_cost_col]),
)
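# For illustration (hypothetical values), given the header
#   bnf_code,practice,month,items,quantity,actual_cost,net_cost
# a row like
#   0401020K0AAAIAI,A81001,2019-01-01 00:00:00,2,28.0,1.50,1.40
# is yielded as
#   ('0401020K0AAAIAI', 'A81001', '2019-01-01', 2, 28.0, 150, 140)
# with the costs converted from pounds to pence by the helper below.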
def pounds_to_pence(value):
return int(round(float(value) * 100))
def build_matrices(prescriptions, practices, dates):
"""
    Accepts an iterable of prescriptions plus mappings of practice codes and
date strings to their respective row/column offsets. Yields tuples of the
form:
bnf_code, items_matrix, quantity_matrix, actual_cost_matrix, net_cost_matrix
Where the matrices contain the prescribed values for that presentation for
every practice and date.
"""
max_row = max(practices.values())
max_col = max(dates.values())
shape = (max_row + 1, max_col + 1)
grouped_by_bnf_code = groupby(prescriptions, lambda row: row[0])
for bnf_code, row_group in grouped_by_bnf_code:
items_matrix = sparse_matrix(shape, integer=True)
quantity_matrix = sparse_matrix(shape, integer=False)
actual_cost_matrix = sparse_matrix(shape, integer=True)
net_cost_matrix = sparse_matrix(shape, integer=True)
for _, practice, date, items, quantity, actual_cost, net_cost in row_group:
practice_offset = practices[practice]
date_offset = dates[date]
items_matrix[practice_offset, date_offset] = items
quantity_matrix[practice_offset, date_offset] = quantity
actual_cost_matrix[practice_offset, date_offset] = actual_cost
net_cost_matrix[practice_offset, date_offset] = net_cost
yield MatrixRow(
bnf_code,
finalise_matrix(items_matrix),
finalise_matrix(quantity_matrix),
finalise_matrix(actual_cost_matrix),
finalise_matrix(net_cost_matrix),
)
def format_as_sql_rows(matrices, connection):
"""
Given an iterable of MatrixRows (which contain a BNF code plus all
prescribing data for that presentation) yield tuples of values ready for
insertion into SQLite
"""
cursor = connection.cursor()
num_presentations = next(cursor.execute("SELECT COUNT(*) FROM presentation"))[0]
count = 0
for row in matrices:
count += 1
# We make sure we have a row for every BNF code in the data, even ones
# we didn't know about previously. This is a hack that we won't need
# once we can use SQLite v3.24.0 which has proper UPSERT support.
cursor.execute(
"INSERT OR IGNORE INTO presentation (bnf_code) VALUES (?)", [row.bnf_code]
)
if should_log_message(count):
logger.info(
"Writing data for %s (%s/%s)", row.bnf_code, count, num_presentations
)
yield (
serialize_compressed(row.items),
serialize_compressed(row.quantity),
serialize_compressed(row.actual_cost),
serialize_compressed(row.net_cost),
row.bnf_code,
)
logger.info("Finished writing data for %s presentations", count)
def should_log_message(n):
"""
To avoid cluttering log output we don't log the insertion of every single
presentation
"""
if n <= 10:
return True
if n == 100:
return True
return n % 200 == 0
| ebmdatalab/openprescribing | openprescribing/matrixstore/build/import_prescribing.py | Python | mit | 7,057 |
__author__ = 'jdaniel'
from GaiaSolve.model import Model
class ZDT2(Model):
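    """ZDT2 bi-objective benchmark (standard definition, matching evaluate() below):
        f1(x) = x_1
        g(x)  = 1 + 9 * (x_2 + ... + x_30) / 29
        f2(x) = g * (1 - (f1 / g)**2)
    All 30 variables lie in [0, 1]; the Pareto-optimal front (g = 1) is the
    non-convex curve f2 = 1 - f1**2.
    """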
def __init__(self):
super(ZDT2, self).__init__()
def evaluate(self):
g = 1.0 + 9.0*sum(self.x[1:])/(len(self.x) - 1)
f1 = self.x[0]
f2 = g*(1.0 - (f1/g)**2)
self.obj = [f1, f2]
self.eqcon = []
self.neqcon = []
def number_of_design_variables(self):
return 30
def lower_bound(self):
return [0.0]*30
def upper_bound(self):
return [1.0]*30
def number_of_objectives(self):
return 2
def has_equality_constraints(self):
return False
def number_of_equality_constraints(self):
return 0
def has_inequality_constraints(self):
return False
def number_of_inequality_constraints(self):
return 0
def decision_variable_names(self):
x_names = []
for i in range(30):
x_names.append('x' + str(i))
return x_names
def objective_variable_names(self):
return ['f1', 'f2']
def equality_constraint_variable_names(self):
return []
def inequality_constraint_variable_names(self):
        return []
| jldaniel/Gaia | Models/zdt2.py | Python | mit | 1,190 |
# -*- coding: utf-8 -*-
import keyring
from ironworks.database import mDb, db
app = None
rundir = None
datadir = None
logger = None
webroot = ""
log_list = []
log_file = ""
commits_behind = 0
latest_commit = None
current_commit = None
commits_compare_url = ""
use_git = False
first_run = 0
threads = []
host = '0.0.0.0'
port = 7000
database = None
loginDatabase = None
systemDatabase = None
pyemoncmsDatabase = None
user = None
cmsSettings = None
bleexSettings = None
adminLogin = False
pyemoncmsLogin = False
pyemoncmsUser = None
def setApp(APP):
global app
app = APP
def getApp():
global app
return app
def setRunDir(RUNDIR):
global rundir
rundir = RUNDIR
def getRunDir():
global rundir
return rundir
def setDataDir(DATA_DIR):
global datadir
datadir = DATA_DIR
def getDataDir():
global datadir
return datadir
def setLogger(LOGGER):
global logger
logger = LOGGER
def getLogger():
global logger
return logger
def setWebroot(WEBROOT):
global webroot
webroot = WEBROOT
def getWebroot():
global webroot
return webroot
def setLogList(LOG_LIST):
global log_list
log_list = LOG_LIST
def getLogList():
global log_list
return log_list
def setLogFile(LOG_FILE):
global log_file
log_file = LOG_FILE
def getLogFile():
global log_file
return log_file
def setCommitsBehind(COMMITS_BEHIND):
global commits_behind
commits_behind = COMMITS_BEHIND
def getCommitsBehind():
global commits_behind
return commits_behind
def setCommitsCompareURL(COMMITS_COMPARE_URL):
global commits_compare_url
commits_compare_url = COMMITS_COMPARE_URL
def getCommitsCompareURL():
global commits_compare_url
return commits_compare_url
def setUseGit(USE_GIT):
global use_git
use_git = USE_GIT
def getUseGit():
global use_git
return use_git
def setLatestCommit(LATEST_COMMIT):
global latest_commit
latest_commit = LATEST_COMMIT
def getLatestCommit():
global latest_commit
return latest_commit
def setCurrentCommit(CURRENT_COMMIT):
global current_commit
current_commit = CURRENT_COMMIT
def getCurrentCommit():
global current_commit
return current_commit
def setFirstRun(FIRST_RUN):
global first_run
first_run = FIRST_RUN
def getFirstRun():
global first_run
return first_run
def setThreads(THREADS):
global threads
threads = THREADS
def getThreads():
global threads
return threads
def setHost(HOST):
global host
host = HOST
def getHost():
global host
return host
def setPort(PORT):
global port
port = PORT
def getPort():
global port
return port
def setAdminLogin(val):
global adminLogin
adminLogin = val
def getAdminLogin():
global adminLogin
return adminLogin
def setPyEmoncmsLogin(val, user):
global pyemoncmsLogin, pyemoncmsUser
pyemoncmsLogin = val
pyemoncmsUser = user
def getPyEmoncmsLogin():
global pyemoncmsLogin, pyemoncmsUser
status = {"user": pyemoncmsUser, "status": pyemoncmsLogin}
return status
#sqlite database-------------------------------------------------------------------------
def setPrefsDb(DATABASE):
global database
database = db.Db(DATABASE)
def getPrefsDb():
global database
return database
#mysql database----------------------------------------------------------------------------
def setLoginDb(host, userName, dbPassword, dbName):
global loginDatabase, logger
loginDatabase = mDb.Db(host, userName, dbPassword, dbName, logger)
def getLoginDb():
global loginDatabase
return loginDatabase
def setSystemDb(host, userName, dbPassword, dbName):
global systemDatabase, logger
systemDatabase = mDb.Db(host, userName, dbPassword, dbName, logger)
def getSystemDb():
global systemDatabase
return systemDatabase
def setPyEMONCMSDb(host, userName, dbPassword, dbName):
global pyemoncmsDatabase, logger
pyemoncmsDatabase = mDb.Db(host, userName, dbPassword, dbName, logger)
def getPyEMONCMSDb():
global pyemoncmsDatabase
return pyemoncmsDatabase
#Settings - Bleextop----------------------------------------------------------------------
def setUser(username):
global user
if username is not None:
result = systemDatabase.select("users", where={"username": username})
user = result[0]
userDict = {}
userDict["user_k"] = str(user[0])
userDict["username"] = user[1]
userDict["email"] = user[3]
userDict["name"] = user[4]
userDict["lastname"] = user[5]
userDict["avatar"] = user[6]
userDict["active"] = user[7]
user = userDict
else:
user = username
return user
def getUser():
return user
def setCMSSettings(cms):
global cmsSettings
cmsSettings = cms
def getCMSSettings():
global cmsSettings
return cmsSettings
def setBleexSettings(bleex):
global bleexSettings
bleexSettings = bleex
def getBleexSettings():
global bleexSettings
return bleexSettings
# Try to fix "OperationalError: (2006, 'MySQL server has gone away')"
def checkDbKey(prefix, userName):
userName = userName
try:
dbPassword = keyring.get_password(prefix + userName, userName)
if dbPassword is None:
password = 'your db password'
keyring.set_password(prefix + userName, userName, password)
logger.log('Initial database password added to keyring.', "INFO")
elif str(dbPassword) == 'your db password':
logger.log('Initial database password in keyring.', "WARNING")
logger.log('Please change your password.', "WARNING")
else:
logger.log('Userdefined database password set.', "INFO")
return dbPassword
except:
logger.log('Either could not access keyring or an entry could not be made.', "ERROR")
return ""
def resetLoginDb(host, userName, dbName):
userPassword = checkDbKey("Ironworks-Login-", userName)
setLoginDb(host, userName, userPassword, dbName)
def resetSystemDb(host, userName, dbName):
userPassword = checkDbKey("Ironworks-MySQL-", userName)
setSystemDb(host, userName, userPassword, dbName)
def resetPyEMONCMSDb(host, userName, dbName):
userPassword = checkDbKey("Ironworks-PyEMONCMS-", userName)
setPyEMONCMSDb(host, userName, userPassword, dbName)
| hephaestus9/Ironworks | ironworks/serverTools.py | Python | mit | 6,472 |
# MIT License
#
# Copyright (c) 2020-2021 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from typing import List, Optional, Text, Tuple, Union
import numpy as np
from torch_audiomentations.core.transforms_interface import BaseWaveformTransform
from pyannote.audio.core.task import Problem, Resolution, Specifications, Task
from pyannote.audio.tasks.segmentation.mixins import SegmentationTaskMixin
from pyannote.database import Protocol
class SpeakerTracking(SegmentationTaskMixin, Task):
"""Speaker tracking
Speaker tracking is the process of determining if and when a (previously
enrolled) person's voice can be heard in a given audio recording.
Here, it is addressed with the same approach as voice activity detection,
except {"non-speech", "speech"} classes are replaced by {"speaker1", ...,
"speaker_N"} where N is the number of speakers in the training set.
Parameters
----------
protocol : Protocol
pyannote.database protocol
duration : float, optional
Chunks duration. Defaults to 2s.
warm_up : float or (float, float), optional
Use that many seconds on the left- and rightmost parts of each chunk
to warm up the model. While the model does process those left- and right-most
parts, only the remaining central part of each chunk is used for computing the
loss during training, and for aggregating scores during inference.
Defaults to 0. (i.e. no warm-up).
balance: str, optional
When provided, training samples are sampled uniformly with respect to that key.
For instance, setting `balance` to "uri" will make sure that each file will be
equally represented in the training samples.
weight: str, optional
When provided, use this key to as frame-wise weight in loss function.
batch_size : int, optional
Number of training samples per batch. Defaults to 32.
num_workers : int, optional
Number of workers used for generating training samples.
Defaults to multiprocessing.cpu_count() // 2.
pin_memory : bool, optional
If True, data loaders will copy tensors into CUDA pinned
memory before returning them. See pytorch documentation
for more details. Defaults to False.
augmentation : BaseWaveformTransform, optional
torch_audiomentations waveform transform, used by dataloader
during training.
"""
ACRONYM = "spk"
def __init__(
self,
protocol: Protocol,
duration: float = 2.0,
warm_up: Union[float, Tuple[float, float]] = 0.0,
balance: Text = None,
weight: Text = None,
batch_size: int = 32,
num_workers: int = None,
pin_memory: bool = False,
augmentation: BaseWaveformTransform = None,
):
super().__init__(
protocol,
duration=duration,
warm_up=warm_up,
batch_size=batch_size,
num_workers=num_workers,
pin_memory=pin_memory,
augmentation=augmentation,
)
self.balance = balance
self.weight = weight
# for speaker tracking, task specification depends
# on the data: we do not know in advance which
# speakers should be tracked. therefore, we postpone
# the definition of specifications.
def setup(self, stage: Optional[str] = None):
super().setup(stage=stage)
self.specifications = Specifications(
# one class per speaker
classes=sorted(self._train_metadata["annotation"]),
# multiple speakers can be active at once
problem=Problem.MULTI_LABEL_CLASSIFICATION,
resolution=Resolution.FRAME,
duration=self.duration,
warm_up=self.warm_up,
)
@property
def chunk_labels(self) -> List[Text]:
"""Ordered list of labels
Used by `prepare_chunk` so that y[:, k] corresponds to activity of kth speaker
"""
return self.specifications.classes
def prepare_y(self, y: np.ndarray) -> np.ndarray:
"""Get speaker tracking targets"""
return y
# TODO: add option to give more weights to smaller classes
# TODO: add option to balance training samples between classes
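# A minimal usage sketch (the protocol name below is hypothetical and must
# exist in your pyannote.database configuration):
#   from pyannote.database import get_protocol
#   protocol = get_protocol('MyDatabase.SpeakerTracking.MyProtocol')
#   task = SpeakerTracking(protocol, duration=2.0, batch_size=32)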
| pyannote/pyannote-audio | pyannote/audio/tasks/segmentation/speaker_tracking.py | Python | mit | 5,325 |
from django import forms
from lists.models import Item
EMPTY_LIST_ERROR = "You can't have an empty list item"
class ItemForm(forms.models.ModelForm):
class Meta:
model = Item
fields = ('text',)
widgets = {
'text': forms.fields.TextInput(attrs={
'placeholder': 'Enter a to-do item',
'class': 'form-control input-lg'
}),
}
error_messages = {
            'text': {'required': EMPTY_LIST_ERROR}
        }
| rmelchorv/TDD-Cuervos | lists/forms.py | Python | mit | 435 |
from crpyto_tool.libs.finite_field_op import FiniteFieldNumber
if __name__ == '__main__':
magical_number = FiniteFieldNumber(FiniteFieldNumber.magical_number, False)
print 'p(x): ' + str(magical_number)
number2 = FiniteFieldNumber('0')
number3 = FiniteFieldNumber('1000110')
print 'Q5-(1):' + str(number2 - number3)
number0 = FiniteFieldNumber('1000110')
number1 = FiniteFieldNumber('10001011')
print 'Q5-(2):' + str(number0 + number1)
print 'Q5-(3):' + str(number0 * number1)
number4 = FiniteFieldNumber('10000111111010')
print number4 / magical_number
print FiniteFieldNumber('11110101') * FiniteFieldNumber('1000110')
| YcheLanguageStudio/PythonStudy | crpytography/tests/test_finite_field.py | Python | mit | 675 |
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, michel.mooij7@gmail.com
"""
Tool Description
================
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
checking tool 'cppcheck'.
See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
itself.
Note that many Linux distributions already provide a ready-to-install version
of cppcheck. On Fedora, for instance, it can be installed using yum:
'sudo yum install cppcheck'
Usage
=====
In order to use this waftool simply add it to the 'options' and 'configure'
functions of your main waf script as shown in the example below:
def options(opt):
opt.load('cppcheck', tooldir='./waftools')
def configure(conf):
conf.load('cppcheck')
Note that the example shown above assumes that the cppcheck waftool is located
in the sub directory named 'waftools'.
When configured as shown in the example above, cppcheck will automatically
perform a source code analysis on all C/C++ build tasks that have been
defined in your waf build system.
The example shown below for a C program will be used as input for cppcheck when
building the task.
def build(bld):
bld.program(name='foo', src='foobar.c')
The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck, the build will be aborted and a link to the html report will be shown.
When needed, source code checking by cppcheck can be disabled per task or per
detected error or warning for a particular task. It can also be disabled for
all tasks.
In order to exclude a task from source code checking, add the skip option to
the task as shown below:
def build(bld):
bld.program(
name='foo',
src='foobar.c'
cppcheck_skip=True
)
When needed, problems detected by cppcheck may be suppressed using a file
containing a list of suppression rules. The relative or absolute path to this
file can be added to the build task as shown in the example below:
bld.program(
name='bar',
src='foobar.c',
cppcheck_suppress='bar.suppress'
)
A cppcheck suppress file should contain one suppress rule per line. Each of
these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
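For example, a suppress file might look like this (illustrative entries; the
rule format documented by cppcheck is <id>[:<filename>[:<line>]]):
    unusedFunction:src/legacy.c
    nullPointer:src/foobar.c:42
    missingIncludeSystem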
Dependencies
================
This waftool depends on the python pygments module, which is used for source
code syntax highlighting when creating the html reports. See http://pygments.org/
for more information on this package.
Remarks
================
The generation of the html report is originally based on the cppcheck-htmlreport.py
script that comes shipped with the cppcheck tool.
"""
import os
import sys
import xml.etree.ElementTree as ElementTree
from waflib import Task, TaskGen, Logs, Context
PYGMENTS_EXC_MSG = '''
The required module 'pygments' could not be found. Please install it using your
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
see 'http://pygments.org/download/' for installation instructions.
'''
try:
import pygments
from pygments import formatters, lexers
except ImportError, e:
Logs.warn(PYGMENTS_EXC_MSG)
raise e
def options(opt):
opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
default=False, action='store_true',
help='do not check C/C++ sources (default=False)')
opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
default=False, action='store_true',
help='continue in case of errors (default=False)')
opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
default='warning,performance,portability,style,unusedFunction', action='store',
help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
default='warning,performance,portability,style', action='store',
help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
default='c99', action='store',
help='cppcheck standard to use when checking C (default=c99)')
opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
default='c++03', action='store',
help='cppcheck standard to use when checking C++ (default=c++03)')
opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
default=False, action='store_true',
help='forced check for missing buildin include files, e.g. stdio.h (default=False)')
opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')
def configure(conf):
if conf.options.cppcheck_skip:
conf.env.CPPCHECK_SKIP = [True]
conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
conf.find_program('cppcheck', var='CPPCHECK')
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
if not self.bld.options.cppcheck_err_resume:
task.fatal.append('error')
def _tgen_create_cmd(self):
features = getattr(self, 'features', [])
std_c = self.env.CPPCHECK_STD_C
std_cxx = self.env.CPPCHECK_STD_CXX
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
cmd = '%s' % self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
if 'cxx' in features:
args.append('--language=c++')
args.append('--std=%s' % std_cxx)
else:
args.append('--language=c')
args.append('--std=%s' % std_c)
if self.bld.options.cppcheck_check_config:
args.append('--check-config')
if set(['cprogram','cxxprogram']) & set(features):
args.append('--enable=%s' % bin_enable)
else:
args.append('--enable=%s' % lib_enable)
for src in self.to_list(getattr(self, 'source', [])):
args.append('%r' % src)
for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
args.append('-I%r' % inc)
for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
args.append('-I%r' % inc)
return '%s %s' % (cmd, ' '.join(args))
class cppcheck(Task.Task):
quiet = True
def run(self):
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
self._save_xml_report(stderr)
defects = self._get_defects(stderr)
index = self._create_html_report(defects)
self._errors_evaluate(defects, index)
return 0
def _save_xml_report(self, s):
'''use cppcheck xml result string, add the command string used to invoke cppcheck
and save as xml file.
'''
header = '%s\n' % s.split('\n')[0]
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
body = ElementTree.tostring(root)
node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
node.write(header + body)
def _get_defects(self, xml_string):
'''evaluate the xml string returned by cppcheck (on sdterr) and use it to create
a list of defects.
'''
defects = []
for error in ElementTree.fromstring(xml_string).iter('error'):
defect = {}
defect['id'] = error.get('id')
defect['severity'] = error.get('severity')
defect['msg'] = str(error.get('msg')).replace('<','<')
defect['verbose'] = error.get('verbose')
for location in error.findall('location'):
defect['file'] = location.get('file')
defect['line'] = str(int(location.get('line')) - 1)
defects.append(defect)
return defects
def _create_html_report(self, defects):
files, css_style_defs = self._create_html_files(defects)
index = self._create_html_index(files)
self._create_css_file(css_style_defs)
return index
def _create_html_files(self, defects):
sources = {}
defects = [defect for defect in defects if defect.has_key('file')]
for defect in defects:
name = defect['file']
if not sources.has_key(name):
sources[name] = [defect]
else:
sources[name].append(defect)
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
names = sources.keys()
for i in range(0,len(names)):
name = names[i]
htmlfile = 'cppcheck/%i.html' % (i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
return files, css_style_defs
def _create_html_file(self, sourcefile, htmlfile, errors):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
hl_lines = [e['line'] for e in errors if e.has_key('line')]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
formatter.errors = [e for e in errors if e.has_key('line')]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)
s = ElementTree.tostring(root, method='html')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
return css_style_defs
def _create_html_index(self, files):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)
s = ElementTree.tostring(root, method='html')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
node.write(s)
return node
def _create_html_table(self, content, files):
table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
for name, val in files.items():
f = val['htmlfile']
s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
row = ElementTree.fromstring(s)
table.append(row)
errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint)
for e in errors:
if not e.has_key('line'):
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
if e['severity'] == 'error':
attr = 'class="error"'
s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
row = ElementTree.fromstring(s)
table.append(row)
content.append(table)
def _create_css_file(self, css_style_defs):
css = str(CPPCHECK_CSS_FILE)
if css_style_defs:
css = "%s\n%s\n" % (css, css_style_defs)
node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
node.write(css)
def _errors_evaluate(self, errors, http_index):
name = self.generator.get_name()
fatal = self.fatal
severity = [err['severity'] for err in errors]
problems = [err for err in errors if err['severity'] != 'information']
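        # abort the build only if a defect with one of the configured fatal
        # severities was found; otherwise just log the problems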
if set(fatal) & set(severity):
exc = "\n"
exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
exc += "\n file://%r" % (http_index)
exc += "\n"
self.generator.bld.fatal(exc)
elif len(problems):
msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
msg += "\n file://%r" % http_index
msg += "\n"
Logs.error(msg)
class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
errors = []
def wrap(self, source, outfile):
line_no = 1
for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
# If this is a source code line we want to add a span tag at the end.
if i == 1:
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
line_no = line_no + 1
yield i, t
CCPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header"> </div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
</div>
</div>
<div id="page-footer"> </div>
</body>
</html>
"""
CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""
CPPCHECK_HTML_ERROR = \
    '<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}
.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}
th, td {
min-width: 100px;
text-align: left;
}
#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;
border-bottom-style: solid;
border-bottom-color: #aaaaaa;
}
#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}
#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}
#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}
#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}
#menu > a {
margin-left: 10px;
display: block;
}
#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}
#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}
#footer > div {
float: left;
width: 33%;
}
"""
| michelm/beehive | waftools/cppcheck.py | Python | mit | 16,204 |
import win32api
import os
import sys
import subprocess
import logging
from itertools import izip_longest
#itertools recipe
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
def harddrive_enumerator():
"""
    Generator yielding all (fixed) drive letters on the computer.
    Yields tuples of (DriveName, VolumeName) - e.g. ("D:", "Samsung Station")
"""
logger = logging.getLogger("keepitup")
drivesDetailedList = []
if sys.platform == "win32":
logger.debug("Enumerating win32 hard drives")
getDrivesProc = subprocess.Popen('wmic logicaldisk where drivetype=3 get name, VolumeName /format:list',
shell=True,
stdout=subprocess.PIPE)
output, err = getDrivesProc.communicate()
logger.debug("Enumerated hard drives output: %s", output)
drivesDetailedList = output.split(os.linesep)
elif sys.platform in ["linux2", "darwin"]:
logger.debug("Enumerating linux/osx hard drives")
raise NotImplementedError()
else:
logger.error("Cannot enumeratre hard drives - unrecognized OS: %s", sys.platform)
raise NotImplementedError()
for name, volumeName in grouper(2, drivesDetailedList):
if "Name=" in name and "VolumeName" in volumeName:
name = name[len("Name="):].strip()
volumeName = volumeName[len("VolumeName="):].strip()
yield name, volumeName
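# Example usage (drive names are illustrative):
#
#   for name, volume in harddrive_enumerator():
#       print "%s (%s)" % (name, volume)  # e.g. "C: (System)"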
| yoavfrancis/KeepItUp | KeepItUp/harddrive_enumerator.py | Python | mit | 1,589 |
# coding=utf-8
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), "..")))
from db.MysqlUtil import initMysql, execute, select, batchInsert, disconnect
from common.JsonHelper import loadJsonConfig
from api.tushareApi import getSimpleHistoryData
from datetime import datetime, timedelta
from common.LoggerHelper import writeErrorLog, writeWarningLog, writeInfoLog, writeDebugLog, writeLog, writeExceptionLog
from wechat.weChatSender import sendMessageToMySelf
from common.HttpHelper import httpGet
from common.FileHelper import saveFile
import time
import json
# Fetch historical quote data (forward-adjusted) from 10jqka ("Tonghuashun"/THS)
def updateStockHistoryInfoByTHS(stockList):
for stock in stockList:
code = stock[0]
i = 2010
thisYear = datetime.now().year
while (i <= thisYear):
# time.sleep(1)
infos = getStockInfos(code, i)
if infos is None:
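                # None signals a transient (non-404) failure; retry the same
                # year, since the year counter is not advanced here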
continue
for date in infos:
open = infos.get(date).get('open')
close = infos.get(date).get('close')
high = infos.get(date).get('high')
low = infos.get(date).get('low')
volume = infos.get(date).get('volume')
amount = infos.get(date).get('amount')
checkExistSql = unicode("select count(*) from s_stock where code='{0}' and date='{1}'").format(code,
date)
count = select(checkExistSql, False)[0]
if count > 0:
updateSql = unicode(
"update s_stock set volume={2},highPrice={3},lowPrice={4},openPrice={5},closePrice={6},amount='{7}' where code='{0}' and date='{1}'").format(
code, date, volume, high, low, open, close, amount)
execute(updateSql)
print code, date, updateSql
else:
insertSql = unicode(
"insert into s_stock(code,date,timestamp,volume,highPrice,lowPrice,openPrice,closePrice,amount) VALUES ('{0}','{1}',{2},{3},{4},{5},{6},{7},'{8}')").format(
code, date, int(time.mktime(time.strptime(date, '%Y-%m-%d'))), volume, high, low, open, close,
amount)
execute(insertSql)
print code, date, insertSql
i = i + 1
# Parse one year of THS quote data (forward-adjusted)
def getStockInfos(code, year):
try:
url = "http://d.10jqka.com.cn/v2/line/hs_{0}/01/{1}.js".format(code, year)
res = httpGet(url).decode("utf-8")
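        # the endpoint returns JSONP; strip the "callback(" wrapper and the
        # trailing ")" before parsing the JSON payload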
index = res.find("(")
if (index < 0):
writeErrorLog(unicode("解析行情失败: code:{0}, year:{1}, res:{2}").format(code, year, res))
return []
res = res[index + 1:-1]
writeLog(unicode("获取股票历史行情: code: {0}, year:{1}").format(code, year))
jo = json.loads(res)
dataInfo = jo['data'].split(';')
result = {}
for item in dataInfo:
infos = item.split(',')
dic = {}
dic['open'] = infos[1]
dic['high'] = infos[2]
dic['low'] = infos[3]
dic['close'] = infos[4]
dic['volume'] = infos[5]
            dic['amount'] = "{0}亿".format(round(float(infos[6]) / 100000000, 1))  # 亿 = 10^8 (hundred million)
result[datetime.strptime(infos[0], '%Y%m%d').strftime('%Y-%m-%d')] = dic
return result
except Exception, e:
writeErrorLog(unicode("解析行情失败: code:{0}, year:{1}, e:{2}").format(code, year, str(e)))
if "404" in str(e):
return []
else:
return None
def getStockHistoryInfoFromDb():
sql = unicode("SELECT code,count(*) from s_stock GROUP by code HAVING count(*)<20")
data = select(sql)
updateStockHistoryInfoByTHS(data)
def getStockHistoryInfoFromConfig():
stockList = loadJsonConfig(os.path.abspath(os.path.join(os.getcwd(), "../config/newStockList.json")))
updateStockHistoryInfoByTHS(stockList)
def updateAllStockHistoryInfo():
sql = unicode("select code,name from s_stock_info order by code asc")
data = select(sql)
updateStockHistoryInfoByTHS(data)
def updateStockOtherInfo():
sql = unicode("select code,name from s_stock_info order by code asc")
stockList = select(sql)
for stock in stockList:
code = stock[0]
if int(code) < 601126:
continue
selectInfoSql = unicode("select date,closePrice from s_stock where code='{0}' order by date asc").format(code)
data = select(selectInfoSql)
writeLog(unicode("更新股票其他指标数据: code: {0}").format(code))
        updateStockBias(code, data, 6)
        updateStockBias(code, data, 12)
        updateStockBias(code, data, 24)
updateStockMA(code, data, 5)
updateStockMA(code, data, 10)
updateStockMA(code, data, 20)
updateStockMA(code, data, 30)
updateStockMA(code, data, 60)
updateStockMA(code, data, 120)
updateStockMA(code, data, 250)
updateStockChangePercent(code, data)
def updateStockChangePercent(code, data):
for i in range(1, len(data)):
try:
changeAmount = data[i][1] - data[i - 1][1]
changePercent = round(changeAmount * 100 / data[i - 1][1], 2)
updateSql = unicode(
"update s_stock set changePercent={0},changeAmount={1} where code='{2}' and date='{3}'").format(
changePercent, changeAmount, code, data[i][0])
execute(updateSql)
except Exception, e:
            writeErrorLog(
                unicode("Failed to update change-percent data: code:{0}, i:{1}, date:{2}, closePrice:{3}").format(code, i, data[i][0], data[i][1]))
def updateStockMA(code, data, n):
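    # n-day simple moving average: for each day i (from the n-th day on),
    # average the closing prices of the n most recent days and store MA{n}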
for i in range(n - 1, len(data)):
j = i
sum = 0
while (i - j < n):
sum = sum + data[j][1]
j = j - 1
avg = round(sum / n, 2)
sql = unicode("update s_stock set MA{0}={1} where code='{2}' and date='{3}'").format(n, avg, code, data[i][0])
execute(sql)
def updateStockBias(code, data, n):
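    # BIAS indicator: percentage deviation of the close from its n-day moving
    # average, i.e. (close - MA_n) * 100 / MA_n; stored as BIAS1/2/3 for n=6/12/24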
for i in range(n - 1, len(data)):
j = i
sum = 0
while (i - j < n):
sum = sum + data[j][1]
j = j - 1
avg = round(sum / n, 2)
todayClosePrice = float(data[i][1])
bias = 0 if avg == 0 else round((todayClosePrice - avg) * 100 / avg, 2)
number = 1 if n == 6 else (2 if n == 12 else 3)
sql = unicode("update s_stock set BIAS{0}={1} where code='{2}' and date='{3}'").format(number, bias, code,
data[i][0])
execute(sql)
def main(argv):
try:
reload(sys)
sys.setdefaultencoding('utf-8')
# sendMessageToMySelf(unicode("开始查询股票历史行情数据"))
begin = datetime.now()
initMysql()
# getStockHistoryInfoFromDb()
# getStockHistoryInfoFromConfig()
updateStockOtherInfo()
disconnect()
end = datetime.now()
message = unicode("查询股票历史行情数据的任务执行完毕,当前时间:{0},执行用时:{1}").format(datetime.now(), end - begin)
writeLog(message)
sendMessageToMySelf(message)
except:
writeExceptionLog('RealTimeRemindTask Error.')
if __name__ == '__main__':
main(sys.argv)
| zwffff2015/stock | task/GetStockHistoryInfoTask.py | Python | mit | 7,619 |
# -*- coding: utf-8 -*-
"""
flask.ext.hippocket.tasks
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2013 by Sean Vieira.
:license: MIT, see LICENSE for more details.
"""
from flask import Blueprint, Markup, request, render_template
from itertools import chain
from os import path
from pkgutil import walk_packages
from werkzeug.utils import import_string
from werkzeug.exceptions import default_exceptions, HTTPException
def autoload(app, apps_package="apps", module_name="routes", blueprint_name="routes", on_error=None):
"""Automatically load Blueprints from the specified package and registers them with Flask."""
if not apps_package:
raise ValueError("No apps package provided - unable to begin autoload")
if isinstance(apps_package, basestring):
package_code = import_string(apps_package)
else:
#: `apps_package` can be the already imported parent package
#: (i.e. the following is a licit pattern)::
#:
#: import app_package
#: # do something else with app_package
#: autoload(app, app_package)
package_code = apps_package
apps_package = apps_package.__name__
package_paths = package_code.__path__
package_paths = [path.join(app.root_path, p) for p in package_paths]
root = apps_package
apps_package = apps_package + u"." if not apps_package.endswith(".") else apps_package
if on_error is None:
on_error = lambda name: app.logger.warn("Unable to import {name}.".format(name=name))
_to_import = "{base}.{module}.{symbol}"
import_template = lambda base: _to_import.format(base=base,
module=module_name,
symbol=blueprint_name)
#: Autoloaded apps must be Python packages
#: The root of the package is also inspected for a routing file
package_contents = chain([[None, root, True]],
walk_packages(path=package_paths, prefix=apps_package, onerror=on_error))
for _, sub_app_name, is_pkg in package_contents:
if not is_pkg:
continue
sub_app_import_path = import_template(base=sub_app_name)
sub_app = import_string(sub_app_import_path)
if isinstance(sub_app, Blueprint):
app.register_blueprint(sub_app)
else:
app.logger.warn(("Failed to register {name} - "
"it does not match the registration pattern.").format(name=sub_app_name))
def setup_errors(app, error_template="errors.html"):
"""Add a handler for each of the available HTTP error responses."""
def error_handler(error):
if isinstance(error, HTTPException):
description = error.get_description(request.environ)
code = error.code
name = error.name
else:
description = error
code = 500
name = "Internal Server Error"
return render_template(error_template,
code=code,
name=Markup(name),
description=Markup(description))
for exception in default_exceptions:
app.register_error_handler(exception, error_handler)
| svieira/Flask-HipPocket | flask_hippocket/tasks.py | Python | mit | 3,301 |
a = "nabb jasj jjs, jjsajdhh kjkda jj"
a1 = a.split(",")
# print the number of whitespace-separated words in each comma-separated part
for part in a1:
    print(len(part.split()))
"""The tests for the analytics ."""
from unittest.mock import patch
from homeassistant.components.analytics.const import ANALYTICS_ENDPOINT_URL, DOMAIN
from homeassistant.setup import async_setup_component
MOCK_VERSION = "1970.1.0"
async def test_setup(hass):
"""Test setup of the integration."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
assert DOMAIN in hass.data
async def test_websocket(hass, hass_ws_client, aioclient_mock):
"""Test WebSocket commands."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
ws_client = await hass_ws_client(hass)
await ws_client.send_json({"id": 1, "type": "analytics"})
response = await ws_client.receive_json()
assert response["success"]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await ws_client.send_json(
{"id": 2, "type": "analytics/preferences", "preferences": {"base": True}}
)
response = await ws_client.receive_json()
assert len(aioclient_mock.mock_calls) == 1
assert response["result"]["preferences"]["base"]
await ws_client.send_json({"id": 3, "type": "analytics"})
response = await ws_client.receive_json()
assert response["result"]["preferences"]["base"]
| rohitranjan1991/home-assistant | tests/components/analytics/test_init.py | Python | mit | 1,428 |
# django-drf imports
from rest_framework import serializers
# app level imports
from .models import Player, Team
class PlayerSerializer(serializers.ModelSerializer):
class Meta:
model = Player
fields = (
'id', 'name', 'rating', 'teams',
'install_ts', 'update_ts'
)
class TeamSerializer(serializers.ModelSerializer):
class Meta:
model = Team
fields = (
'id', 'name', 'rating', 'players',
'install_ts', 'update_ts'
)
| manjitkumar/drf-url-filters | example_app/serializers.py | Python | mit | 525 |
"""Clowder command line diff controller
.. codeauthor:: Joe DeCapo <joe@polka.cat>
"""
import argparse
import clowder.util.formatting as fmt
from clowder.clowder_controller import CLOWDER_CONTROLLER, print_clowder_name, valid_clowder_yaml_required
from clowder.config import Config
from clowder.git.clowder_repo import print_clowder_repo_status
from clowder.util.console import CONSOLE
from .util import add_parser_arguments
def add_diff_parser(subparsers: argparse._SubParsersAction) -> None: # noqa
"""Add clowder diff parser
:param argparse._SubParsersAction subparsers: Subparsers action to add parser to
"""
parser = subparsers.add_parser('diff', help='Show git diff for projects')
parser.formatter_class = argparse.RawTextHelpFormatter
parser.set_defaults(func=diff)
add_parser_arguments(parser, [
(['projects'], dict(metavar='<project|group>', default='default', nargs='*',
choices=CLOWDER_CONTROLLER.project_choices_with_default,
help=fmt.project_options_help_message('projects and groups to show diff for'))),
])
@valid_clowder_yaml_required
@print_clowder_name
@print_clowder_repo_status
def diff(args) -> None:
"""Clowder diff command private implementation"""
projects = Config().process_projects_arg(args.projects)
projects = CLOWDER_CONTROLLER.filter_projects(CLOWDER_CONTROLLER.projects, projects)
for project in projects:
CONSOLE.stdout(project.status())
project.diff()
| JrGoodle/clowder | clowder/cli/diff.py | Python | mit | 1,529 |
import time
from torba.server import util
def sessions_lines(data):
"""A generator returning lines for a list of sessions.
data is the return value of rpc_sessions()."""
fmt = ('{:<6} {:<5} {:>17} {:>5} {:>5} {:>5} '
'{:>7} {:>7} {:>7} {:>7} {:>7} {:>9} {:>21}')
yield fmt.format('ID', 'Flags', 'Client', 'Proto',
'Reqs', 'Txs', 'Subs',
'Recv', 'Recv KB', 'Sent', 'Sent KB', 'Time', 'Peer')
for (id_, flags, peer, client, proto, reqs, txs_sent, subs,
recv_count, recv_size, send_count, send_size, time) in data:
yield fmt.format(id_, flags, client, proto,
'{:,d}'.format(reqs),
'{:,d}'.format(txs_sent),
'{:,d}'.format(subs),
'{:,d}'.format(recv_count),
'{:,d}'.format(recv_size // 1024),
'{:,d}'.format(send_count),
'{:,d}'.format(send_size // 1024),
util.formatted_time(time, sep=''), peer)
def groups_lines(data):
"""A generator returning lines for a list of groups.
data is the return value of rpc_groups()."""
fmt = ('{:<6} {:>9} {:>9} {:>6} {:>6} {:>8}'
'{:>7} {:>9} {:>7} {:>9}')
yield fmt.format('ID', 'Sessions', 'Bwidth KB', 'Reqs', 'Txs', 'Subs',
'Recv', 'Recv KB', 'Sent', 'Sent KB')
for (id_, session_count, bandwidth, reqs, txs_sent, subs,
recv_count, recv_size, send_count, send_size) in data:
yield fmt.format(id_,
'{:,d}'.format(session_count),
'{:,d}'.format(bandwidth // 1024),
'{:,d}'.format(reqs),
'{:,d}'.format(txs_sent),
'{:,d}'.format(subs),
'{:,d}'.format(recv_count),
'{:,d}'.format(recv_size // 1024),
'{:,d}'.format(send_count),
'{:,d}'.format(send_size // 1024))
def peers_lines(data):
"""A generator returning lines for a list of peers.
data is the return value of rpc_peers()."""
def time_fmt(t):
if not t:
return 'Never'
return util.formatted_time(now - t)
now = time.time()
fmt = ('{:<30} {:<6} {:>5} {:>5} {:<17} {:>4} '
'{:>4} {:>8} {:>11} {:>11} {:>5} {:>20} {:<15}')
yield fmt.format('Host', 'Status', 'TCP', 'SSL', 'Server', 'Min',
'Max', 'Pruning', 'Last Good', 'Last Try',
'Tries', 'Source', 'IP Address')
for item in data:
features = item['features']
hostname = item['host']
host = features['hosts'][hostname]
yield fmt.format(hostname[:30],
item['status'],
host.get('tcp_port') or '',
host.get('ssl_port') or '',
features['server_version'] or 'unknown',
features['protocol_min'],
features['protocol_max'],
features['pruning'] or '',
time_fmt(item['last_good']),
time_fmt(item['last_try']),
item['try_count'],
item['source'][:20],
item['ip_addr'] or '')
| lbryio/lbry | torba/torba/server/text.py | Python | mit | 3,433 |
from kompromatron.core import app
from kompromatron.views.base import base
# app.register_blueprint(entities)
# app.register_blueprint(relations)
#app.register_blueprint(base)
| pudo/kompromatron | kompromatron/web.py | Python | mit | 177 |
import numpy as np
def weighted_pick(weights):
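    # Draw an index with probability proportional to its weight: take the
    # cumulative sum, draw uniformly in [0, total) and binary-search for it.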
t = np.cumsum(weights)
s = np.sum(weights)
return(int(np.searchsorted(t, np.random.rand(1)*s)))
def list_to_string(ascii_list):
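    # Build a unicode string from a list of integer code points, keeping
    # only values in the Latin-1 range (0-255).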
res = u""
for a in ascii_list:
if a >= 0 and a < 256:
res += unichr(a)
return res
| litoeknee/byteNet-tensorflow | utils.py | Python | mit | 271 |
from __future__ import print_function
f = open('inputs/input_01.txt')
contents = f.read()
print("Floor:", contents.count('(') - contents.count(')'))
# Part Two
change = {'(': 1, ')': -1}
floor = 0
position = 1
for c in contents:
if c in change:
floor += change[c]
if floor == -1:
print("Basement entered at position:", position)
break
position += 1
| jjhelmus/adventofcode | day01.py | Python | mit | 390 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-18 22:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('about', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='entry',
name='notes',
),
]
| Arlefreak/ApiArlefreak | about/migrations/0002_remove_entry_notes.py | Python | mit | 377 |
"""
Django settings for CatyHIS project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'jh40i$ueqp$s7+@e71)s-&c*ek8vgt9atzdz7un6=r9(9^*5+-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'FormGen',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'CatyHIS.urls'
WSGI_APPLICATION = 'CatyHIS.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
RES_DIR = os.path.join(BASE_DIR, 'res')
| LyndonChin/CatyHIS | CatyHIS/settings.py | Python | mit | 2,157 |
'''
cloudelements: tests module.
Meant for use with py.test.
Organize tests into files, each named xxx_test.py
Read more here: http://pytest.org/
Copyright 2015, LeadGenius
Licensed under MIT
''' | rsimba/cloudelements | tests/__init__.py | Python | mit | 197 |
#!/usr/bin/env python2
##
# autosign
# https://github.com/leosartaj/autosign.git
#
# copyright (c) 2014 sartaj singh
# licensed under the mit license.
##
import unittest
import os, shutil
import helper
from autosign.main import removeSign, isSign
from autosign.exce import UnsignedError
class TestremoveSign(unittest.TestCase):
"""
tests the removeSign function in main module
"""
def setUp(self):
self.dire = os.path.dirname(__file__)
self.signedfile = os.path.join(self.dire, 'testData/toBeSigned.py')
self.signed = os.path.join(self.dire, 'testData/test_signedfile.py')
shutil.copyfile(self.signedfile, self.signed)
self.unsigned = os.path.join(self.dire, 'testData/test_unsignedfile.py')
helper.newFile(self.unsigned)
helper.readrc(self)
def test_remove_from_unsigned_file(self):
self.assertRaises(UnsignedError, removeSign, self.unsigned, self.options_py)
def test_remove_from_signed_file(self):
self.assertTrue(isSign(self.signed, self.options_py))
removeSign(self.signed, self.options_py)
self.assertFalse(isSign(self.signed, self.options_py))
def tearDown(self):
os.remove(self.unsigned)
| leosartaj/autosign | tests/test_removeSign.py | Python | mit | 1,228 |
from time import time
from os import remove
from matplotlib.image import imread
import json
import subprocess
import numpy as np
import matplotlib.pyplot as plt
def time_a_function(program, args):
start = time()
subprocess.call([program] + [args])
end = time()
return float(end - start)
def clean(programs):
for p in programs:
remove(p)
def plot_results(times, programs, images):
x = [imread(img)[:,:,0].shape for img in images]
xlabels = [str(xi) for xi in x]
x = [np.prod(xi) for xi in x]
for p in programs:
y, std_y = zip(*times[p])
# plt.plot(x, y, 'o')
plt.errorbar(x, y, yerr=std_y, fmt='o')
plt.xticks(x, xlabels)
plt.xlabel('Image size')
plt.ylabel('Time (s)')
plt.show()
def print_results(times, programs, images):
sizes = [imread(img)[:,:,0].size for img in images]
for p in programs:
print '\n{}'.format(p)
mean_t, std_t = zip(*times[p])
print 'Image'.rjust(13), 'Size'.rjust(8), 'Avg. time'.rjust(10), 'Std. time'.rjust(10)
for img, size, m, s in zip(images, sizes, mean_t, std_t):
print '{:13} {:8d} {:10.5f} {:10.5f}'.format(img, size, m, s)
def main():
print 'Running make...'
subprocess.call(['make', '-j8'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
programs = ['./grayscale.out', './grayscale-seq.out']
images = ['img/emma{}.png'.format(i) for i in range(1, 6)]
n = 20
times = {}
try:
print 'Loading times.json...'
time_file = open('times.json', 'r')
times = json.load(time_file)
except IOError:
print 'Failed, calculating times'
for p in programs:
times[p] = []
for img in images:
t = []
print 'Running {} with {} {} times...'.format(p, img, n),
for _ in range(n):
t.append(time_a_function(p, img))
mean_t = np.mean(t)
std_t = np.std(t)
print '({} +- {})s on average'.format(mean_t, std_t)
times[p].append((mean_t, std_t))
time_file = open('times.json', 'w')
print 'Writing times.json...'
json.dump(times, time_file)
time_file.close()
print_results(times, programs, images)
plot_results(times, programs, images)
clean(programs)
if __name__ == '__main__':
main()
| sebasvega95/HPC-assignments | CUDA/grayscale/timing.py | Python | mit | 2,425 |
import unittest
import unittest.mock
import functools
from g1.asyncs import kernels
from g1.operations.databases.bases import interfaces
from g1.operations.databases.servers import connections
# I am not sure why pylint cannot lint contextlib.asynccontextmanager
# correctly; let us disable this check for now.
#
# pylint: disable=not-async-context-manager
def synchronous(test_method):
@kernels.with_kernel
@functools.wraps(test_method)
def wrapper(self):
kernels.run(test_method(self))
return wrapper
class ConnectionsTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.conn = unittest.mock.Mock()
self.tx = self.conn.begin.return_value
self.manager = connections.ConnectionManager(self.conn)
unittest.mock.patch.multiple(
connections,
_WAIT_FOR_READER=0.01,
_WAIT_FOR_WRITER=0.01,
).start()
def tearDown(self):
unittest.mock.patch.stopall()
super().tearDown()
def assert_manager(
self,
num_readers,
tx_id,
rollback_tx_ids,
commit_tx_ids,
timeout_tx_ids,
):
self.assertEqual(self.manager._num_readers, num_readers)
self.assertEqual(self.manager._tx_id, tx_id)
self.assertEqual(tuple(self.manager._rollback_tx_ids), rollback_tx_ids)
self.assertEqual(tuple(self.manager._commit_tx_ids), commit_tx_ids)
self.assertEqual(tuple(self.manager._timeout_tx_ids), timeout_tx_ids)
self.assertEqual(self.manager.tx_id, tx_id)
@synchronous
async def test_reading(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading() as conn_1:
self.assert_manager(1, 0, (), (), ())
self.assertIs(conn_1, self.conn)
async with self.manager.reading() as conn_2:
self.assert_manager(2, 0, (), (), ())
self.assertIs(conn_2, self.conn)
async with self.manager.reading() as conn_3:
self.assert_manager(3, 0, (), (), ())
self.assertIs(conn_3, self.conn)
self.assert_manager(2, 0, (), (), ())
self.assert_manager(1, 0, (), (), ())
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_reading_timeout(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.reading():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_writing(self):
with self.assertRaises(interfaces.InvalidRequestError):
async with self.manager.writing(0):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(1):
pass
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
self.assert_manager(0, tx_id, (), (), ())
async with self.manager.writing(tx_id) as conn:
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(tx_id + 1):
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting() as conn:
tx_id = self.manager.tx_id
self.assertNotEqual(tx_id, 0)
self.assert_manager(0, tx_id, (), (), ())
self.assertIs(conn, self.conn)
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_rollback(self):
self.assert_manager(0, 0, (), (), ())
with self.assertRaises(ValueError):
async with self.manager.transacting():
tx_id = self.manager.tx_id
raise ValueError
self.assert_manager(0, 0, (tx_id, ), (), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_transacting_timeout_on_reader(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.reading():
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (), ())
self.conn.begin.assert_not_called()
@synchronous
async def test_transacting_timeout_on_writer(self):
self.assert_manager(0, 0, (), (), ())
async with self.manager.transacting():
tx_id = self.manager.tx_id
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.transacting():
pass
self.assert_manager(0, 0, (), (tx_id, ), ())
self.conn.begin.assert_called_once()
@synchronous
async def test_begin(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.begin(0)
self.assert_manager(0, 0, (), (), ())
conn = await self.manager.begin(1)
for _ in range(3): # begin is idempotent.
self.assertIs(await self.manager.begin(1), conn)
self.assertIs(conn, self.conn)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionTimeoutError):
await self.manager.begin(2)
self.conn.begin.assert_called_once()
@synchronous
async def test_end(self):
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.rollback(0)
with self.assertRaises(interfaces.InvalidRequestError):
await self.manager.commit(0)
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.rollback(1)
with self.assertRaisesRegex(AssertionError, r'expect x != 0'):
await self.manager.rollback_due_to_timeout()
with self.assertRaises(interfaces.TransactionNotFoundError):
await self.manager.commit(1)
self.assert_manager(0, 0, (), (), ())
await self.manager.begin(1)
self.assert_manager(0, 1, (), (), ())
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.rollback(999)
with self.assertRaises(interfaces.TransactionNotFoundError):
self.manager.commit(999)
self.tx.rollback.assert_not_called()
for _ in range(3): # rollback is idempotent.
self.manager.rollback(1)
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (), ())
await self.manager.begin(2)
self.tx.commit.assert_not_called()
for _ in range(3): # commit is idempotent.
self.manager.commit(2)
self.tx.commit.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), ())
self.tx.rollback.reset_mock()
await self.manager.begin(3)
self.manager.rollback_due_to_timeout()
self.tx.rollback.assert_called_once()
self.assert_manager(0, 0, (1, ), (2, ), (3, ))
await self.manager.begin(1)
with self.assertRaises(interfaces.TransactionTimeoutError):
async with self.manager.writing(3):
pass
with self.assertRaises(interfaces.TransactionNotFoundError):
async with self.manager.writing(4):
pass
if __name__ == '__main__':
unittest.main()
| clchiou/garage | py/g1/operations/databases/servers/tests/test_connections.py | Python | mit | 8,015 |
'''
Animation
=========
:class:`Animation` and :class:`AnimationTransition` are used to animate
:class:`~kivy.uix.widget.Widget` properties. You must specify at least a
property name and target value. To use an Animation, follow these steps:
* Setup an Animation object
* Use the Animation object on a Widget
Simple animation
----------------
To animate a Widget's x or y position, simply specify the target x/y values
where you want the widget positioned at the end of the animation::
anim = Animation(x=100, y=100)
anim.start(widget)
The animation will last for 1 second unless :attr:`duration` is specified.
When anim.start() is called, the Widget will move smoothly from the current
x/y position to (100, 100).
Multiple properties and transitions
-----------------------------------
You can animate multiple properties and use built-in or custom transition
functions using :attr:`transition` (or the `t=` shortcut). For example,
to animate the position and size using the 'in_quad' transition::
anim = Animation(x=50, size=(80, 80), t='in_quad')
anim.start(widget)
Note that the `t=` parameter can be the string name of a method in the
:class:`AnimationTransition` class or your own animation function.
Sequential animation
--------------------
To join animations sequentially, use the '+' operator. The following example
will animate to x=50 over 1 second, then animate the size to (80, 80) over the
next two seconds::
anim = Animation(x=50) + Animation(size=(80, 80), duration=2.)
anim.start(widget)
Parallel animation
------------------
To join animations in parallel, use the '&' operator. The following example
will animate the position to (80, 10) over 1 second, whilst in parallel
animating the size to (800, 800)::
anim = Animation(pos=(80, 10))
anim &= Animation(size=(800, 800), duration=2.)
anim.start(widget)
Keep in mind that creating overlapping animations on the same property may have
unexpected results. If you want to apply multiple animations to the same
property, you should either schedule them sequentially (via the '+' operator or
using the *on_complete* callback) or cancel previous animations using the
:attr:`~Animation.cancel_all` method.
Repeating animation
-------------------
.. versionadded:: 1.8.0
.. note::
This is currently only implemented for 'Sequence' animations.
To set an animation to repeat, simply set the :attr:`Sequence.repeat`
property to `True`::
anim = Animation(...) + Animation(...)
anim.repeat = True
anim.start(widget)
For flow control of animations such as stopping and cancelling, use the methods
already in place in the animation module.
'''
__all__ = ('Animation', 'AnimationTransition')
from math import sqrt, cos, sin, pi
from kivy.event import EventDispatcher
from kivy.clock import Clock
from kivy.compat import string_types, iterkeys
from kivy.weakproxy import WeakProxy
class Animation(EventDispatcher):
'''Create an animation definition that can be used to animate a Widget.
:Parameters:
`duration` or `d`: float, defaults to 1.
Duration of the animation, in seconds.
`transition` or `t`: str or func
Transition function for animate properties. It can be the name of a
method from :class:`AnimationTransition`.
`step` or `s`: float
Step in milliseconds of the animation. Defaults to 0, which means
the animation is updated for every frame.
To update the animation less often, set the step value to a float.
For example, if you want to animate at 30 FPS, use s=1/30.
:Events:
`on_start`: animation, widget
Fired when the animation is started on a widget.
`on_complete`: animation, widget
Fired when the animation is completed or stopped on a widget.
`on_progress`: animation, widget, progression
Fired when the progression of the animation is changing.
.. versionchanged:: 1.4.0
Added s/step parameter.
.. versionchanged:: 1.9.2
The default value of the step parameter was changed from 1/60. to 0.
'''
_update_ev = None
_instances = set()
__events__ = ('on_start', 'on_progress', 'on_complete')
def __init__(self, **kw):
super(Animation, self).__init__()
# Initialize
self._clock_installed = False
self._duration = kw.pop('d', kw.pop('duration', 1.))
self._transition = kw.pop('t', kw.pop('transition', 'linear'))
self._step = kw.pop('s', kw.pop('step', 0))
if isinstance(self._transition, string_types):
self._transition = getattr(AnimationTransition, self._transition)
self._animated_properties = kw
self._widgets = {}
@property
def duration(self):
'''Return the duration of the animation.
'''
return self._duration
@property
def transition(self):
'''Return the transition of the animation.
'''
return self._transition
@property
def animated_properties(self):
'''Return the properties used to animate.
'''
return self._animated_properties
@staticmethod
def stop_all(widget, *largs):
'''Stop all animations that concern a specific widget / list of
properties.
Example::
anim = Animation(x=50)
anim.start(widget)
# and later
Animation.stop_all(widget, 'x')
'''
if len(largs):
for animation in list(Animation._instances):
for x in largs:
animation.stop_property(widget, x)
else:
for animation in set(Animation._instances):
animation.stop(widget)
@staticmethod
def cancel_all(widget, *largs):
'''Cancel all animations that concern a specific widget / list of
properties. See :attr:`cancel`.
Example::
anim = Animation(x=50)
anim.start(widget)
# and later
Animation.cancel_all(widget, 'x')
.. versionadded:: 1.4.0
'''
if len(largs):
for animation in list(Animation._instances):
for x in largs:
animation.cancel_property(widget, x)
else:
for animation in set(Animation._instances):
animation.cancel(widget)
def start(self, widget):
'''Start the animation on a widget.
'''
self.stop(widget)
self._initialize(widget)
self._register()
self.dispatch('on_start', widget)
def stop(self, widget):
'''Stop the animation previously applied to a widget, triggering the
`on_complete` event.'''
props = self._widgets.pop(widget.uid, None)
if props:
self.dispatch('on_complete', widget)
self.cancel(widget)
def cancel(self, widget):
'''Cancel the animation previously applied to a widget. Same
effect as :attr:`stop`, except the `on_complete` event will
*not* be triggered!
.. versionadded:: 1.4.0
'''
self._widgets.pop(widget.uid, None)
self._clock_uninstall()
if not self._widgets:
self._unregister()
def stop_property(self, widget, prop):
'''Even if an animation is running, remove a property. It will not be
        animated further. If it was the only/last property being animated,
the animation will be stopped (see :attr:`stop`).
'''
props = self._widgets.get(widget.uid, None)
if not props:
return
props['properties'].pop(prop, None)
        # no more properties to animate? kill the animation.
if not props['properties']:
self.stop(widget)
def cancel_property(self, widget, prop):
'''Even if an animation is running, remove a property. It will not be
animated further. If it was the only/last property being animated,
the animation will be canceled (see :attr:`cancel`)
.. versionadded:: 1.4.0
'''
props = self._widgets.get(widget.uid, None)
if not props:
return
props['properties'].pop(prop, None)
        # no more properties to animate? kill the animation.
if not props['properties']:
self.cancel(widget)
def have_properties_to_animate(self, widget):
'''Return True if a widget still has properties to animate.
.. versionadded:: 1.8.0
'''
props = self._widgets.get(widget.uid, None)
if props and props['properties']:
return True
#
# Private
#
def _register(self):
Animation._instances.add(self)
def _unregister(self):
if self in Animation._instances:
Animation._instances.remove(self)
def _initialize(self, widget):
d = self._widgets[widget.uid] = {
'widget': widget,
'properties': {},
'time': None}
# get current values
p = d['properties']
for key, value in self._animated_properties.items():
original_value = getattr(widget, key)
if isinstance(original_value, (tuple, list)):
original_value = original_value[:]
elif isinstance(original_value, dict):
original_value = original_value.copy()
p[key] = (original_value, value)
# install clock
self._clock_install()
def _clock_install(self):
if self._clock_installed:
return
self._update_ev = Clock.schedule_interval(self._update, self._step)
self._clock_installed = True
def _clock_uninstall(self):
if self._widgets or not self._clock_installed:
return
self._clock_installed = False
if self._update_ev is not None:
self._update_ev.cancel()
self._update_ev = None
def _update(self, dt):
widgets = self._widgets
transition = self._transition
calculate = self._calculate
for uid in list(widgets.keys())[:]:
anim = widgets[uid]
widget = anim['widget']
if isinstance(widget, WeakProxy) and not len(dir(widget)):
# empty proxy, widget is gone. ref: #2458
del widgets[uid]
continue
if anim['time'] is None:
anim['time'] = 0.
else:
anim['time'] += dt
# calculate progression
if self._duration:
progress = min(1., anim['time'] / self._duration)
else:
progress = 1
t = transition(progress)
# apply progression on widget
for key, values in anim['properties'].items():
a, b = values
value = calculate(a, b, t)
setattr(widget, key, value)
self.dispatch('on_progress', widget, progress)
# time to stop ?
if progress >= 1.:
self.stop(widget)
def _calculate(self, a, b, t):
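        # Interpolate between a and b at progress t: lists/tuples and dicts
        # are interpolated element-wise (recursively), scalars linearly.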
_calculate = self._calculate
if isinstance(a, list) or isinstance(a, tuple):
if isinstance(a, list):
tp = list
else:
tp = tuple
return tp([_calculate(a[x], b[x], t) for x in range(len(a))])
elif isinstance(a, dict):
d = {}
for x in iterkeys(a):
if x not in b:
# User requested to animate only part of the dict.
# Copy the rest
d[x] = a[x]
else:
d[x] = _calculate(a[x], b[x], t)
return d
else:
return (a * (1. - t)) + (b * t)
#
# Default handlers
#
def on_start(self, widget):
pass
def on_progress(self, widget, progress):
pass
def on_complete(self, widget):
pass
def __add__(self, animation):
return Sequence(self, animation)
def __and__(self, animation):
return Parallel(self, animation)
class Sequence(Animation):
def __init__(self, anim1, anim2):
super(Sequence, self).__init__()
#: Repeat the sequence. See 'Repeating animation' in the header
#: documentation.
self.repeat = False
self.anim1 = anim1
self.anim2 = anim2
self.anim1.bind(on_start=self.on_anim1_start,
on_progress=self.on_anim1_progress)
self.anim2.bind(on_complete=self.on_anim2_complete,
on_progress=self.on_anim2_progress)
@property
def duration(self):
return self.anim1.duration + self.anim2.duration
def start(self, widget):
self.stop(widget)
self._widgets[widget.uid] = True
self._register()
self.anim1.start(widget)
self.anim1.bind(on_complete=self.on_anim1_complete)
def stop(self, widget):
self.anim1.stop(widget)
self.anim2.stop(widget)
props = self._widgets.pop(widget.uid, None)
if props:
self.dispatch('on_complete', widget)
super(Sequence, self).cancel(widget)
def stop_property(self, widget, prop):
self.anim1.stop_property(widget, prop)
self.anim2.stop_property(widget, prop)
if (not self.anim1.have_properties_to_animate(widget) and
not self.anim2.have_properties_to_animate(widget)):
self.stop(widget)
def cancel(self, widget):
self.anim1.cancel(widget)
self.anim2.cancel(widget)
super(Sequence, self).cancel(widget)
def cancel_property(self, widget, prop):
'''Even if an animation is running, remove a property. It will not be
animated further. If it was the only/last property being animated,
the animation will be canceled (see :attr:`cancel`)
This method overrides `:class:kivy.animation.Animation`'s
version, to cancel it on all animations of the Sequence.
.. versionadded:: 1.9.2
'''
self.anim1.cancel_property(widget, prop)
self.anim2.cancel_property(widget, prop)
if (not self.anim1.have_properties_to_animate(widget) and
not self.anim2.have_properties_to_animate(widget)):
self.cancel(widget)
def on_anim1_start(self, instance, widget):
self.dispatch('on_start', widget)
def on_anim1_complete(self, instance, widget):
self.anim1.unbind(on_complete=self.on_anim1_complete)
self.anim2.start(widget)
def on_anim1_progress(self, instance, widget, progress):
self.dispatch('on_progress', widget, progress / 2.)
def on_anim2_complete(self, instance, widget):
'''Repeating logic used with boolean variable "repeat".
.. versionadded:: 1.7.1
'''
if self.repeat:
self.anim1.start(widget)
self.anim1.bind(on_complete=self.on_anim1_complete)
else:
self.dispatch('on_complete', widget)
def on_anim2_progress(self, instance, widget, progress):
self.dispatch('on_progress', widget, .5 + progress / 2.)
class Parallel(Animation):
def __init__(self, anim1, anim2):
super(Parallel, self).__init__()
self.anim1 = anim1
self.anim2 = anim2
self.anim1.bind(on_complete=self.on_anim_complete)
self.anim2.bind(on_complete=self.on_anim_complete)
@property
def duration(self):
return max(self.anim1.duration, self.anim2.duration)
def start(self, widget):
self.stop(widget)
self.anim1.start(widget)
self.anim2.start(widget)
self._widgets[widget.uid] = {'complete': 0}
self._register()
self.dispatch('on_start', widget)
def stop(self, widget):
self.anim1.stop(widget)
self.anim2.stop(widget)
props = self._widgets.pop(widget.uid, None)
if props:
self.dispatch('on_complete', widget)
super(Parallel, self).cancel(widget)
def stop_property(self, widget, prop):
self.anim1.stop_property(widget, prop)
self.anim2.stop_property(widget, prop)
if (not self.anim1.have_properties_to_animate(widget) and
not self.anim2.have_properties_to_animate(widget)):
self.stop(widget)
def cancel(self, widget):
self.anim1.cancel(widget)
self.anim2.cancel(widget)
super(Parallel, self).cancel(widget)
def on_anim_complete(self, instance, widget):
self._widgets[widget.uid]['complete'] += 1
if self._widgets[widget.uid]['complete'] == 2:
self.stop(widget)
class AnimationTransition(object):
'''Collection of animation functions to be used with the Animation object.
Easing Functions ported to Kivy from the Clutter Project
https://developer.gnome.org/clutter/stable/ClutterAlpha.html
The `progress` parameter in each animation function is in the range 0-1.
'''
@staticmethod
def linear(progress):
'''.. image:: images/anim_linear.png'''
return progress
@staticmethod
def in_quad(progress):
'''.. image:: images/anim_in_quad.png
'''
return progress * progress
@staticmethod
def out_quad(progress):
'''.. image:: images/anim_out_quad.png
'''
return -1.0 * progress * (progress - 2.0)
@staticmethod
def in_out_quad(progress):
'''.. image:: images/anim_in_out_quad.png
'''
p = progress * 2
if p < 1:
return 0.5 * p * p
p -= 1.0
return -0.5 * (p * (p - 2.0) - 1.0)
@staticmethod
def in_cubic(progress):
'''.. image:: images/anim_in_cubic.png
'''
return progress * progress * progress
@staticmethod
def out_cubic(progress):
'''.. image:: images/anim_out_cubic.png
'''
p = progress - 1.0
return p * p * p + 1.0
@staticmethod
def in_out_cubic(progress):
'''.. image:: images/anim_in_out_cubic.png
'''
p = progress * 2
if p < 1:
return 0.5 * p * p * p
p -= 2
return 0.5 * (p * p * p + 2.0)
@staticmethod
def in_quart(progress):
'''.. image:: images/anim_in_quart.png
'''
return progress * progress * progress * progress
@staticmethod
def out_quart(progress):
'''.. image:: images/anim_out_quart.png
'''
p = progress - 1.0
return -1.0 * (p * p * p * p - 1.0)
@staticmethod
def in_out_quart(progress):
'''.. image:: images/anim_in_out_quart.png
'''
p = progress * 2
if p < 1:
return 0.5 * p * p * p * p
p -= 2
return -0.5 * (p * p * p * p - 2.0)
@staticmethod
def in_quint(progress):
'''.. image:: images/anim_in_quint.png
'''
return progress * progress * progress * progress * progress
@staticmethod
def out_quint(progress):
'''.. image:: images/anim_out_quint.png
'''
p = progress - 1.0
return p * p * p * p * p + 1.0
@staticmethod
def in_out_quint(progress):
'''.. image:: images/anim_in_out_quint.png
'''
p = progress * 2
if p < 1:
return 0.5 * p * p * p * p * p
p -= 2.0
return 0.5 * (p * p * p * p * p + 2.0)
@staticmethod
def in_sine(progress):
'''.. image:: images/anim_in_sine.png
'''
return -1.0 * cos(progress * (pi / 2.0)) + 1.0
@staticmethod
def out_sine(progress):
'''.. image:: images/anim_out_sine.png
'''
return sin(progress * (pi / 2.0))
@staticmethod
def in_out_sine(progress):
'''.. image:: images/anim_in_out_sine.png
'''
return -0.5 * (cos(pi * progress) - 1.0)
@staticmethod
def in_expo(progress):
'''.. image:: images/anim_in_expo.png
'''
if progress == 0:
return 0.0
return pow(2, 10 * (progress - 1.0))
@staticmethod
def out_expo(progress):
'''.. image:: images/anim_out_expo.png
'''
if progress == 1.0:
return 1.0
return -pow(2, -10 * progress) + 1.0
@staticmethod
def in_out_expo(progress):
'''.. image:: images/anim_in_out_expo.png
'''
if progress == 0:
return 0.0
if progress == 1.:
return 1.0
p = progress * 2
if p < 1:
return 0.5 * pow(2, 10 * (p - 1.0))
p -= 1.0
return 0.5 * (-pow(2, -10 * p) + 2.0)
@staticmethod
def in_circ(progress):
'''.. image:: images/anim_in_circ.png
'''
return -1.0 * (sqrt(1.0 - progress * progress) - 1.0)
@staticmethod
def out_circ(progress):
'''.. image:: images/anim_out_circ.png
'''
p = progress - 1.0
return sqrt(1.0 - p * p)
@staticmethod
def in_out_circ(progress):
'''.. image:: images/anim_in_out_circ.png
'''
p = progress * 2
if p < 1:
return -0.5 * (sqrt(1.0 - p * p) - 1.0)
p -= 2.0
return 0.5 * (sqrt(1.0 - p * p) + 1.0)
@staticmethod
def in_elastic(progress):
'''.. image:: images/anim_in_elastic.png
'''
p = .3
s = p / 4.0
q = progress
if q == 1:
return 1.0
q -= 1.0
return -(pow(2, 10 * q) * sin((q - s) * (2 * pi) / p))
@staticmethod
def out_elastic(progress):
'''.. image:: images/anim_out_elastic.png
'''
p = .3
s = p / 4.0
q = progress
if q == 1:
return 1.0
return pow(2, -10 * q) * sin((q - s) * (2 * pi) / p) + 1.0
@staticmethod
def in_out_elastic(progress):
'''.. image:: images/anim_in_out_elastic.png
'''
p = .3 * 1.5
s = p / 4.0
q = progress * 2
if q == 2:
return 1.0
if q < 1:
q -= 1.0
return -.5 * (pow(2, 10 * q) * sin((q - s) * (2.0 * pi) / p))
else:
q -= 1.0
return pow(2, -10 * q) * sin((q - s) * (2.0 * pi) / p) * .5 + 1.0
@staticmethod
def in_back(progress):
'''.. image:: images/anim_in_back.png
'''
return progress * progress * ((1.70158 + 1.0) * progress - 1.70158)
@staticmethod
def out_back(progress):
'''.. image:: images/anim_out_back.png
'''
p = progress - 1.0
return p * p * ((1.70158 + 1) * p + 1.70158) + 1.0
@staticmethod
def in_out_back(progress):
'''.. image:: images/anim_in_out_back.png
'''
p = progress * 2.
s = 1.70158 * 1.525
if p < 1:
return 0.5 * (p * p * ((s + 1.0) * p - s))
p -= 2.0
return 0.5 * (p * p * ((s + 1.0) * p + s) + 2.0)
@staticmethod
def _out_bounce_internal(t, d):
p = t / d
if p < (1.0 / 2.75):
return 7.5625 * p * p
elif p < (2.0 / 2.75):
p -= (1.5 / 2.75)
return 7.5625 * p * p + .75
elif p < (2.5 / 2.75):
p -= (2.25 / 2.75)
return 7.5625 * p * p + .9375
else:
p -= (2.625 / 2.75)
return 7.5625 * p * p + .984375
@staticmethod
def _in_bounce_internal(t, d):
return 1.0 - AnimationTransition._out_bounce_internal(d - t, d)
@staticmethod
def in_bounce(progress):
'''.. image:: images/anim_in_bounce.png
'''
return AnimationTransition._in_bounce_internal(progress, 1.)
@staticmethod
def out_bounce(progress):
'''.. image:: images/anim_out_bounce.png
'''
return AnimationTransition._out_bounce_internal(progress, 1.)
@staticmethod
def in_out_bounce(progress):
'''.. image:: images/anim_in_out_bounce.png
'''
p = progress * 2.
if p < 1.:
return AnimationTransition._in_bounce_internal(p, 1.) * .5
return AnimationTransition._out_bounce_internal(p - 1., 1.) * .5 + .5
| darkopevec/kivy | kivy/animation.py | Python | mit | 24,358 |
from django.http import HttpResponseRedirect, Http404
from django.contrib import auth
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
@login_required
def switch_user(request, username):
if request.user.is_superuser:
try:
user = get_user_model().objects.get(username=username)
auth.login(request, user, backend='django.contrib.auth.backends.ModelBackend')
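            # redirect the admin back to the originating page, falling back to "/"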
from_url = request.META.get("HTTP_ORIGIN", "")
if not from_url:
from_url = request.META.get("HTTP_HOST", "")
from_url = from_url and ("http://" + from_url) or "/"
return HttpResponseRedirect(from_url)
except User.DoesNotExist:
pass
raise Http404
| empty/django-switch-user | su/views.py | Python | mit | 838 |
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 Tuukka Turto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Module for spell objects
"""
from pyherc.aspects import log_debug, log_info
from pyherc.data.effects import EffectsCollection
class Spell():
"""
Class to represent spells
.. versionadded:: 0.9
"""
@log_debug
def __init__(self):
"""
Default constructor
"""
self.targets = []
self.effects = EffectsCollection()
self.spirit = 0
@log_debug
def add_effect_handle(self, handle):
"""
Add effect handle
:param handle: effect handle to add
:type handle: EffectHandle
"""
self.effects.add_effect_handle(handle)
@log_debug
def get_effect_handles(self, trigger=None):
"""
Get effect handles
:param trigger: optional trigger type
:type trigger: string
:returns: effect handles
:rtype: [EffectHandle]
"""
return self.effects.get_effect_handles(trigger)
@log_debug
def remove_effect_handle(self, handle):
"""
Remove given handle
:param handle: handle to remove
:type handle: EffectHandle
"""
self.effects.remove_effect_handle(handle)
@log_info
def cast(self, effects_factory):
"""
Cast the spell
:param effects_factory: factory for creating effects
:type effects_factory: EffectsFactory
"""
handles = self.effects.get_effect_handles('on spell hit')
effects = []
targets = (x.target for x in self.targets
if x.target)
for target in targets:
for handle in handles:
effects.append(effects_factory(key=handle.effect,
target=target))
for effect in effects:
if not effect.duration or effect.duration <= 0:
effect.trigger()
else:
effect.target.add_effect(effect)
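# Minimal usage sketch; EffectHandle attributes and factory behaviour are
# assumptions inferred from the calls above, not verified against pyherc:
#   spell = Spell()
#   spell.add_effect_handle(handle)    # handle.trigger == 'on spell hit'
#   spell.targets = resolved_targets   # objects exposing a .target attribute
#   spell.cast(effects_factory)        # triggers or attaches the effects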
| tuturto/pyherc | src/pyherc/data/magic/spell.py | Python | mit | 3,082 |
#!/usr/bin/env python2
"""
COSMO TECHNICAL TESTSUITE
General purpose script to compare two files containing tables
Only lines with given table pattern are considered
"""
# built-in modules
import os, sys, string
# information
__author__ = "Xavier Lapillonne"
__maintainer__ = "xavier.lapillonne@meteoswiss.ch"
def cmp_table(file1,file2,colpattern,minval,threshold,verbose=1,maxcompline=-1):
# General purpose script to compare two files containing tables
    # Only lines matching the given column pattern are compared. Columns to
    # be compared are marked with c, columns to discard with x
#init
ncomp=0
nerror=0
lerror=False
epsilon=1e-16 #used to avoid division by zero in case minval is zero
# check file existence
if not(os.path.exists(file1)):
print('File %s does not exist' %(file1))
return -1
elif not(os.path.exists(file2)):
        print('File %s does not exist' %(file2))
return -1
# convert input
colpattern=[x=='c' for x in list(colpattern)]
threshold=float(threshold)
minval=float(minval)
# open file
data1=open(file1).readlines()
data2=open(file2).readlines()
# get max record
nd1=len(data1)
nd2=len(data2)
# check that files are not empty
if nd1==0:
print('file %s is empty!' %(file1))
return -1
if nd2==0:
print('file %s is empty!' %(file2))
return -1
if nd1!=nd2 and verbose>1:
        print('Warning: %s and %s have different sizes, comparing common subset only \n' %(file1,file2))
ncdata=min(nd1,nd2)
if (maxcompline>0):
ncdata=min(ncdata,maxcompline)
# Iterates through the lines
for il in range(ncdata):
l1=data1[il].split()
l2=data2[il].split()
l1match=matchColPattern(l1,colpattern)
l2match=matchColPattern(l2,colpattern)
# compare values if both lines are compatible
if l1match and l2match:
for ic in range(len(colpattern)):
if colpattern[ic]:
v1=float(l1[ic])
v2=float(l2[ic])
val_abs_max=max(abs(v1),abs(v2))
if val_abs_max > minval:
ncomp+=1
diff=abs(v1-v2)/(val_abs_max+epsilon)
if diff>threshold:
nerror+=1
# Print error
if verbose>1:
                                print('Error %2.2e above %2.2e threshold at line %i, col %i' %(diff,threshold,il+1,ic+1))
print('> %s' %(file1))
print(data1[il])
print('< %s' %(file2))
print(data2[il])
#save line for first error
if not lerror:
differ=diff
linerr=il+1
colerr=ic+1
linerr1=data1[il]
linerr2=data2[il]
lerror=True
if ncomp==0:
        print('Warning: no lines to compare')
nerror=-2
if lerror and verbose>0:
print('Compared values: %i, errors above threshold: %i ; %i %% ' %(ncomp,nerror,nerror*100./ncomp))
if verbose==1:
            print('First error %2.2e above %2.2e threshold at line %i, col %i' %(differ,threshold,linerr,colerr))
print('> %s' %(file1))
print(linerr1)
print('< %s' %(file2))
print(linerr2)
return nerror
#----------------------------------------------------------------------------
# Local functions
def matchColPattern(line,colpattern):
if len(line)!=len(colpattern):
return False
try:
for i in range(len(colpattern)):
if colpattern[i]: f=float(line[i])
except ValueError:
return False
return True
#-----------------------------------
#execute as a script
if __name__ == "__main__":
if len(sys.argv)==6:
cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \
sys.argv[5])
elif len(sys.argv)==7:
cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \
sys.argv[5],sys.argv[6])
elif len(sys.argv)==8:
cmp_table(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4], \
sys.argv[5],sys.argv[6],sys.argv[7])
else:
print('''USAGE : ./comp_table file1 file2 colpattern minval threshold [verbose maxcompline]
General purpose script to compare two files containing tables
           Only lines matching the given column pattern are compared. Columns to be compared must be numbers and are marked with c,
           columns to discard with x
           colpattern: c for compare or x for ignore, ex: xccx discards the first and last columns of a 4-column table
''')
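# Example invocation (file names and tolerances are hypothetical):
#   ./comp_table.py ref/YUPRMASS.dat exp/YUPRMASS.dat xccx 1e-12 1e-5 2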
| C2SM-RCM/testsuite | tools/comp_table.py | Python | mit | 5,041 |
#!/usr/bin/env python2
import sys
def parse_map_file(path):
map_grid = []
# Create a two-dimensional list based on the input data
with open(path, 'r') as f:
width, height = map(int, f.readline().split())
for line in f:
row = map(int, line.split())
map_grid.append(row)
# Input checking
if height < 1 or width < 1:
raise ValueError('grid height and width should be >= 1')
elif height != len(map_grid) or width != len(map_grid[0]):
raise ValueError('actual map does not match declared map dimensions')
return width, height, map_grid
def make_grid(width, height, initial_value):
return [width*[initial_value] for i in range(height)]
def get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations):
path_length = path_lengths[y][x]
if path_length != -1:
return path_length, final_elevations[y][x]
current_elevation = map_grid[y][x]
longest_path = 0
lowest_elevation = current_elevation
neighbors = [
(x, y - 1), # up
(x, y + 1), # down
(x - 1, y), # left
(x + 1, y), # right
]
    for xn, yn in neighbors:
        # Negative indices would silently wrap to the far edge of the grid
        # instead of raising IndexError, so reject them explicitly
        if xn < 0 or yn < 0:
            continue
        try:
            neighbor = map_grid[yn][xn]
        except IndexError:
            continue
if neighbor < current_elevation:
path_length, final_elevation = get_length_and_elevation(xn, yn, map_grid, path_lengths, final_elevations)
if path_length > longest_path or (path_length == longest_path and final_elevation < lowest_elevation):
longest_path = path_length
lowest_elevation = final_elevation
path_length = longest_path + 1
path_lengths[y][x] = path_length
final_elevations[y][x] = lowest_elevation
return path_length, lowest_elevation
def main():
if len(sys.argv) != 2:
sys.exit('Usage: {} <map file>'.format(sys.argv[0]))
print 'Parsing map data...'
try:
width, height, map_grid = parse_map_file(sys.argv[1])
except IOError as e:
sys.exit('Unable to read map file: {}'.format(e))
except ValueError as e:
sys.exit('Invalid map file: {}: {}'.format(sys.argv[1], e))
# Initialize corresponding grids for path lengths and final elevations
path_lengths = make_grid(width, height, -1)
final_elevations = make_grid(width, height, -1)
print 'Finding the best path...'
longest_path = -1
steepest_drop = -1
for y, row in enumerate(map_grid):
for x, initial_elevation in enumerate(row):
path_length, final_elevation = get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations)
drop = initial_elevation - final_elevation
if path_length > longest_path or (path_length == longest_path and drop > steepest_drop):
longest_path = path_length
steepest_drop = drop
print '\nlength = {}, drop = {}\n'.format(longest_path, steepest_drop)
if __name__ == '__main__':
main()
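# Example map file accepted by parse_map_file: the first line is
# "<width> <height>", followed by one row of elevations per line
# (values below are hypothetical):
#   4 4
#   4 8 7 3
#   2 5 9 3
#   6 3 2 5
#   4 4 1 6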
| baudm/sg-ski | sg-ski.py | Python | mit | 3,012 |
import unittest
import transaction
import os
import csv
from pyramid import testing
from thesis.models import DBSession
from sqlalchemy import create_engine
from thesis.models import (
Base,
GriddedMappablePoint,
Layer
)
class TestGriddedMappableItem(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
engine = create_engine('postgresql+psycopg2://thesis_db_user:_89_hHh_989g2988h08g2As@127.0.0.1:5432/thesis_test_db')
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
with transaction.manager:
# Add TestLayer1
test_layer_1 = Layer(name='TestLayer1')
test_layer_1.mappable_points = [
GriddedMappablePoint('Point(30 10)'),
GriddedMappablePoint('Point(20 10)'),
]
DBSession.add(test_layer_1)
# Add TestLayer2
test_layer_2 = Layer(name='TestLayer2')
test_layer_2.mappable_points = [
GriddedMappablePoint('Point(10 15)'),
GriddedMappablePoint('Point(10 15)'),
GriddedMappablePoint('Point(30 15)'),
]
DBSession.add(test_layer_2)
# Add Emu Layer
tests_path = os.path.dirname(os.path.abspath(__file__))
test_fixtures_path = os.path.join(tests_path, 'fixtures')
emu_csv_path = os.path.join(test_fixtures_path, 'emu.csv')
emu_layer = Layer(name='Emu')
with open(emu_csv_path, 'rb') as csvfile:
emu_reader = csv.reader(csvfile)
rownum = 0
header = None
for row in emu_reader:
# Save header row.
if rownum == 0:
header = row
else:
colnum = 0
latitude = 0
longitude = 0
for col in row:
column_label = header[colnum]
if column_label == "LNGDEC":
longitude = col
elif column_label == "LATDEC":
latitude = col
# print '%-8s: %s' % (column_label, col)
colnum += 1
if longitude and latitude:
mappable_point = GriddedMappablePoint('Point(%s %s)' % (longitude, latitude))
emu_layer.mappable_points.append(mappable_point)
rownum += 1
DBSession.add(emu_layer)
def tearDown(self):
DBSession.remove()
testing.tearDown()
engine = create_engine('postgresql+psycopg2://thesis_db_user:_89_hHh_989g2988h08g2As@127.0.0.1:5432/thesis_test_db')
DBSession.configure(bind=engine)
# Drop all the models
Base.metadata.drop_all(engine)
def test_search_layers_by_name(self):
test_layer_1 = DBSession.query(Layer).\
filter_by(name='TestLayer1').one()
self.assertEqual(test_layer_1.name, 'TestLayer1')
self.assertEqual(len(test_layer_1.mappable_points), 2)
test_layer_2 = DBSession.query(Layer).\
filter_by(name='TestLayer2').one()
self.assertEqual(test_layer_2.name, 'TestLayer2')
self.assertEqual(len(test_layer_2.mappable_points), 3)
def test_emu_fixure_loaded(self):
test_emu_layer = DBSession.query(Layer).\
filter_by(name='Emu').one()
self.assertGreater(len(test_emu_layer.mappable_points), 5)
def test_get_layer_points_as_geo_json(self):
test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
test_layer_2 = DBSession.query(Layer).filter_by(name='TestLayer2').one()
q = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=1)
result = q.all()
# self.assertEqual(result[0].locations, '{"type":"MultiPoint","coordinates":[[20,10]]}')
# self.assertEqual(result[1].locations, '{"type":"MultiPoint","coordinates":[[30,10]]}')
self.assertEqual(result[0].cluster_size, 1)
self.assertEqual(result[1].cluster_size, 1)
q2 = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=100)
result2 = q2.all()
# self.assertEqual(result2[0].locations, '{"type":"MultiPoint","coordinates":[[30,10],[20,10]]}')
self.assertEqual(result2[0].cluster_size, 2)
q3 = GriddedMappablePoint.get_points_as_geojson(test_layer_2, grid_size=1)
result3 = q3.all()
# self.assertEqual(result3[0].locations, '{"type":"MultiPoint","coordinates":[[10,15],[10,15]]}')
# self.assertEqual(result3[1].locations, '{"type":"MultiPoint","coordinates":[[30,15]]}')
self.assertEqual(result3[0].cluster_size, 2)
self.assertEqual(result3[1].cluster_size, 1)
def test_get_cluster_centroids_as_geo_json(self):
test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
test_layer_2 = DBSession.query(Layer).filter_by(name='TestLayer2').one()
q = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=1)
result = q.all()
self.assertEqual(result[0].centroid, '{"type":"Point","coordinates":[20,10]}')
self.assertEqual(result[1].centroid, '{"type":"Point","coordinates":[30,10]}')
q2 = GriddedMappablePoint.get_points_as_geojson(test_layer_1, grid_size=100)
result2 = q2.one()
self.assertEqual(result2.centroid, '{"type":"Point","coordinates":[25,10]}')
q3 = GriddedMappablePoint.get_points_as_geojson(test_layer_2, grid_size=100)
result3 = q3.one()
self.assertEqual(result3.centroid, '{"type":"Point","coordinates":[16.6666666666667,15]}')
def test_get_layer_points_as_wkt(self):
test_layer_1 = DBSession.query(Layer).filter_by(name='TestLayer1').one()
q = GriddedMappablePoint.get_points_as_wkt(test_layer_1, grid_size=1)
result = q.all()
# self.assertEqual(result[0].locations, 'MULTIPOINT(20 10)')
# self.assertEqual(result[1].locations, 'MULTIPOINT(30 10)')
def test_normalise_grid_size(self):
grid_size_1 = GriddedMappablePoint.normalise_grid_size(10)
self.assertEqual(grid_size_1, 8)
grid_size_2 = GriddedMappablePoint.normalise_grid_size(0.00001)
self.assertEqual(grid_size_2, 0)
grid_size_3 = GriddedMappablePoint.normalise_grid_size(0.9)
self.assertEqual(grid_size_3, 0.5)
grid_size_4 = GriddedMappablePoint.normalise_grid_size(1.1)
self.assertEqual(grid_size_4, 1)
| robertpyke/PyThesis | thesis/tests/gridded_mappable_point.py | Python | mit | 6,740 |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from matplotlib.colors import Normalize
class MidpointNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
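# MidpointNormalize maps [vmin, midpoint, vmax] onto [0, 0.5, 1], pinning the
# centre of a diverging colormap to `midpoint`; sketch (values assumed):
#   norm = MidpointNormalize(vmin=-2.0, vmax=6.0, midpoint=0.0)
#   float(norm(3.0))  # -> 0.75, halfway between midpoint and vmax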
def make_colormap(seq):
seq = [(None,) * 3, 0.0] + list(seq) + [1.0, (None,) * 3]
cdict = {'red': [], 'green': [], 'blue': []}
for i, item in enumerate(seq):
if isinstance(item, float):
r1, g1, b1 = seq[i - 1]
r2, g2, b2 = seq[i + 1]
cdict['red'].append([item, r1, r2])
cdict['green'].append([item, g1, g2])
cdict['blue'].append([item, b1, b2])
return mcolors.LinearSegmentedColormap('CustomMap', cdict)
c = mcolors.ColorConverter().to_rgb
phimap = make_colormap([c('white'), c('tomato'), 0.33, c(
'tomato'), c('deepskyblue'), 0.66, c('deepskyblue'), c('white')])
yinver = np.load('finalLMmilne2.npy')
# PLOT
titulos = ['B', 'thetaB', 'phiB', 'vlos',
'eta0', 'a', 'ddop', 'S_0', 'S_1', 'chi2']
# plt.figure(1, figsize(18,9))
for i in range(9):
plt.subplot(3, 3, i + 1)
plt.imshow(yinver[:, :, i], cmap='cubehelix', origin='lower')
if i == 2:
plt.imshow(yinver[:, :, i], cmap=phimap, origin='lower')
if i == 3:
norm = MidpointNormalize(midpoint=0)
plt.imshow(yinver[:, :, i], norm=norm,
cmap=plt.cm.seismic, origin='lower')
plt.title(titulos[i])
plt.colorbar()
plt.tight_layout()
plt.figure(2)
plt.subplot(2, 1, 1)
plt.imshow(yinver[:, :, 9], cmap='cubehelix', origin='lower', vmax=0.01)
plt.colorbar()
plt.subplot(2, 1, 2)
plt.imshow(yinver[:, :, 10], cmap='cubehelix', origin='lower')
plt.colorbar()
plt.show()
| cdiazbas/LMpyMilne | allplot.py | Python | mit | 2,084 |
#!/usr/bin/env python
import sys
import argparse
from .audio import create_tracks
from .downloader import YouTube
from .parser import parse_tracks_file
from .prompt import wizard
from .exceptions import WizardError
def get_from_youtube(source):
yt = YouTube(source)
highest_bitrate = yt.audio_available.get('high')
return yt.download_audio(highest_bitrate)
def get_from_local(source):
return source
def generate_album(artist, album, tracks, source, input, output,
format='mp3', from_wizard=None):
"""
Generates tracks under dest_dir using the source media file (download|local)
"""
get_media_file_src = {'youtube': get_from_youtube,
'local': get_from_local}
media_file_src = get_media_file_src.get(source)(input)
if from_wizard is None:
audio_segments = parse_tracks_file(tracks)
else:
audio_segments = tracks
create_tracks(media_file_src, output, audio_segments,
artist, album, source_type=source, format=format)
def main():
parser = argparse.ArgumentParser(
prog='lobster',
description='Cut audio files with a single command'
)
parser.add_argument('--artist', '-ar', type=str, required=False,
                        help='Name of the artist of the track; this will be used '\
                        + 'to name the output directory')
parser.add_argument('--album', '-al', type=str, required=False,
help='Name of the album, this will be used to name '\
+ 'the output directory')
parser.add_argument('--tracks', '-t', type=str, required=False,
help='File containing the information to build the tracks')
parser.add_argument('--source', '-s', type=str, choices=['local', 'youtube'],
required=False, help='Name of the media file source')
parser.add_argument('--input', '-i', type=str, required=False,
help='Path to the source media file')
parser.add_argument('--output', '-o', type=str, required=False,
                        help='Path to the output directory')
parser.add_argument('--format', type=str, help='Input media file format',
default='mp3')
    mode_help_message = 'Launch Lobster in Wizard or Command mode: `wizard`'\
                        ' will launch the Wizard mode, `cmd` will launch'\
                        ' Command mode; `cmd` is the current default'
    parser.add_argument('--mode', '-m', type=str,
                        help=mode_help_message,
                        default='cmd')
kwargs=vars(parser.parse_args())
mode = kwargs.get('mode').lower()
if mode == 'cmd':
required_fields = ["artist", "album", "tracks", "source", "input",
"output"]
should_generate = True
for req_field in required_fields:
if kwargs.get(req_field) is None:
should_generate = False
print("Missing required argument --{}".format(req_field))
if should_generate:
del kwargs['mode']
generate_album(**kwargs)
elif mode == 'wizard':
try:
generate_album(**wizard())
except WizardError:
sys.exit()
else:
print('Invalid {} mode'.format(mode))
sys.exit(main())
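# Example command-line invocation (all values hypothetical):
#   python -m lobster --artist "Some Artist" --album "Some Album" \
#       --tracks tracks.txt --source youtube \
#       --input https://youtu.be/XXXXXXXXXXX --output ./albums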
| noahfx/lobster | lobster/__main__.py | Python | mit | 3,384 |
# -*- coding: utf-8 -*-
from .baserequest import BaseRequest
from oandapyV20.types import TradeID, PriceValue
from oandapyV20.definitions.orders import TimeInForce, OrderType
class StopLossOrderRequest(BaseRequest):
"""create a StopLossOrderRequest.
StopLossOrderRequest is used to build the body for a StopLossOrder.
The body can be used to pass to the OrderCreate endpoint.
"""
def __init__(self,
tradeID,
price,
clientTradeID=None,
timeInForce=TimeInForce.GTC,
gtdTime=None,
clientExtensions=None):
"""
Instantiate a StopLossOrderRequest.
Parameters
----------
tradeID : string (required)
the tradeID of an existing trade
price : float (required)
            the threshold price at which the trade will be closed
Example
-------
>>> import json
>>> from oandapyV20 import API
>>> import oandapyV20.endpoints.orders as orders
>>> from oandapyV20.contrib.requests import StopLossOrderRequest
>>>
>>> accountID = "..."
>>> client = API(access_token=...)
>>> ordr = StopLossOrderRequest(tradeID="1234", price=1.07)
>>> print(json.dumps(ordr.data, indent=4))
{
"order": {
"type": "STOP_LOSS",
"tradeID": "1234",
"price": "1.07000",
"timeInForce": "GTC",
}
}
>>> # now we have the order specification, create the order request
>>> r = orders.OrderCreate(accountID, data=ordr.data)
>>> # perform the request
>>> rv = client.request(r)
>>> print(json.dumps(rv, indent=4))
>>> ...
"""
super(StopLossOrderRequest, self).__init__()
# allowed: GTC/GFD/GTD
if timeInForce not in [TimeInForce.GTC,
TimeInForce.GTD,
TimeInForce.GFD]:
raise ValueError("timeInForce: {}".format(timeInForce))
# by default for a STOP_LOSS order
self._data.update({"type": OrderType.STOP_LOSS})
# required
self._data.update({"tradeID": TradeID(tradeID).value})
self._data.update({"price": PriceValue(price).value})
# optional
self._data.update({"clientExtensions": clientExtensions})
self._data.update({"timeInForce": timeInForce})
self._data.update({"gtdTime": gtdTime})
if timeInForce == TimeInForce.GTD and not gtdTime:
raise ValueError("gtdTime missing")
@property
def data(self):
"""data property.
return the JSON body.
"""
return dict({"order": super(StopLossOrderRequest, self).data})
| hootnot/oanda-api-v20 | oandapyV20/contrib/requests/stoplossorder.py | Python | mit | 2,841 |
# Copyright (C) 2017-2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
import base_bs_erf
import numba as nb
from math import log, sqrt, exp, erf
def black_scholes_numba_opt(price, strike, t, mr, sig_sig_two, vol, call, put):
P = float( price [0] )
S = strike [0]
T = t [0]
a = log(P / S)
b = T * mr[0]
z = T * sig_sig_two[0]
c = 0.25 * z
y = 1./sqrt(z)
w1 = (a - b + c) * y
w2 = (a - b - c) * y
d1 = 0.5 + 0.5 * erf(w1)
d2 = 0.5 + 0.5 * erf(w2)
Se = exp(b) * S
res = P * d1 - Se * d2
call [0] = res
put [0] = res - P + Se
black_scholes_numba_opt_vec = nb.guvectorize('(f8[::1],f8[::1],f8[::1],f8[:],f8[:],f8[:],f8[::1],f8[::1])',
'(),(),(),(),(),()->(),()', nopython=True, target="parallel", fastmath=False)(black_scholes_numba_opt)
@nb.jit
def black_scholes(nopt, price, strike, t, rate, vol, call, put):
sig_sig_two = vol*vol*2
mr = -rate
black_scholes_numba_opt_vec(price, strike, t, mr, sig_sig_two, vol, call, put)
base_bs_erf.run("Numba@guvec-par", black_scholes, pass_args=True)
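# Hypothetical direct call, bypassing the base_bs_erf driver (array sizes and
# value ranges are assumptions):
#   import numpy as np
#   nopt = 1024
#   price = np.random.uniform(10.0, 50.0, nopt)
#   strike = np.random.uniform(10.0, 50.0, nopt)
#   t = np.random.uniform(1.0, 2.0, nopt)
#   call, put = np.zeros(nopt), np.zeros(nopt)
#   black_scholes(nopt, price, strike, t, 0.1, 0.2, call, put)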
| IntelPython/BlackScholes_bench | bs_erf_numba_guvec_par.py | Python | mit | 1,146 |
#!/usr/bin/python
import pyglet
from pyglet.gl import *
from pyglet.window import key
import math
import random
window = pyglet.window.Window(fullscreen=True)
pyglet.resource.path.append('./images')
pyglet.resource.reindex()
center_x = int(window.width/2)
center_y = int(window.height/2)
def center_anchor(img):
img.anchor_x = img.width // 2
img.anchor_y = img.height // 2
def wrap(value, width):
if width == 0:
return 0
while value > width:
value -= width
while value < 0:
value += width
return value
radians_in_circle = math.pi * 2
def to_radians(degrees):
return math.pi * degrees / 180.0
def to_degrees(radians):
return (180 * radians) / math.pi
def make_vec(point_a, point_b):
    """distance and angle from (x1,y1) to (x2,y2)"""
    (x1, y1) = point_a
    (x2, y2) = point_b
dx = x1 - x2
dy = y1 - y2
distance = math.sqrt(dx**2 + dy**2)
if distance == 0:
return (0,0)
angle = math.acos(float(dx) / distance)
if dy < 0:
angle = 2*math.pi - angle
return (distance, angle)
def vec_to_xy(distance, angle):
x = distance * math.cos(angle)
y = distance * math.sin(angle)
return (x,y)
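# Round-trip sketch: vec_to_xy undoes make_vec (up to float error), e.g.
#   d, a = make_vec((3, 4), (0, 0))  # d == 5.0, a == acos(3/5)
#   vec_to_xy(d, a)                  # -> (3.0, 4.0)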
def dist_vec_to(source, target):
return make_vec(
(source.x, source.y),
(target.x, target.y))
def degree_angle_diff(angle1, angle2):
# assumes degrees
diff = wrap(angle1 - angle2, 360.)
if diff > 180:
diff = 360.0 - diff
return diff
planet_image = pyglet.resource.image('mars.png')
center_anchor(planet_image)
ship_image = pyglet.resource.image('ship.png')
center_anchor(ship_image)
ship_image_on = pyglet.resource.image('ship_on.png')
center_anchor(ship_image_on)
bullet_image = pyglet.resource.image('bullet.png')
center_anchor(bullet_image)
alien_image = pyglet.resource.image('alien.png')
center_anchor(alien_image)
class Planet(pyglet.sprite.Sprite):
def __init__(self, image, x=0, y=0, batch=None):
super(Planet, self).__init__(image, x, y, batch=batch)
self.x = x
self.y = y
self.mass = 5000000 # experiment!
self.scale = 1.0
self.radius = self.scale * (self.image.height + self.image.width) / 4
def force_on(self, target):
G = 1 # experiment!
distance, angle = dist_vec_to(self, target)
return ((G * self.mass) / (distance**2), angle)
def update(self, dt):
# Check collisions
distance, angle = dist_vec_to(self, ship)
if distance <= ship.radius + self.radius:
ship.reset()
ship.alive = False
# Gravity!
force, angle = self.force_on(ship)
force_x = force * math.cos(angle) * dt
force_y = force * math.sin(angle) * dt
ship.dx += force_x
ship.dy += force_y
class Ship(pyglet.sprite.Sprite, key.KeyStateHandler):
def __init__(self, image, x=0, y=0, dx=0, dy=0, rotv=0, batch=None):
super(Ship, self).__init__(image, x, y, batch=batch)
self.x = x
self.y = y
self.dx = dx
self.dy = dy
self.rotation = rotv
self.thrust = 150.0
self.rot_spd = 100.0
self.alive = True
self.radius = self.image.width / 2
self.max_speed = 100
self.shot_timer = 0.2
self.reload_timer = self.shot_timer
self.bullets = []
self.score = 0
def reset(self):
ship.life_timer = 2.0 # seconds until respawn
self.x = center_x + 300; self.y = center_y
self.dx = 0; self.dy = 150
self.rotation = -90
def update(self, dt):
self.image = ship_image
score.text = "Score: %d" % self.score
if not self.alive:
#print "Dead! Respawn in %s" % self.life_timer
self.life_timer -= dt
if self.life_timer > 0:
return
else:
self.reset()
self.score -= 100
self.alive = True
# Update rotation
if self[key.LEFT]:
self.rotation -= self.rot_spd * dt
if self[key.RIGHT]:
self.rotation += self.rot_spd * dt
self.rotation = wrap(self.rotation, 360.)
# Get x/y components of orientation
# Pyglet and python math angles don't correspond, but reversing the x axis fixes that
rotation_x = math.cos(to_radians(self.rotation))
rotation_y = math.sin(to_radians(-self.rotation))
# Update velocity
if self[key.UP]:
self.image = ship_image_on
self.dx += self.thrust * rotation_x * dt
self.dy += self.thrust * rotation_y * dt
# Shoot bullets
if self.reload_timer > 0:
self.reload_timer -= dt
if self[key.SPACE]:
if self.reload_timer <= 0:
self.bullets.append(Bullet(self.x, self.y, rotation_x*500+self.dx, rotation_y*500+self.dy, bullets))
self.reload_timer = self.shot_timer
self.x += self.dx * dt
self.y += self.dy * dt
self.x = wrap(self.x, window.width)
self.y = wrap(self.y, window.height)
class Bullet(pyglet.sprite.Sprite):
def __init__(self, x=0, y=0, dx=0, dy=0, batch=None):
super(Bullet, self).__init__(bullet_image, x, y, batch=batch)
self.x = x
self.y = y
self.dx = dx
self.dy = dy
self.radius = self.image.width / 2
self.timer = 1.5
def update(self, dt):
self.x += self.dx * dt
self.y += self.dy * dt
self.x = wrap(self.x, window.width)
self.y = wrap(self.y, window.height)
self.timer -= dt
        # collide with planet, or remove when the 1.5 second timer expires
distance, angle = dist_vec_to(planet, self)
if distance <= planet.radius or self.timer < 0:
ship.bullets.remove(self)
return
# check collision with Alien
dist, angle = dist_vec_to(self, alien)
if dist < alien.radius:
# hit alien
alien.reset()
alien.alive = False
ship.bullets.remove(self)
ship.score += 100
return
class Alien(pyglet.sprite.Sprite):
def __init__(self, image, x=0, y=0, dx=0, dy=0, batch=None):
super(Alien, self).__init__(image, x, y, batch=batch)
self.x = x
self.y = y
self.dx = dx
self.dy = dy
self.radius = self.image.width / 2
self.life_timer = 2.0
self.accel_spd = 200.0
self.max_spd = 400.0
self.alive = True
self.AI = "FTANG!"
def reset(self):
self.alive = True
self.life_timer = 2.0 # seconds until respawn
self.x = random.random() * window.width
self.y = random.random() * window.height
self.dx = random.random() * (self.max_spd/2)
self.dy = random.random() * (self.max_spd/2)
def update(self, dt):
if not self.alive:
self.life_timer -= dt
if self.life_timer > 0:
return
else:
self.reset()
# movement - random acceleration
if random.random() < 0.2:
accel_dir = random.random() * math.pi*2
accel_amt = random.random() * self.accel_spd
accel_x, accel_y = vec_to_xy(accel_amt, accel_dir)
self.dx += accel_x
self.dy += accel_y
# limit the alien's speed to max_spd
self.dx = min(self.dx, self.max_spd)
self.dx = max(self.dx, -self.max_spd)
self.dy = min(self.dy, self.max_spd)
self.dy = max(self.dy, -self.max_spd)
self.x += self.dx * dt
self.y += self.dy * dt
self.x = wrap(self.x, window.width)
self.y = wrap(self.y, window.height)
# check collisions with the player
player_dist, player_angle = dist_vec_to(self, ship)
if player_dist < (ship.radius + self.radius) * 0.75:
# BANG! got the player
self.reset()
self.alive = False
ship.reset()
ship.alive = False
# take potshots at the player
# Ship is not affected by gravity, doesn't hit the planet
# TODO: lead the target, ie. calculate where the player is going to be and shoot there
planet = Planet(planet_image, center_x, center_y)
bullets = pyglet.graphics.Batch()
ship = Ship(ship_image)
ship.reset()
alien = Alien(alien_image)
alien.reset()
score = pyglet.text.Label('Score: 0',
font_name='Arial',
font_size=36,
x=10, y=10,
anchor_x='left', anchor_y='bottom')
score.color = (255, 255, 255, 255)
@window.event
def on_draw():
window.clear()
planet.draw()
if alien.alive:
alien.draw()
bullets.draw()
if ship.alive:
ship.draw()
score.draw()
# Call update 60 times a second
def update(dt):
planet.update(dt)
alien.update(dt)
for bullet in ship.bullets:
bullet.update(dt)
ship.update(dt)
window.push_handlers(ship)
pyglet.clock.schedule_interval(update, 1/60.0)
pyglet.app.run()
| AnthonyBriggs/Python-101 | hello_python_source_py3/chapter 09/ship-6-alien.py | Python | mit | 9,407 |
class BinHeap:
    """Minimal scaffold assumed around the book's listing; the full class
    in the text also defines insert, delMin, buildHeap, etc."""
    def __init__(self):
        self.heapList = [0]  # index 0 is a dummy so children sit at 2*i and 2*i+1
        self.currentSize = 0
    def percDown(self,i):
        while (i * 2) <= self.currentSize:
            mc = self.minChild(i)
            if self.heapList[i] > self.heapList[mc]:
                tmp = self.heapList[i]
                self.heapList[i] = self.heapList[mc]
                self.heapList[mc] = tmp
            i = mc
    def minChild(self,i):
        # return the index of the smaller child of node i
        if i * 2 + 1 > self.currentSize:
            return i * 2
        else:
            if self.heapList[i*2] < self.heapList[i*2+1]:
                return i * 2
            else:
                return i * 2 + 1
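# Demo of percDown restoring heap order after a delMin-style root swap
# (the BinHeap scaffold above is an assumed context for this listing):
#   h = BinHeap()
#   h.heapList = [0, 6, 3, 5, 9]  # last element was moved to the root
#   h.currentSize = 4
#   h.percDown(1)                 # heapList becomes [0, 3, 6, 5, 9]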
| robin1885/algorithms-exercises-using-python | source-code-from-author-book/Listings-for-Second-Edition/listing_6_20.py | Python | mit | 488 |
#!/usr/bin/env python3
import unittest
import logging
import importlib
import copy
import os
from docopt import docopt
from unittest.mock import patch
import pytest
import ciftify.utils
logging.disable(logging.CRITICAL)
ciftify_recon_all = importlib.import_module('ciftify.bin.ciftify_recon_all')
class ConvertFreesurferSurface(unittest.TestCase):
meshes = ciftify_recon_all.define_meshes('/somewhere/hcp/subject_1',
"164", ["32"], '/tmp/temp_dir', False)
@patch('ciftify.bin.ciftify_recon_all.run')
def test_secondary_type_option_adds_to_set_structure_command(self, mock_run):
secondary_type = 'GRAY_WHITE'
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
surface_secondary_type=secondary_type)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
set_structure_present = False
for item in arg_list:
args = item[0][0]
if '-set-structure' in args:
set_structure_present = True
assert '-surface-secondary-type' in args
assert secondary_type in args
# If this fails the wb_command -set-structure call is not being made
# at all. Is expected at least once regardless of secondary-type option
assert set_structure_present
@patch('ciftify.bin.ciftify_recon_all.run')
def test_secondary_type_not_set_if_option_not_used(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'])
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
set_structure_present = False
for item in arg_list:
args = item[0][0]
if '-set-structure' in args:
set_structure_present = True
assert '-surface-secondary-type' not in args
# If this fails the wb_command -set-structure call is not being made
# at all. Is expected at least once regardless of secondary-type option
assert set_structure_present
@patch('ciftify.bin.ciftify_recon_all.run')
def test_wbcommand_surface_apply_affine_called_when_cras_option_set(self,
mock_run):
cras_file = '/somewhere/cras.mat'
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
cras_mat=cras_file)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
surface_apply_calls = 0
for item in arg_list:
args = item[0][0]
if '-surface-apply-affine' in args and cras_file in args:
surface_apply_calls += 1
# The wb_command -surface-apply-affine command should be run once for
# each hemisphere
assert surface_apply_calls == 2
@patch('ciftify.bin.ciftify_recon_all.run')
def test_no_wbcommand_added_when_cras_option_not_set(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'])
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
surface_apply_calls = 0
for item in arg_list:
args = item[0][0]
if '-surface-apply-affine' in args:
surface_apply_calls += 1
assert surface_apply_calls == 0
@patch('ciftify.bin.ciftify_recon_all.run')
def test_add_to_spec_option_adds_wbcommand_call(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
add_to_spec=True)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
spec_added_calls = 0
for item in arg_list:
args = item[0][0]
if '-add-to-spec-file' in args:
spec_added_calls += 1
# Should add one call for each hemisphere
assert spec_added_calls == 2
@patch('ciftify.bin.ciftify_recon_all.run')
def test_add_to_spec_option_not_present_when_option_not_set(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
add_to_spec=False)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
spec_added_calls = 0
for item in arg_list:
args = item[0][0]
if '-add-to-spec-file' in args:
spec_added_calls += 1
assert spec_added_calls == 0
class CreateRegSphere(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.run_MSMSulc_registration')
@patch('ciftify.bin.ciftify_recon_all.run_fs_reg_LR')
def test_reg_sphere_is_not_set_to_none_for_any_mode(self, mock_fs_reg,
mock_msm_reg):
"""
Should fail if MSMSulc registration is implemented without supplying a
value for reg_sphere
"""
# settings stub, to allow tests to be written.
class Settings(object):
def __init__(self, name):
self.high_res = 999
self.reg_name = name
self.ciftify_data_dir = '/somedir/'
self.msm_config = None
# Test reg_sphere set when in FS mode
settings = Settings('FS')
meshes = {'AtlasSpaceNative' : ''}
subject_id = 'some_id'
reg_sphere = ciftify_recon_all.create_reg_sphere(settings, subject_id, meshes)
assert reg_sphere is not None
# Test reg_sphere set when in MSMSulc mode
settings = Settings('MSMSulc')
reg_sphere = ciftify_recon_all.create_reg_sphere(settings, subject_id, meshes)
assert reg_sphere is not None
class CopyAtlasRoiFromTemplate(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.link_to_template_file')
def test_does_nothing_when_roi_src_does_not_exist(self, mock_link):
class Settings(object):
def __init__(self):
self.subject = self.Subject()
self.ciftify_data_dir = '/someotherpath/ciftify/data'
self.work_dir = '/somepath/hcp'
class Subject(object):
def __init__(self):
id = 'some_id'
settings = Settings()
mesh_settings = {'meshname' : 'some_mesh'}
ciftify_recon_all.copy_atlas_roi_from_template(settings, mesh_settings)
assert mock_link.call_count == 0
class DilateAndMaskMetric(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.run')
def test_does_nothing_when_dscalars_map_doesnt_mask_medial_wall(self,
mock_run):
# Stubs to allow testing
dscalars = {'some_map' : {'mask_medialwall' : False}}
mesh = {'tmpdir' : '/tmp/temp_dir',
'meshname' : 'some_mesh'}
ciftify_recon_all.dilate_and_mask_metric('some_id', mesh, dscalars)
assert mock_run.call_count == 0
@patch('os.makedirs')
@patch('ciftify.config.find_fsl')
class TestSettings(unittest.TestCase):
arguments = docopt(ciftify_recon_all.__doc__,
'--hcp-data-dir /somepath/pipelines/hcp --fs-subjects-dir /somepath/pipelines/freesurfer --surf-reg FS STUDY_SITE_ID_01')
subworkdir = '/somepath/pipelines/hcp/STUDY_SITE_ID_01'
yaml_config = {'high_res' : "164",
'low_res' : ["32"],
'grayord_res' : [2],
'dscalars' : {},
'registration' : {'src_dir' : 'T1w',
'dest_dir' : 'MNINonLinear',
'xfms_dir' : 'MNINonLinear/xfms'},
'FSL_fnirt' : {'2mm' : {'FNIRTConfig' : 'etc/flirtsch/T1_2_MNI152_2mm.cnf'}}}
def set_mock_env(self, mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_fs_root_dir_set_to_user_value_when_given(self, mock_exists,
mock_settings, mock_ciftify, mock_fsl, mock_makedirs):
self.set_mock_env(mock_ciftify, mock_fsl, mock_makedirs)
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_settings.return_value = self.yaml_config
settings = ciftify_recon_all.Settings(self.arguments)
assert settings.fs_root_dir == self.arguments['--fs-subjects-dir']
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.config.find_freesurfer_data')
@patch('os.path.exists')
def test_exits_when_no_fs_dir_given_and_cannot_find_shell_value(self,
mock_exists, mock_fs, mock_ciftify, mock_fsl, mock_makedirs):
self.set_mock_env(mock_ciftify, mock_fsl, mock_makedirs)
# This is to avoid sys.exit calls due to the mock directories not
# existing.
mock_exists.return_value = True
# work with a deep copy of arguments to avoid modifications having any
# effect on later tests
args_copy = copy.deepcopy(self.arguments)
args_copy['--fs-subjects-dir'] = None
# Just in case the shell environment has the variable set...
mock_fs.return_value = None
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args_copy)
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_dscalars_doesnt_contain_msmsulc_settings_when_reg_name_is_FS(
self, mock_exists, mock_yaml_settings, mock_ciftify, mock_fsl,
mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
settings = ciftify_recon_all.Settings(self.arguments)
if settings.reg_name == 'FS':
assert 'ArealDistortion_MSMSulc' not in settings.dscalars.keys()
else:
assert True
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_msm_config_set_to_none_in_fs_mode(self, mock_exists,
mock_yaml_settings, mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
args_copy = copy.deepcopy(self.arguments)
args_copy['--surf-reg'] = "FS"
settings = ciftify_recon_all.Settings(self.arguments)
assert settings.msm_config is None
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.config.verify_msm_available')
@patch('ciftify.bin.ciftify_recon_all.Settings.check_msm_config', return_value = True)
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_msm_config_set_to_default_when_user_config_not_given(self,
mock_exists, mock_yaml_settings, mock_msm_check, mock_msm_check1,
mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
# Modify copy of arguments, so changes dont effect other tests
args = copy.deepcopy(self.arguments)
args['--surf-reg'] = 'MSMSulc'
args['--MSM-config'] = None
settings = ciftify_recon_all.Settings(args)
assert settings.msm_config is not None
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_user_msm_config_doesnt_exist(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
user_config = "/some/path/nonexistent_config"
mock_exists.side_effect = lambda path: False if path == user_config else True
args = copy.deepcopy(self.arguments)
args['--surf-reg'] = 'MSMSulc'
args['--MSM-config'] = user_config
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_nonlin_xfm_given_alone(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-non-lin-xfm'] = '/some/file'
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_lin_xfm_given_alone(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-lin-premat'] = '/some/file'
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.utils.check_input_readable')
@patch('os.path.exists')
def test_xfms_set_if_given(self, mock_exists, mock_inputreadble,
mock_fsl, mock_makedirs):
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-lin-premat'] = '/some/file1'
args['--read-non-lin-xfm'] = '/some/file2'
settings = ciftify_recon_all.Settings(args)
assert settings.registration['User_AtlasTransform_Linear'] == '/some/file1'
assert settings.registration['User_AtlasTransform_NonLinear'] == '/some/file2'
| edickie/ciftify | tests/test_ciftify_recon_all.py | Python | mit | 15,712 |
import http
from openbrokerapi.service_broker import LastOperation, OperationState
from tests import BrokerTestCase
class LastBindingOperationTest(BrokerTestCase):
def setUp(self):
self.broker.service_id.return_value = 'service-guid-here'
def test_last_operation_called_just_with_required_fields(self):
self.broker.last_binding_operation.return_value = LastOperation(OperationState.IN_PROGRESS, 'Running...')
self.client.get(
'/v2/service_instances/here-instance_id/service_bindings/binding_id/last_operation',
headers={
'X-Broker-Api-Version': '2.13',
'Authorization': self.auth_header
})
self.broker.last_binding_operation.assert_called_once_with('here-instance_id', 'binding_id', None, None, None)
def test_last_operation_called_with_operation_data(self):
self.broker.last_binding_operation.return_value = LastOperation(OperationState.IN_PROGRESS, 'Running...')
query = 'service_id=&plan_id=456&operation=service-guid-here%20operation-data'
self.client.get(
'/v2/service_instances/here-instance_id/service_bindings/binding_id/last_operation?%s' % query,
headers={
'X-Broker-Api-Version': '2.13',
'Authorization': self.auth_header
})
self.broker.last_binding_operation.assert_called_once_with('here-instance_id', 'binding_id',
'service-guid-here operation-data', "", "456")
def test_returns_200_with_given_state(self):
self.broker.last_binding_operation.return_value = LastOperation(OperationState.IN_PROGRESS, 'Running...')
query = 'service_id=123&plan_id=456&operation=service-guid-here%20operation-data'
response = self.client.get(
'/v2/service_instances/here-instance_id/service_bindings/binding_id/last_operation?%s' % query,
headers={
'X-Broker-Api-Version': '2.13',
'Authorization': self.auth_header
})
self.broker.last_binding_operation.assert_called_once_with('here-instance_id', 'binding_id',
'service-guid-here operation-data', "123", "456")
self.assertEqual(response.status_code, http.HTTPStatus.OK)
self.assertEqual(response.json, dict(
state=OperationState.IN_PROGRESS.value,
description='Running...'
))
| eruvanos/openbrokerapi | tests/test_last_binding_operation.py | Python | mit | 2,530 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# seamless documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 3 17:16:13 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'seamless'
copyright = '2016-2017, Sjoerd de Vries'
author = 'Sjoerd de Vries'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'seamlessdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'seamless.tex', 'seamless documentation',
'Sjoerd de Vries', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'seamless', 'seamless documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'seamless', 'seamless documentation',
author, 'seamless', 'One line description of project.',
'Miscellaneous'),
]
| sjdv1982/seamless | docs/archive/documentation-OLD/sphinx-source/conf.py | Python | mit | 5,272 |
# CVS conversion code inspired by hg-cvs-import and git-cvsimport
import os, locale, re, socket
from cStringIO import StringIO
from mercurial import util
from common import NoRepo, commit, converter_source, checktool
class convert_cvs(converter_source):
def __init__(self, ui, path, rev=None):
super(convert_cvs, self).__init__(ui, path, rev=rev)
cvs = os.path.join(path, "CVS")
if not os.path.exists(cvs):
raise NoRepo("%s does not look like a CVS checkout" % path)
self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
cvspsexe = self.cmd.split(None, 1)[0]
for tool in (cvspsexe, 'cvs'):
checktool(tool)
self.changeset = {}
self.files = {}
self.tags = {}
self.lastbranch = {}
self.parent = {}
self.socket = None
self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
self.encoding = locale.getpreferredencoding()
self._parse()
self._connect()
def _parse(self):
if self.changeset:
return
maxrev = 0
cmd = self.cmd
if self.rev:
# TODO: handle tags
try:
# patchset number?
maxrev = int(self.rev)
except ValueError:
try:
# date
util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
except util.Abort:
raise util.Abort('revision %s is not a patchset number or date' % self.rev)
d = os.getcwd()
try:
os.chdir(self.path)
id = None
state = 0
filerevids = {}
for l in util.popen(cmd):
if state == 0: # header
if l.startswith("PatchSet"):
id = l[9:-2]
if maxrev and int(id) > maxrev:
# ignore everything
state = 3
elif l.startswith("Date"):
date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
date = util.datestr(date)
elif l.startswith("Branch"):
branch = l[8:-1]
self.parent[id] = self.lastbranch.get(branch, 'bad')
self.lastbranch[branch] = id
elif l.startswith("Ancestor branch"):
ancestor = l[17:-1]
# figure out the parent later
self.parent[id] = self.lastbranch[ancestor]
elif l.startswith("Author"):
author = self.recode(l[8:-1])
elif l.startswith("Tag:") or l.startswith("Tags:"):
t = l[l.index(':')+1:]
t = [ut.strip() for ut in t.split(',')]
if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
self.tags.update(dict.fromkeys(t, id))
elif l.startswith("Log:"):
# switch to gathering log
state = 1
log = ""
elif state == 1: # log
if l == "Members: \n":
# switch to gathering members
files = {}
oldrevs = []
log = self.recode(log[:-1])
state = 2
else:
# gather log
log += l
elif state == 2: # members
if l == "\n": # start of next entry
state = 0
p = [self.parent[id]]
if id == "1":
p = []
if branch == "HEAD":
branch = ""
if branch:
latest = None
# the last changeset that contains a base
# file is our parent
for r in oldrevs:
latest = max(filerevids.get(r, None), latest)
if latest:
p = [latest]
# add current commit to set
c = commit(author=author, date=date, parents=p,
desc=log, branch=branch)
self.changeset[id] = c
self.files[id] = files
else:
colon = l.rfind(':')
file = l[1:colon]
rev = l[colon+1:-2]
oldrev, rev = rev.split("->")
files[file] = rev
# save some information for identifying branch points
oldrevs.append("%s:%s" % (oldrev, file))
filerevids["%s:%s" % (rev, file)] = id
elif state == 3:
# swallow all input
continue
self.heads = self.lastbranch.values()
finally:
os.chdir(d)
def _connect(self):
root = self.cvsroot
conntype = None
user, host = None, None
cmd = ['cvs', 'server']
self.ui.status("connecting to %s\n" % root)
if root.startswith(":pserver:"):
root = root[9:]
m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
root)
if m:
conntype = "pserver"
user, passw, serv, port, root = m.groups()
if not user:
user = "anonymous"
if not port:
port = 2401
else:
port = int(port)
format0 = ":pserver:%s@%s:%s" % (user, serv, root)
format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
if not passw:
passw = "A"
pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
for line in pf.read().splitlines():
part1, part2 = line.split(' ', 1)
if part1 == '/1':
# /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
part1, part2 = part2.split(' ', 1)
format = format1
else:
# :pserver:user@example.com:/cvsroot/foo Ah<Z
format = format0
if part1 == format:
passw = part2
break
pf.close()
sck = socket.socket()
sck.connect((serv, port))
sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
"END AUTH REQUEST", ""]))
if sck.recv(128) != "I LOVE YOU\n":
raise util.Abort("CVS pserver authentication failed")
self.writep = self.readp = sck.makefile('r+')
if not conntype and root.startswith(":local:"):
conntype = "local"
root = root[7:]
if not conntype:
# :ext:user@host/home/user/path/to/cvsroot
if root.startswith(":ext:"):
root = root[5:]
m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
# Do not take Windows path "c:\foo\bar" for a connection strings
if os.path.isdir(root) or not m:
conntype = "local"
else:
conntype = "rsh"
user, host, root = m.group(1), m.group(2), m.group(3)
if conntype != "pserver":
if conntype == "rsh":
rsh = os.environ.get("CVS_RSH") or "ssh"
if user:
cmd = [rsh, '-l', user, host] + cmd
else:
cmd = [rsh, host] + cmd
# popen2 does not support argument lists under Windows
cmd = [util.shellquote(arg) for arg in cmd]
cmd = util.quotecommand(' '.join(cmd))
self.writep, self.readp = os.popen2(cmd, 'b')
self.realroot = root
self.writep.write("Root %s\n" % root)
self.writep.write("Valid-responses ok error Valid-requests Mode"
" M Mbinary E Checked-in Created Updated"
" Merged Removed\n")
self.writep.write("valid-requests\n")
self.writep.flush()
r = self.readp.readline()
if not r.startswith("Valid-requests"):
raise util.Abort("server sucks")
if "UseUnchanged" in r:
self.writep.write("UseUnchanged\n")
self.writep.flush()
r = self.readp.readline()
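    # CVSROOT forms handled by _connect above (hosts and paths are
    # hypothetical examples):
    #   :pserver:anonymous@cvs.example.org:2401/cvsroot/proj
    #   :local:/var/lib/cvsroot
    #   :ext:user@cvs.example.org/home/cvs   (tunnelled via $CVS_RSH or ssh)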
def getheads(self):
return self.heads
def _getfile(self, name, rev):
def chunkedread(fp, count):
            # file-objects returned by socket.makefile() do not handle
# large read() requests very well.
chunksize = 65536
output = StringIO()
while count > 0:
data = fp.read(min(count, chunksize))
if not data:
raise util.Abort("%d bytes missing from remote file" % count)
count -= len(data)
output.write(data)
return output.getvalue()
if rev.endswith("(DEAD)"):
raise IOError
args = ("-N -P -kk -r %s --" % rev).split()
args.append(self.cvsrepo + '/' + name)
for x in args:
self.writep.write("Argument %s\n" % x)
self.writep.write("Directory .\n%s\nco\n" % self.realroot)
self.writep.flush()
data = ""
while 1:
line = self.readp.readline()
if line.startswith("Created ") or line.startswith("Updated "):
self.readp.readline() # path
self.readp.readline() # entries
mode = self.readp.readline()[:-1]
count = int(self.readp.readline()[:-1])
data = chunkedread(self.readp, count)
elif line.startswith(" "):
data += line[1:]
elif line.startswith("M "):
pass
elif line.startswith("Mbinary "):
count = int(self.readp.readline()[:-1])
data = chunkedread(self.readp, count)
else:
if line == "ok\n":
return (data, "x" in mode and "x" or "")
elif line.startswith("E "):
self.ui.warn("cvs server: %s\n" % line[2:])
elif line.startswith("Remove"):
l = self.readp.readline()
l = self.readp.readline()
if l != "ok\n":
raise util.Abort("unknown CVS response: %s" % l)
else:
raise util.Abort("unknown CVS response: %s" % line)
def getfile(self, file, rev):
data, mode = self._getfile(file, rev)
self.modecache[(file, rev)] = mode
return data
def getmode(self, file, rev):
return self.modecache[(file, rev)]
def getchanges(self, rev):
self.modecache = {}
files = self.files[rev]
cl = files.items()
cl.sort()
return (cl, {})
def getcommit(self, rev):
return self.changeset[rev]
def gettags(self):
return self.tags
def getchangedfiles(self, rev, i):
files = self.files[rev].keys()
files.sort()
return files
| carlgao/lenga | images/lenny64-peon/usr/share/python-support/mercurial-common/hgext/convert/cvs.py | Python | mit | 11,997 |
import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission, Follow
class UserModelTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
Role.insert_roles()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_password_setter(self):
u = User(password='cat')
self.assertTrue(u.password_hash is not None)
def test_no_password_getter(self):
u = User(password='cat')
with self.assertRaises(AttributeError):
u.password
def test_password_verification(self):
u = User(password='cat')
self.assertTrue(u.verify_password('cat'))
self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
u = User(password='cat')
u2 = User(password='cat')
self.assertTrue(u.password_hash != u2.password_hash)
def test_valid_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token()
self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('susan@example.org')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == 'susan@example.org')
def test_invalid_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('david@example.net')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_duplicate_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_email_change_token('john@example.com')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_roles_and_permissions(self):
u = User(email='john@example.com', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='john@example.com', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
self.assertTrue('http://www.gravatar.com/avatar/' +
                        'd4c74594d841139328695756648b6bd6' in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
def test_follows(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
self.assertFalse(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
timestamp_before = datetime.utcnow()
u1.follow(u2)
db.session.add(u1)
db.session.commit()
timestamp_after = datetime.utcnow()
self.assertTrue(u1.is_following(u2))
self.assertFalse(u1.is_followed_by(u2))
self.assertTrue(u2.is_followed_by(u1))
        # count() includes the implicit self-follow, so expect 2 rather than 1
self.assertTrue(u1.followed.count() == 2)
self.assertTrue(u2.followers.count() == 2)
f = u1.followed.all()[-1]
self.assertTrue(f.followed == u2)
self.assertTrue(timestamp_before <= f.timestamp <= timestamp_after)
f = u2.followers.all()[-1]
self.assertTrue(f.follower == u1)
u1.unfollow(u2)
db.session.add(u1)
db.session.commit()
self.assertTrue(u1.followed.count() == 1)
self.assertTrue(u2.followers.count() == 1)
self.assertTrue(Follow.query.count() == 2)
u2.follow(u1)
db.session.add(u1)
db.session.add(u2)
db.session.commit()
db.session.delete(u2)
db.session.commit()
self.assertTrue(Follow.query.count() == 1)
| caser789/xuejiao-blog | tests/test_user_model.py | Python | mit | 6,887 |
# coding: utf-8
#
# The GIL limits Python multi-thread effectiveness, but that is fine here:
# these operations spend most of their time on socket IO, so there is no
# need to use multiprocessing.
#
import uiautomator2 as u2
import adbutils
import threading
from logzero import logger
def worker(d: u2.Device):
d.app_start("io.appium.android.apis", stop=True)
d(text="App").wait()
for el in d.xpath("@android:id/list").child("/android.widget.TextView").all():
logger.info("%s click %s", d.serial, el.text)
el.click()
d.press("back")
logger.info("%s DONE", d.serial)
for dev in adbutils.adb.device_list():
print("Dev:", dev)
d = u2.connect(dev.serial)
t = threading.Thread(target=worker, args=(d,))
t.start()
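# Variant sketch (assumed, not part of the original example): collect the
# Thread objects and join them if the script should block until every
# device's worker finishes:
#   threads = [threading.Thread(target=worker, args=(u2.connect(d.serial),))
#              for d in adbutils.adb.device_list()]
#   for t in threads: t.start()
#   for t in threads: t.join()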
| openatx/uiautomator2 | examples/multi-thread-example.py | Python | mit | 747 |
"""autogenerated by genpy from Ref/image.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import Ref.msg
import std_msgs.msg
class image(genpy.Message):
_md5sum = "f5f5ba95d6a5e2a82d553cdc6692532a"
_type = "Ref/image"
_has_header = True #flag to mark the presence of a Header object
_full_text = """Header header
Position[] bots
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: Ref/Position
float32 x
float32 y
float32 rot
"""
__slots__ = ['header','bots']
_slot_types = ['std_msgs/Header','Ref/Position[]']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,bots
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(image, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.bots is None:
self.bots = []
else:
self.header = std_msgs.msg.Header()
self.bots = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.bots)
buff.write(_struct_I.pack(length))
for val1 in self.bots:
_x = val1
buff.write(_struct_3f.pack(_x.x, _x.y, _x.rot))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.bots is None:
self.bots = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.bots = []
for i in range(0, length):
val1 = Ref.msg.Position()
_x = val1
start = end
end += 12
(_x.x, _x.y, _x.rot,) = _struct_3f.unpack(str[start:end])
self.bots.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.bots)
buff.write(_struct_I.pack(length))
for val1 in self.bots:
_x = val1
buff.write(_struct_3f.pack(_x.x, _x.y, _x.rot))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.bots is None:
self.bots = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.bots = []
for i in range(0, length):
val1 = Ref.msg.Position()
_x = val1
start = end
end += 12
(_x.x, _x.y, _x.rot,) = _struct_3f.unpack(str[start:end])
self.bots.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_3f = struct.Struct("<3f")
| robotics-silver-surfer/surfer-main | lab3/Ref/src/Ref/msg/_image.py | Python | mit | 6,271 |
#!/usr/bin/env python
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness
class StatepointTestHarness(TestHarness):
def __init__(self):
self._sp_name = None
self._tallies = False
self._opts = None
self._args = None
def _test_output_created(self):
"""Make sure statepoint files have been created."""
sps = ('statepoint.03.*', 'statepoint.06.*', 'statepoint.09.*')
for sp in sps:
self._sp_name = sp
TestHarness._test_output_created(self)
if __name__ == '__main__':
harness = StatepointTestHarness()
harness.main()
| mjlong/openmc | tests/test_statepoint_batch/test_statepoint_batch.py | Python | mit | 654 |
from __future__ import absolute_import
from .check_info import check_info_annotation
from .rank_scores import build_rank_score_dict
from .build_info import (build_info_string, build_info_dict)
from .build_compounds import build_compounds_dict
from .build_models import build_models_dict
from .build_vep import (build_vep_string, build_vep_annotation)
from .split_genotype import split_genotype
from .format_variant import format_variant
from .split_variants import split_variants
| moonso/vcf_parser | vcf_parser/utils/__init__.py | Python | mit | 481 |
# -*- coding: utf-8 -*-
"""
Tests related to the Cuttle class.
"""
import os
import unittest
import warnings
import time
from cuttle.reef import Cuttle, Column
from cuttlepool import CuttlePool
from cuttlepool.cuttlepool import PoolConnection
DB = '_cuttle_test_db'
DB2 = '_cuttle_test_db2'
HOST = 'localhost'
class BaseDbTestCase(unittest.TestCase):
def setUp(self):
self.Pool = CuttlePool
self.Connection = PoolConnection
self.credentials = dict(host=HOST)
self.sql_type = os.environ['TEST_CUTTLE'].lower()
if self.sql_type == 'mysql':
import pymysql
from mysql_credentials import USER, PASSWD
self.Cursor = pymysql.cursors.Cursor
self.connect = pymysql.connect
self.credentials.update(dict(user=USER, passwd=PASSWD))
self.db = Cuttle(self.sql_type, db=DB, **self.credentials)
class Heros(self.db.Model):
columns = [
Column('hero_id', 'INT', auto_increment=True, primary_key=True),
Column('hero_name', 'VARCHAR', maximum=16)
]
self.testtable1 = Heros
self.create_heros_statement = (
'CREATE TABLE IF NOT EXISTS {} (\n'
'hero_id INT AUTO_INCREMENT PRIMARY KEY,\n'
'hero_name VARCHAR(16)\n'
')').format(self.testtable1().name)
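        # Rows as returned by MySQL's DESCRIBE, one tuple per column:
        # (Field, Type, Null, Key, Default, Extra) -- assumes the MySQL backend.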
self.heros_schema = (('hero_id', 'int(11)', 'NO', 'PRI', None, 'auto_increment'),
('hero_name', 'varchar(16)', 'YES', '', None, ''))
def tearDown(self):
warnings.filterwarnings('ignore')
self.db.drop_db()
def createPool(self, **kwargs):
warnings.filterwarnings('ignore')
return CuttlePool(self.connect, **kwargs)
class DbNestedModelTestCase(BaseDbTestCase):
def setUp(self):
super(DbNestedModelTestCase, self).setUp()
class UselessTable(self.db.Model):
pass
self.uselesstable = UselessTable
class Villains(UselessTable):
columns = [
Column('villain_id', 'INT'),
Column('villain_name', 'VARCHAR', maximum=16)
]
self.testtable2 = Villains
class TwoDbTestCase(BaseDbTestCase):
def setUp(self):
super(TwoDbTestCase, self).setUp()
self.db2 = Cuttle(self.sql_type, db=DB2, **self.credentials)
class ThrowAway(self.db2.Model):
columns = [
Column('throwaway', 'INT')
]
self.testtable2 = ThrowAway
def tearDown(self):
super(TwoDbTestCase, self).tearDown()
self.db2.drop_db()
class CuttleInstanceTestCase(unittest.TestCase):
def test_improper_sql_type(self):
with self.assertRaises(ValueError):
db = Cuttle('wrongsql', db='db')
def test_no_db(self):
with self.assertRaises(ValueError):
db = Cuttle('mysql')
def test_name_property(self):
db_name = 'get_schwifty'
db = Cuttle('mysql', db=db_name)
self.assertEqual(db.name, db_name)
class CuttleCreateDbTestCase(BaseDbTestCase):
def test_create_db(self):
self.db.create_db()
pool = self.createPool(db=DB, **self.credentials)
con = pool.get_connection()
cur = con.cursor()
# get databases
cur.execute('SHOW DATABASES')
dbs = cur.fetchall()
self.assertIn((DB,), dbs)
def test_table_schema(self):
self.db.create_db()
pool = self.createPool(db=DB, **self.credentials)
con = pool.get_connection()
cur = con.cursor()
# get tables
cur.execute('SHOW TABLES')
tbls = cur.fetchall()
self.assertEqual(((self.testtable1().name,),), tbls)
# get table schema
cur.execute('DESCRIBE {}'.format(self.testtable1().name))
tblschma = cur.fetchall()
self.assertEqual(self.heros_schema, tblschma)
class CuttleCreateMultiDbTestCase(TwoDbTestCase):
def test_create_two_dbs(self):
self.db.create_db()
self.db2.create_db()
pool1 = self.createPool(db=DB, **self.credentials)
pool2 = self.createPool(db=DB2, **self.credentials)
con1 = pool1.get_connection()
cur1 = con1.cursor()
con2 = pool2.get_connection()
cur2 = con2.cursor()
# get databases
cur1.execute('SHOW DATABASES')
dbs = cur1.fetchall()
self.assertIn((DB,), dbs)
self.assertIn((DB2,), dbs)
# get tables
cur1.execute('SHOW TABLES')
tbls1 = cur1.fetchall()
cur2.execute('SHOW TABLES')
tbls2 = cur2.fetchall()
self.assertIn((self.testtable1().name,), tbls1)
self.assertNotIn((self.testtable2().name,), tbls1)
self.assertIn((self.testtable2().name,), tbls2)
self.assertNotIn((self.testtable1().name,), tbls2)
class CuttleCreateDbNestedModelsTestCase(DbNestedModelTestCase):
def test_correct_tables_made(self):
self.db.create_db()
pool = self.createPool(db=DB, **self.credentials)
con = pool.get_connection()
cur = con.cursor()
# get tables
cur.execute('SHOW TABLES')
tbls = cur.fetchall()
self.assertIn((self.testtable1().name,), tbls)
self.assertIn((self.testtable2().name,), tbls)
self.assertNotIn((self.uselesstable().name,), tbls)
class CuttleDropDbTestCase(BaseDbTestCase):
def setUp(self):
super(CuttleDropDbTestCase, self).setUp()
self.db.create_db()
def test_drop_db(self):
pool = self.createPool(**self.credentials)
con = pool.get_connection()
cur = con.cursor()
# get databases
cur.execute('SHOW DATABASES')
dbs = cur.fetchall()
# make sure database actually exists
self.assertIn((DB,), dbs)
# drop the database
self.db.drop_db()
# get databases
cur.execute('SHOW DATABASES')
dbs = cur.fetchall()
# make sure database no longer exists
self.assertNotIn((DB,), dbs)
| smitchell556/cuttle | tests/test_cuttle_class.py | Python | mit | 6,111 |
# -*- coding: utf-8 -*-
"""
    Simple sockjs-tornado chat application. By default it will listen on port 8888.
"""
import sys
import os
import json
from urllib import urlencode
import tornado.ioloop
import tornado.web
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
import sockjs.tornado
sys.path.append( os.path.join(os.path.dirname(__file__), '..') )
os.environ['DJANGO_SETTINGS_MODULE'] = 'dj_backend.settings'
from importlib import import_module
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
DJ_BACKEND = 'http://localhost:8000'
try:
from .local_sock_settings import *
except ImportError:
pass
class IndexHandler(tornado.web.RequestHandler):
"""Regular HTTP handler to serve the chatroom page"""
def get(self):
self.render('index.html')
class ChatConnection(sockjs.tornado.SockJSConnection):
"""Chat connection implementation"""
session_blocks = dict()
session_code_storage = dict()
http_client = AsyncHTTPClient()
def on_open(self, info):
self.joined_session = []
@gen.coroutine
def on_message(self, message):
recv_data = json.loads(message)
if recv_data['type']=='session_join':
session_name = recv_data['data']
            if session_name not in self.session_blocks:
self.session_blocks[session_name] = set()
session_id = self.session.conn_info.cookies['sessionid'].value
dj_session = SessionStore(session_key=session_id)
dj_user_id = dj_session.get('_auth_user_id')
if dj_user_id:
user_response = yield self.http_client.fetch(DJ_BACKEND + '/api/users/%d/?format=json' % dj_user_id)
dj_user = json.loads(user_response.body)
self.dj_user = dj_user
self.user_name = self.dj_user['username']
else:
self.user_name = dj_session.get('insta_name')
self.dj_user = None
self.session_blocks[session_name].add(self)
self.joined_session.append(session_name)
self.session_list_update(session_name)
send_data = {}
send_data['type'] = 'code_update'
            send_data['data'] = self.session_code_storage.get(session_name, "")  # empty for a brand-new session
self.send(json.dumps(send_data))
elif recv_data['type']=='code_update':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'code_update'
send_data['data'] = recv_data['data']
self.session_code_storage[session_name] = recv_data['data']
self.broadcast(self.session_blocks[session_name] - set([self]), json.dumps(send_data))
elif recv_data['type']=='chat_message_send':
session_name = recv_data['session']
send_data = {}
send_data['type'] = 'chat_message_receive'
chat_data = {'username': self.user_name}
if self.dj_user:
chat_data['avatar_url'] = self.dj_user['avatar_url']
chat_data['message'] = recv_data['data']
send_data['data'] = json.dumps(chat_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
elif recv_data['type']=='run_code':
session_name = recv_data['session']
code = recv_data['data']
language = recv_data['language']
post_data = urlencode({'code': code, 'language': language})
result = yield self.http_client.fetch(DJ_BACKEND + '/pair_session/run_code/',
method='POST', body=post_data)
result_data = {'result': result.body}
send_data = {}
send_data['type'] = 'run_code_result'
send_data['data'] = json.dumps(result_data)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
def on_close(self):
for session in self.joined_session:
self.session_blocks[session].remove(self)
self.session_list_update(session)
def session_list_update(self, session_name):
session_list = []
for session in self.session_blocks[session_name]:
user = {'username': session.user_name}
if session.dj_user:
user['avatar_url'] = session.dj_user['avatar_url']
session_list.append(user)
send_data = {}
        send_data['type'] = 'session_list_update'
send_data['data'] = json.dumps(session_list)
self.broadcast(self.session_blocks[session_name], json.dumps(send_data))
if __name__ == "__main__":
import logging
logging.getLogger().setLevel(logging.INFO)
# 1. Create chat router
ChatRouter = sockjs.tornado.SockJSRouter(ChatConnection, '/chat')
# 2. Create Tornado application
app = tornado.web.Application(
[(r"/", IndexHandler)] + ChatRouter.urls,
debug=True
)
    # 3. Make Tornado app listen on port 8888
app.listen(8888)
# 4. Start IOLoop
tornado.ioloop.IOLoop.instance().start()
| woojing/pairgramming | dj_backend/sock_serv.py | Python | mit | 5,135 |
#
# GDAX/AuthenticatedClient.py
# Daniel Paquin
#
# For authenticated requests to the GDAX exchange
import hmac
import hashlib
import time
import requests
import base64
import json
from requests.auth import AuthBase
from gdax.public_client import PublicClient
class AuthenticatedClient(PublicClient):
def __init__(self, key, b64secret, passphrase, api_url="https://api.gdax.com", product_id="BTC-USD"):
super(self.__class__, self).__init__(api_url, product_id)
self.auth = GdaxAuth(key, b64secret, passphrase)
def get_account(self, account_id):
r = requests.get(self.url + '/accounts/' + account_id, auth=self.auth)
# r.raise_for_status()
return r.json()
def get_accounts(self):
return self.get_account('')
def get_account_history(self, account_id):
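        # GDAX paginates ledger entries; the cb-after response header carries
        # the cursor for the next page, followed recursively below.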
result = []
r = requests.get(self.url + '/accounts/{}/ledger'.format(account_id), auth=self.auth)
# r.raise_for_status()
result.append(r.json())
if "cb-after" in r.headers:
self.history_pagination(account_id, result, r.headers["cb-after"])
return result
def history_pagination(self, account_id, result, after):
r = requests.get(self.url + '/accounts/{}/ledger?after={}'.format(account_id, str(after)), auth=self.auth)
# r.raise_for_status()
if r.json():
result.append(r.json())
if "cb-after" in r.headers:
self.history_pagination(account_id, result, r.headers["cb-after"])
return result
def get_account_holds(self, account_id):
result = []
r = requests.get(self.url + '/accounts/{}/holds'.format(account_id), auth=self.auth)
# r.raise_for_status()
result.append(r.json())
if "cb-after" in r.headers:
self.holds_pagination(account_id, result, r.headers["cb-after"])
return result
def holds_pagination(self, account_id, result, after):
r = requests.get(self.url + '/accounts/{}/holds?after={}'.format(account_id, str(after)), auth=self.auth)
# r.raise_for_status()
if r.json():
result.append(r.json())
if "cb-after" in r.headers:
self.holds_pagination(account_id, result, r.headers["cb-after"])
return result
def buy(self, **kwargs):
kwargs["side"] = "buy"
if "product_id" not in kwargs:
kwargs["product_id"] = self.product_id
r = requests.post(self.url + '/orders',
data=json.dumps(kwargs),
auth=self.auth)
return r.json()
def sell(self, **kwargs):
kwargs["side"] = "sell"
r = requests.post(self.url + '/orders',
data=json.dumps(kwargs),
auth=self.auth)
return r.json()
def cancel_order(self, order_id):
r = requests.delete(self.url + '/orders/' + order_id, auth=self.auth)
# r.raise_for_status()
return r.json()
def cancel_all(self, data=None, product=''):
        if isinstance(data, dict):
if "product" in data:
product = data["product"]
r = requests.delete(self.url + '/orders/',
data=json.dumps({'product_id': product or self.product_id}), auth=self.auth)
# r.raise_for_status()
return r.json()
def get_order(self, order_id):
r = requests.get(self.url + '/orders/' + order_id, auth=self.auth)
# r.raise_for_status()
return r.json()
def get_orders(self):
result = []
r = requests.get(self.url + '/orders/', auth=self.auth)
# r.raise_for_status()
result.append(r.json())
if 'cb-after' in r.headers:
self.paginate_orders(result, r.headers['cb-after'])
return result
def paginate_orders(self, result, after):
        r = requests.get(self.url + '/orders?after={}'.format(str(after)), auth=self.auth)
# r.raise_for_status()
if r.json():
result.append(r.json())
if 'cb-after' in r.headers:
self.paginate_orders(result, r.headers['cb-after'])
return result
def get_fills(self, order_id='', product_id='', before='', after='', limit=''):
result = []
url = self.url + '/fills?'
if order_id:
url += "order_id={}&".format(str(order_id))
if product_id:
url += "product_id={}&".format(product_id or self.product_id)
if before:
url += "before={}&".format(str(before))
if after:
url += "after={}&".format(str(after))
if limit:
url += "limit={}&".format(str(limit))
r = requests.get(url, auth=self.auth)
# r.raise_for_status()
result.append(r.json())
        if 'cb-after' in r.headers and limit != len(r.json()):
return self.paginate_fills(result, r.headers['cb-after'], order_id=order_id, product_id=product_id)
return result
def paginate_fills(self, result, after, order_id='', product_id=''):
url = self.url + '/fills?after={}&'.format(str(after))
if order_id:
url += "order_id={}&".format(str(order_id))
if product_id:
url += "product_id={}&".format(product_id or self.product_id)
r = requests.get(url, auth=self.auth)
# r.raise_for_status()
if r.json():
result.append(r.json())
if 'cb-after' in r.headers:
return self.paginate_fills(result, r.headers['cb-after'], order_id=order_id, product_id=product_id)
return result
def get_fundings(self, result='', status='', after=''):
if not result:
result = []
url = self.url + '/funding?'
if status:
url += "status={}&".format(str(status))
if after:
url += 'after={}&'.format(str(after))
r = requests.get(url, auth=self.auth)
# r.raise_for_status()
result.append(r.json())
if 'cb-after' in r.headers:
return self.get_fundings(result, status=status, after=r.headers['cb-after'])
return result
def repay_funding(self, amount='', currency=''):
payload = {
"amount": amount,
"currency": currency # example: USD
}
r = requests.post(self.url + "/funding/repay", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def margin_transfer(self, margin_profile_id="", transfer_type="", currency="", amount=""):
payload = {
"margin_profile_id": margin_profile_id,
"type": transfer_type,
"currency": currency, # example: USD
"amount": amount
}
r = requests.post(self.url + "/profiles/margin-transfer", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def get_position(self):
r = requests.get(self.url + "/position", auth=self.auth)
# r.raise_for_status()
return r.json()
def close_position(self, repay_only=""):
payload = {
"repay_only": repay_only or False
}
r = requests.post(self.url + "/position/close", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def deposit(self, amount="", currency="", payment_method_id=""):
payload = {
"amount": amount,
"currency": currency,
"payment_method_id": payment_method_id
}
r = requests.post(self.url + "/deposits/payment-method", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def coinbase_deposit(self, amount="", currency="", coinbase_account_id=""):
payload = {
"amount": amount,
"currency": currency,
"coinbase_account_id": coinbase_account_id
}
r = requests.post(self.url + "/deposits/coinbase-account", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def withdraw(self, amount="", currency="", payment_method_id=""):
payload = {
"amount": amount,
"currency": currency,
"payment_method_id": payment_method_id
}
r = requests.post(self.url + "/withdrawals/payment-method", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def coinbase_withdraw(self, amount="", currency="", coinbase_account_id=""):
payload = {
"amount": amount,
"currency": currency,
"coinbase_account_id": coinbase_account_id
}
r = requests.post(self.url + "/withdrawals/coinbase", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def crypto_withdraw(self, amount="", currency="", crypto_address=""):
payload = {
"amount": amount,
"currency": currency,
"crypto_address": crypto_address
}
r = requests.post(self.url + "/withdrawals/crypto", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def get_payment_methods(self):
r = requests.get(self.url + "/payment-methods", auth=self.auth)
# r.raise_for_status()
return r.json()
def get_coinbase_accounts(self):
r = requests.get(self.url + "/coinbase-accounts", auth=self.auth)
# r.raise_for_status()
return r.json()
def create_report(self, report_type="", start_date="", end_date="", product_id="", account_id="", report_format="",
email=""):
payload = {
"type": report_type,
"start_date": start_date,
"end_date": end_date,
"product_id": product_id,
"account_id": account_id,
"format": report_format,
"email": email
}
r = requests.post(self.url + "/reports", data=json.dumps(payload), auth=self.auth)
# r.raise_for_status()
return r.json()
def get_report(self, report_id=""):
r = requests.get(self.url + "/reports/" + report_id, auth=self.auth)
# r.raise_for_status()
return r.json()
def get_trailing_volume(self):
r = requests.get(self.url + "/users/self/trailing-volume", auth=self.auth)
# r.raise_for_status()
return r.json()
class GdaxAuth(AuthBase):
# Provided by GDAX: https://docs.gdax.com/#signing-a-message
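    # Per the linked docs: the prehash string is timestamp + method + path + body,
    # signed with HMAC-SHA256 using the base64-decoded secret, then base64-encoded.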
def __init__(self, api_key, secret_key, passphrase):
self.api_key = api_key
self.secret_key = secret_key
self.passphrase = passphrase
def __call__(self, request):
timestamp = str(time.time())
message = timestamp + request.method + request.path_url + (request.body or '')
message = message.encode('ascii')
hmac_key = base64.b64decode(self.secret_key)
signature = hmac.new(hmac_key, message, hashlib.sha256)
signature_b64 = base64.b64encode(signature.digest())
request.headers.update({
'Content-Type': 'Application/JSON',
'CB-ACCESS-SIGN': signature_b64,
'CB-ACCESS-TIMESTAMP': timestamp,
'CB-ACCESS-KEY': self.api_key,
'CB-ACCESS-PASSPHRASE': self.passphrase
})
return request
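# Minimal usage sketch (assumed credentials and order parameters; not part of
# the library itself):
#   client = AuthenticatedClient(KEY, B64SECRET, PASSPHRASE)
#   client.buy(type='limit', price='100.00', size='0.01', product_id='BTC-USD')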
| acontry/GDAX-Python | gdax/authenticated_client.py | Python | mit | 11,447 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'billdetails.end_date'
db.alter_column(u'employee_billdetails', 'end_date', self.gf('django.db.models.fields.DateField')(null=True))
# Changing field 'billdetails.start_date'
db.alter_column(u'employee_billdetails', 'start_date', self.gf('django.db.models.fields.DateField')(null=True))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'billdetails.end_date'
raise RuntimeError("Cannot reverse this migration. 'billdetails.end_date' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'billdetails.end_date'
db.alter_column(u'employee_billdetails', 'end_date', self.gf('django.db.models.fields.DateField')())
# User chose to not deal with backwards NULL issues for 'billdetails.start_date'
raise RuntimeError("Cannot reverse this migration. 'billdetails.start_date' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'billdetails.start_date'
db.alter_column(u'employee_billdetails', 'start_date', self.gf('django.db.models.fields.DateField')())
models = {
u'employee.billdetails': {
'Meta': {'object_name': 'billdetails'},
'bill_type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'emp_name': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Employee']"}),
'emp_proj': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Project']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'employee.employee': {
'Add1': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'Add2': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'City': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'Designation': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Major_Subject': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'Meta': {'object_name': 'Employee'},
'Qualification': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'Skill_sets': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Visa_Status': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'Zip_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'bill': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {}),
'doj': ('django.db.models.fields.DateField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '50'}),
'exp': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '2'}),
'id': ('django.db.models.fields.IntegerField', [], {'max_length': '6', 'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'mobile': ('django.db.models.fields.IntegerField', [], {'max_length': '12'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'personal_email': ('django.db.models.fields.EmailField', [], {'max_length': '50', 'blank': 'True'}),
'proj': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['employee.Project']"}),
'start_date': ('django.db.models.fields.DateField', [], {'blank': 'True'})
},
u'employee.project': {
'Meta': {'object_name': 'Project'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['employee'] | asm-technologies/management | employee/migrations/0006_auto__chg_field_billdetails_end_date__chg_field_billdetails_start_date.py | Python | mit | 4,781 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Provide ways to loading molecules for the test instances.
"""
import os
import re
__license__ = 'X11'
__DATA_DIRECTORY = os.path.dirname(os.path.realpath(__file__)) + '/../../data/'
__DATASET_DIRECTORY = __DATA_DIRECTORY + 'datasets/'
__MOLECULES_FILE_CACHE = {}
class DatasetReference(object):
"""Definition of a dataset reference.
"""
def __init__(self, dataset, selection, group):
self.dataset = dataset
self.selection = selection
self.group = group
class Molecules(object):
"""Object with molecules for a single test instance.
"""
def __init__(self):
self.test = []
self.train = {
"actives": [],
"inactives": []
}
def list_datasets(as_path=False):
"""
    :param as_path: If True, return full paths instead of names.
:return: Datasets in the platform.
"""
datasets = [name for name in os.listdir(__DATASET_DIRECTORY)
if os.path.isdir(__DATASET_DIRECTORY + name)]
if as_path:
return [__DATASET_DIRECTORY + name for name in datasets]
else:
return datasets
def list_selections(dataset, as_path=False):
"""
:param dataset: Name of the dataset.
    :param as_path: If True, return full paths instead of names.
:return: Selections in the dataset.
"""
directory = __DATASET_DIRECTORY + dataset + '/selections/'
selections = [name for name in os.listdir(directory)
if os.path.isdir(directory + name)]
if as_path:
return [directory + name for name in selections]
else:
return selections
def list_groups(dataset, selection, as_path=False):
"""
:param dataset: Name of the dataset.
:param selection: Name of the selection.
    :param as_path: If True, return full paths instead of names.
:return: Groups in given selection and datasets.
"""
directory = __DATASET_DIRECTORY + dataset + '/selections/' + selection + '/'
if as_path:
return [directory + name for name in os.listdir(directory)]
else:
return os.listdir(directory)
def list_instances_from_reference(
dataset_reference, as_path=False):
return list_instances(dataset_reference.dataset,
dataset_reference.selection,
dataset_reference.group, as_path)
def list_instances(dataset, selection, group, as_path=False):
"""
:param dataset: Name of the dataset.
:param selection: Name of the selection.
:param group: Name of the group.
    :param as_path: If True, return full paths instead of names.
:return: Instances for given dataset, selection and group.
"""
directory = __DATASET_DIRECTORY + dataset + '/selections/' + \
selection + '/' + group + '/'
instances_names = [name for name in os.listdir(directory)
if name.startswith("s_")]
if as_path:
return [directory + name for name in instances_names]
else:
return instances_names
def __load_molecules(path):
"""
    :param path: Path to an SDF file.
:return: Valid molecules from given file.
"""
global __MOLECULES_FILE_CACHE
if path in __MOLECULES_FILE_CACHE:
return __MOLECULES_FILE_CACHE[path]
if len(__MOLECULES_FILE_CACHE) > 2:
__MOLECULES_FILE_CACHE = {}
import rdkit
from rdkit import Chem
molecules = [molecule for molecule in rdkit.Chem.SDMolSupplier(str(path))
if molecule is not None]
__MOLECULES_FILE_CACHE[path] = molecules
return molecules
def load_molecules(dataset_reference, instance_data):
"""
    :param dataset_reference: DatasetReference locating the dataset.
    :param instance_data: Data of the instance.
    :return: Molecules object with the test and train molecules.
"""
sdf_directory = __DATASET_DIRECTORY + dataset_reference.dataset + \
'/molecules/sdf/'
molecules = {}
    for file_name in instance_data['data']['files']:
        sdf_path = sdf_directory + file_name + '.sdf'
for molecule in __load_molecules(sdf_path):
molecules[molecule.GetProp('_Name')] = molecule
result = Molecules()
for item in instance_data['data']['test']:
result.test.append(molecules[item['name']])
for item in instance_data['data']['train']['decoys']:
result.train["inactives"].append(molecules[item['name']])
for item in instance_data['data']['train']['ligands']:
result.train["actives"].append(molecules[item['name']])
return result
def resolve(dataset_filter='.*', selection_filter='.*', group_filter='.*'):
"""
    :param dataset_filter: Regular expression matched against dataset names.
    :param selection_filter: Regular expression matched against selection names.
    :param group_filter: Regular expression matched against group names.
:return: Array of matches to given filters.
"""
result = []
re_dataset = re.compile(dataset_filter)
re_selection = re.compile(selection_filter)
re_group = re.compile(group_filter)
for dataset in list_datasets():
if not re_dataset.match(dataset):
continue
for selection in list_selections(dataset):
if not re_selection.match(selection):
continue
for group in list_groups(dataset, selection):
if not re_group.match(group):
continue
result.append(DatasetReference(dataset, selection, group))
return result
def dataset_to_path(dataset_reference):
"""
    :param dataset_reference: DatasetReference locating the group.
:return: Path to group directory.
"""
return __DATASET_DIRECTORY + dataset_reference.dataset + '/selections/' + \
dataset_reference.selection + '/' + dataset_reference.group
def list_collections(as_path=False):
"""
    :param as_path: If True, return full paths instead of names.
:return: List of collections.
"""
if as_path:
return [__DATA_DIRECTORY + name
for name in os.listdir(__DATA_DIRECTORY + '/collections/')]
else:
return os.listdir(__DATA_DIRECTORY + '/collections/')
def list_datasets_for_collection(collection, default_selection=None):
"""
    :param collection: Name of the collection.
    :param default_selection: Optional default selection (currently unused).
:return: Groups of datasets.
"""
collection_dir = __DATA_DIRECTORY + '/collections/' + collection + '/'
result = {}
for name in os.listdir(collection_dir):
datasets_in_collection = []
result[name] = datasets_in_collection
with open(collection_dir + name) as stream:
for line in stream:
line = line.rstrip().split(',')
datasets_in_collection.append([line[0], line[1], line[2]])
return result
if __name__ == '__main__':
raise Exception('This module should be used only as a library!')
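# Import-only usage sketch (assumes the data/datasets/ layout read above):
#   refs = resolve(selection_filter='random_.*')
#   for ref in refs:
#       instances = list_instances_from_reference(ref, as_path=True)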
| skodapetr/lbvs-environment | scripts/libs/data.py | Python | mit | 6,451 |
#!/usr/bin/env python3
# Copyright (c) 2009-2019 The Bitcoin Core developers
# Copyright (c) 2014-2019 The DigiByte Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Check RPC argument consistency."""
from collections import defaultdict
import os
import re
import sys
# Source files (relative to root) to scan for dispatch tables
SOURCES = [
"src/rpc/server.cpp",
"src/rpc/blockchain.cpp",
"src/rpc/mining.cpp",
"src/rpc/misc.cpp",
"src/rpc/net.cpp",
"src/rpc/rawtransaction.cpp",
"src/wallet/rpcwallet.cpp",
]
# Source file (relative to root) containing conversion mapping
SOURCE_CLIENT = 'src/rpc/client.cpp'
# Argument names that should be ignored in consistency checks
IGNORE_DUMMY_ARGS = {'dummy', 'arg0', 'arg1', 'arg2', 'arg3', 'arg4', 'arg5', 'arg6', 'arg7', 'arg8', 'arg9'}
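# Illustrative dispatch-table row that process_commands() parses (format only):
#   { "blockchain",        "getblockcount",        &getblockcount,        {} },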
class RPCCommand:
def __init__(self, name, args):
self.name = name
self.args = args
class RPCArgument:
def __init__(self, names, idx):
self.names = names
self.idx = idx
self.convert = False
def parse_string(s):
assert s[0] == '"'
assert s[-1] == '"'
return s[1:-1]
def process_commands(fname):
"""Find and parse dispatch table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if re.match("static const CRPCCommand .*\[\] =", line):
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *("[^"]*"), *&([^,]*), *{([^}]*)} *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(2))
args_str = m.group(4).strip()
if args_str:
args = [RPCArgument(parse_string(x.strip()).split('|'), idx) for idx, x in enumerate(args_str.split(','))]
else:
args = []
cmds.append(RPCCommand(name, args))
assert not in_rpcs and cmds, "Something went wrong with parsing the C++ file: update the regexps"
return cmds
def process_mapping(fname):
"""Find and parse conversion table in implementation file `fname`."""
cmds = []
in_rpcs = False
with open(fname, "r", encoding="utf8") as f:
for line in f:
line = line.rstrip()
if not in_rpcs:
if line == 'static const CRPCConvertParam vRPCConvertParams[] =':
in_rpcs = True
else:
if line.startswith('};'):
in_rpcs = False
elif '{' in line and '"' in line:
m = re.search('{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},', line)
assert m, 'No match to table expression: %s' % line
name = parse_string(m.group(1))
idx = int(m.group(2))
argname = parse_string(m.group(3))
cmds.append((name, idx, argname))
assert not in_rpcs and cmds
return cmds
def main():
root = sys.argv[1]
# Get all commands from dispatch tables
cmds = []
for fname in SOURCES:
cmds += process_commands(os.path.join(root, fname))
cmds_by_name = {}
for cmd in cmds:
cmds_by_name[cmd.name] = cmd
# Get current convert mapping for client
client = SOURCE_CLIENT
mapping = set(process_mapping(os.path.join(root, client)))
print('* Checking consistency between dispatch tables and vRPCConvertParams')
# Check mapping consistency
errors = 0
for (cmdname, argidx, argname) in mapping:
try:
rargnames = cmds_by_name[cmdname].args[argidx].names
except IndexError:
print('ERROR: %s argument %i (named %s in vRPCConvertParams) is not defined in dispatch table' % (cmdname, argidx, argname))
errors += 1
continue
if argname not in rargnames:
print('ERROR: %s argument %i is named %s in vRPCConvertParams but %s in dispatch table' % (cmdname, argidx, argname, rargnames), file=sys.stderr)
errors += 1
# Check for conflicts in vRPCConvertParams conversion
# All aliases for an argument must either be present in the
# conversion table, or not. Anything in between means an oversight
# and some aliases won't work.
for cmd in cmds:
for arg in cmd.args:
convert = [((cmd.name, arg.idx, argname) in mapping) for argname in arg.names]
if any(convert) != all(convert):
print('ERROR: %s argument %s has conflicts in vRPCConvertParams conversion specifier %s' % (cmd.name, arg.names, convert))
errors += 1
arg.convert = all(convert)
# Check for conversion difference by argument name.
# It is preferable for API consistency that arguments with the same name
# have the same conversion, so bin by argument name.
all_methods_by_argname = defaultdict(list)
converts_by_argname = defaultdict(list)
for cmd in cmds:
for arg in cmd.args:
for argname in arg.names:
all_methods_by_argname[argname].append(cmd.name)
converts_by_argname[argname].append(arg.convert)
for argname, convert in converts_by_argname.items():
if all(convert) != any(convert):
if argname in IGNORE_DUMMY_ARGS:
# these are testing or dummy, don't warn for them
continue
print('WARNING: conversion mismatch for argument named %s (%s)' %
(argname, list(zip(all_methods_by_argname[argname], converts_by_argname[argname]))))
sys.exit(errors > 0)
if __name__ == '__main__':
main()
| digibyte/digibyte | test/lint/check-rpc-mappings.py | Python | mit | 6,062 |